1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
 22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
 40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
 43 otr_token is the index into the virtual table where the address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
 47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
 55 vector. Members of this vector are not BINFOs associated
 56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
 58 the virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
 63 inheritance the individual virtual tables are pointed to by
 64 BINFO_VTABLE of the base binfos (which differ from the BINFO_VTABLE of
 65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
 68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
 73 This is the index of a virtual method in the virtual table associated
 74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
 75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
 78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
 89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
 98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
 102 possible_polymorphic_call_targets returns, given the parameters found in
 103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
 104
105 pass_ipa_devirt performs simple speculative devirtualization.
106*/
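/* Illustrative sketch, not part of the original sources: for a call such as
   the one below, the C++ front end emits an OBJ_TYPE_REF whose otr_type is
   the class A and whose otr_token is the vtable slot of A::foo; the class
   and method names are hypothetical, chosen only to make the vocabulary
   above concrete.

     struct A { virtual int foo (); };
     struct B : A { virtual int foo (); };

     int call (A *a)
     {
       return a->foo ();  // OBJ_TYPE_REF: otr_type = A, otr_token = slot of foo
     }

   possible_polymorphic_call_targets for this call walks the type
   inheritance graph rooted at A and enumerates A::foo and B::foo.  */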
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
 112#include "tree.h"
113#include "print-tree.h"
114#include "calls.h"
 115#include "cgraph.h"
 116#include "expr.h"
 117#include "tree-pass.h"
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
 127#include "gimple.h"
 128#include "ipa-inline.h"
 129#include "diagnostic.h"
 130#include "tree-dfa.h"
 131#include "demangle.h"
 132#include "dbgcnt.h"
133#include "stor-layout.h"
134#include "intl.h"
135
136static bool odr_types_equivalent_p (tree, tree, bool, bool *, pointer_set_t *);
137
138static bool odr_violation_reported = false;
139
140/* Dummy polymorphic call context. */
141
142const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
143 = {0, NULL, false, true};
 144
145/* Pointer set of all call targets appearing in the cache. */
146static pointer_set_t *cached_polymorphic_call_targets;
147
148/* The node of type inheritance graph. For each type unique in
 149 One Definition Rule (ODR) sense, we produce one node linking all
150 main variants of types equivalent to it, bases and derived types. */
151
152struct GTY(()) odr_type_d
153{
154 /* leader type. */
155 tree type;
 156 /* All bases; built only for main variants of types. */
 157 vec<odr_type> GTY((skip)) bases;
 158 /* All derived types with virtual methods seen in unit;
 159 built only for main variants of types. */
 160 vec<odr_type> GTY((skip)) derived_types;
 161
162 /* All equivalent types, if more than one. */
163 vec<tree, va_gc> *types;
164 /* Set of all equivalent types, if NON-NULL. */
165 pointer_set_t * GTY((skip)) types_set;
166
167 /* Unique ID indexing the type in odr_types array. */
168 int id;
169 /* Is it in anonymous namespace? */
170 bool anonymous_namespace;
171 /* Do we know about all derivations of given type? */
172 bool all_derivations_known;
173 /* Did we report ODR violation here? */
174 bool odr_violated;
175};
176
177
178/* Return true if BINFO corresponds to a type with virtual methods.
179
 180 Every type has several BINFOs. One is the BINFO associated with the type
 181 while the others represent bases of derived types. The BINFOs representing
 182 bases do not have the BINFO_VTABLE pointer set when this is the single
 183 inheritance (because vtables are shared). Look up the BINFO of the type
 184 and check the presence of its vtable. */
185
186static inline bool
187polymorphic_type_binfo_p (tree binfo)
188{
 189 /* See if BINFO's type has a virtual table associated with it. */
190 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
191}
192
193/* Return TRUE if all derived types of T are known and thus
194 we may consider the walk of derived type complete.
195
196 This is typically true only for final anonymous namespace types and types
197 defined within functions (that may be COMDAT and thus shared across units,
198 but with the same set of derived types). */
199
200static bool
201type_all_derivations_known_p (tree t)
202{
203 if (TYPE_FINAL_P (t))
204 return true;
205 if (flag_ltrans)
206 return false;
207 if (type_in_anonymous_namespace_p (t))
208 return true;
209 return (decl_function_context (TYPE_NAME (t)) != NULL);
210}
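/* Illustrative sketch, not part of the original sources: the three cases
   accepted above correspond to C++ such as the following (hypothetical
   names):

     struct F final { virtual void f (); };           // TYPE_FINAL_P
     namespace { struct A { virtual void f (); }; }   // anonymous namespace
     void g () { struct L { virtual void f (); }; }   // function-local type

   In each case every derived type must be visible in this translation unit
   (or in an identical COMDAT copy), so the walk of derived types may be
   considered complete.  */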
211
 212/* Return TRUE if type's constructors are all visible. */
213
214static bool
215type_all_ctors_visible_p (tree t)
216{
217 return !flag_ltrans
218 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
219 /* We can not always use type_all_derivations_known_p.
220 For function local types we must assume case where
221 the function is COMDAT and shared in between units.
222
223 TODO: These cases are quite easy to get, but we need
224 to keep track of C++ privatizing via -Wno-weak
225 as well as the IPA privatizing. */
226 && type_in_anonymous_namespace_p (t);
227}
228
 229/* Return TRUE if the type may have instances. */
230
231static bool
232type_possibly_instantiated_p (tree t)
233{
234 tree vtable;
235 varpool_node *vnode;
236
237 /* TODO: Add abstract types here. */
238 if (!type_all_ctors_visible_p (t))
239 return true;
240
241 vtable = BINFO_VTABLE (TYPE_BINFO (t));
242 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
243 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
244 vnode = varpool_get_node (vtable);
245 return vnode && vnode->definition;
246}
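/* Illustrative sketch, not part of the original sources: for a class whose
   constructors are all visible, e.g. a hypothetical anonymous-namespace
   class that is never instantiated,

     namespace { struct A { virtual void f (); }; }

   the vtable variable of A is not kept in the program, so varpool_get_node
   returns no node with a definition and the function above concludes that
   no instance of A can exist.  */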
247
248/* One Definition Rule hashtable helpers. */
249
250struct odr_hasher
251{
252 typedef odr_type_d value_type;
253 typedef union tree_node compare_type;
254 static inline hashval_t hash (const value_type *);
255 static inline bool equal (const value_type *, const compare_type *);
256 static inline void remove (value_type *);
257};
258
 259/* Return the type that was declared with T's name so that T is a
 260 qualified variant of it. */
261
262static inline tree
263main_odr_variant (const_tree t)
264{
265 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
266 return TREE_TYPE (TYPE_NAME (t));
267 /* Unnamed types and non-C++ produced types can be compared by variants. */
268 else
269 return TYPE_MAIN_VARIANT (t);
270}
271
272/* Produce hash based on type name. */
273
 274static hashval_t
275hash_type_name (tree t)
276{
 277 gcc_checking_assert (main_odr_variant (t) == t);
278
279 /* If not in LTO, all main variants are unique, so we can do
280 pointer hash. */
281 if (!in_lto_p)
282 return htab_hash_pointer (t);
283
284 /* Anonymous types are unique. */
285 if (type_in_anonymous_namespace_p (t))
286 return htab_hash_pointer (t);
287
 288 /* For polymorphic types, we can simply hash the virtual table. */
289 if (TREE_CODE (t) == RECORD_TYPE
290 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
291 {
292 tree v = BINFO_VTABLE (TYPE_BINFO (t));
293 hashval_t hash = 0;
294
295 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
296 {
297 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
298 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
299 }
300
301 v = DECL_ASSEMBLER_NAME (v);
302 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
303 return hash;
304 }
305
306 /* Rest is not implemented yet. */
307 gcc_unreachable ();
308}
309
310/* Return the computed hashcode for ODR_TYPE. */
311
312inline hashval_t
313odr_hasher::hash (const value_type *odr_type)
314{
315 return hash_type_name (odr_type->type);
316}
317
318/* For languages with One Definition Rule, work out if
319 types are the same based on their name.
320
 321 This is non-trivial for LTO where minor differences in
 322 the type representation may have prevented type merging
 323 from merging two copies of an otherwise equivalent type.
324
325 Until we start streaming mangled type names, this function works
326 only for polymorphic types. */
327
328bool
329types_same_for_odr (const_tree type1, const_tree type2)
330{
331 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
332
333 type1 = main_odr_variant (type1);
334 type2 = main_odr_variant (type2);
335
336 if (type1 == type2)
337 return true;
338
339 if (!in_lto_p)
340 return false;
341
342 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
343 on the corresponding TYPE_STUB_DECL. */
344 if (type_in_anonymous_namespace_p (type1)
345 || type_in_anonymous_namespace_p (type2))
346 return false;
347
 348 /* See if types are obviously different (i.e. different codes
 349 or polymorphic wrt non-polymorphic). This is not strictly correct
350 for ODR violating programs, but we can't do better without streaming
351 ODR names. */
352 if (TREE_CODE (type1) != TREE_CODE (type2))
353 return false;
354 if (TREE_CODE (type1) == RECORD_TYPE
 355 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
356 return false;
357 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
358 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
359 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
360 return false;
361
 362 /* At the moment we have no way to establish ODR equivalence at LTO
 363 other than comparing virtual table pointers of polymorphic types.
364 Eventually we should start saving mangled names in TYPE_NAME.
365 Then this condition will become non-trivial. */
366
367 if (TREE_CODE (type1) == RECORD_TYPE
368 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
369 && BINFO_VTABLE (TYPE_BINFO (type1))
370 && BINFO_VTABLE (TYPE_BINFO (type2)))
371 {
372 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
373 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
374 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
375 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
376 return (operand_equal_p (TREE_OPERAND (v1, 1),
377 TREE_OPERAND (v2, 1), 0)
378 && DECL_ASSEMBLER_NAME
379 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
380 == DECL_ASSEMBLER_NAME
381 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
382 }
383 gcc_unreachable ();
384}
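/* Illustrative sketch, not part of the original sources: after LTO reads two
   translation units that each define the same polymorphic class S (a
   hypothetical name), tree merging may still leave two distinct RECORD_TYPE
   nodes for S.  Both copies point at the same vtable symbol, so the
   comparison above reduces to checking that the POINTER_PLUS_EXPR offsets
   are equal and that

     DECL_ASSEMBLER_NAME (vtable of copy 1) == DECL_ASSEMBLER_NAME (vtable of copy 2)

   which is why, until mangled type names are streamed, only polymorphic
   types can be compared this way.  */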
385
386
 387/* Compare types T1 and T2 and return true if they are
388 equivalent. */
389
390inline bool
391odr_hasher::equal (const value_type *t1, const compare_type *ct2)
392{
393 tree t2 = const_cast <tree> (ct2);
394
 395 gcc_checking_assert (main_odr_variant (t2) == t2);
396 if (t1->type == t2)
397 return true;
398 if (!in_lto_p)
399 return false;
400 return types_same_for_odr (t1->type, t2);
401}
402
 403/* Free ODR type V. */
404
405inline void
406odr_hasher::remove (value_type *v)
407{
408 v->bases.release ();
409 v->derived_types.release ();
410 if (v->types_set)
411 pointer_set_destroy (v->types_set);
412 ggc_free (v);
413}
414
415/* ODR type hash used to lookup ODR type based on tree type node. */
416
417typedef hash_table<odr_hasher> odr_hash_type;
418static odr_hash_type *odr_hash;
419
420/* ODR types are also stored into ODR_TYPE vector to allow consistent
421 walking. Bases appear before derived types. Vector is garbage collected
422 so we won't end up visiting empty types. */
423
424static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
425#define odr_types (*odr_types_ptr)
426
427/* Set TYPE_BINFO of TYPE and its variants to BINFO. */
428void
429set_type_binfo (tree type, tree binfo)
430{
431 for (; type; type = TYPE_NEXT_VARIANT (type))
432 if (COMPLETE_TYPE_P (type))
433 TYPE_BINFO (type) = binfo;
434 else
435 gcc_assert (!TYPE_BINFO (type));
436}
437
 438/* Compare T1 and T2 based on name or structure. */
439
440static bool
441odr_subtypes_equivalent_p (tree t1, tree t2, pointer_set_t *visited)
442{
443 bool an1, an2;
444
445 /* This can happen in incomplete types that should be handled earlier. */
446 gcc_assert (t1 && t2);
447
448 t1 = main_odr_variant (t1);
449 t2 = main_odr_variant (t2);
450 if (t1 == t2)
451 return true;
452 if (TREE_CODE (t1) != TREE_CODE (t2))
453 return false;
454 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
455 return false;
456 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
457 return false;
458
459 /* Anonymous namespace types must match exactly. */
460 an1 = type_in_anonymous_namespace_p (t1);
461 an2 = type_in_anonymous_namespace_p (t2);
462 if (an1 != an2 || an1)
463 return false;
464
465 /* For types where we can not establish ODR equivalency, recurse and deeply
466 compare. */
467 if (TREE_CODE (t1) != RECORD_TYPE
468 || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
469 || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
470 || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
471 {
472 /* This should really be a pair hash, but for the moment we do not need
473 100% reliability and it would be better to compare all ODR types so
474 recursion here is needed only for component types. */
475 if (pointer_set_insert (visited, t1))
476 return true;
 477 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
478 }
479 return types_same_for_odr (t1, t2);
480}
481
482/* Output ODR violation warning about T1 and T2 with REASON.
483 Display location of ST1 and ST2 if REASON speaks about field or
484 method of the type.
485 If WARN is false, do nothing. Set WARNED if warning was indeed
486 output. */
487
488void
489warn_odr (tree t1, tree t2, tree st1, tree st2,
490 bool warn, bool *warned, const char *reason)
491{
492 tree decl2 = TYPE_NAME (t2);
493
494 if (!warn)
495 return;
496 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
497 "type %qT violates one definition rule",
498 t1))
499 return;
500 if (!st1)
501 ;
502 else if (TREE_CODE (st1) == FIELD_DECL)
503 {
504 inform (DECL_SOURCE_LOCATION (decl2),
505 "a different type is defined in another translation unit");
506 inform (DECL_SOURCE_LOCATION (st1),
507 "the first difference of corresponding definitions is field %qD",
508 st1);
509 decl2 = st2;
510 }
511 else if (TREE_CODE (st1) == FUNCTION_DECL)
512 {
513 inform (DECL_SOURCE_LOCATION (decl2),
514 "a different type is defined in another translation unit");
515 inform (DECL_SOURCE_LOCATION (st1),
516 "the first difference of corresponding definitions is method %qD",
517 st1);
518 decl2 = st2;
519 }
520 else
521 return;
522 inform (DECL_SOURCE_LOCATION (decl2), reason);
523
524 if (warned)
525 *warned = true;
526}
527
 528/* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
 529 because they are used in the same place in ODR-matching types.
 530 They are not; inform the user. */
531
532void
533warn_types_mismatch (tree t1, tree t2)
534{
535 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
536 return;
537 /* In Firefox it is a common bug to have same types but in
538 different namespaces. Be a bit more informative on
539 this. */
540 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
541 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
542 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
543 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
544 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
545 DECL_NAME (TYPE_CONTEXT (t2))))))
546 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
547 "type %qT should match type %qT but is defined "
548 "in different namespace ",
549 t1, t2);
550 else
551 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
552 "type %qT should match type %qT",
553 t1, t2);
554 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
555 "the incompatible type is defined here");
556}
557
558/* Compare T1 and T2, report ODR violations if WARN is true and set
559 WARNED to true if anything is reported. Return true if types match.
560 If true is returned, the types are also compatible in the sense of
561 gimple_canonical_types_compatible_p. */
562
563static bool
564odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, pointer_set_t *visited)
565{
566 /* Check first for the obvious case of pointer identity. */
567 if (t1 == t2)
568 return true;
569 gcc_assert (!type_in_anonymous_namespace_p (t1));
570 gcc_assert (!type_in_anonymous_namespace_p (t2));
571
572 /* Can't be the same type if the types don't have the same code. */
573 if (TREE_CODE (t1) != TREE_CODE (t2))
574 {
575 warn_odr (t1, t2, NULL, NULL, warn, warned,
576 G_("a different type is defined in another translation unit"));
577 return false;
578 }
579
580 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
581 {
582 warn_odr (t1, t2, NULL, NULL, warn, warned,
583 G_("a type with different qualifiers is defined in another "
584 "translation unit"));
585 return false;
586 }
587
588 if (comp_type_attributes (t1, t2) != 1)
589 {
590 warn_odr (t1, t2, NULL, NULL, warn, warned,
591 G_("a type with attributes "
592 "is defined in another translation unit"));
593 return false;
594 }
595
596 if (TREE_CODE (t1) == ENUMERAL_TYPE)
597 {
598 tree v1, v2;
599 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
600 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
601 {
602 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
603 {
604 warn_odr (t1, t2, NULL, NULL, warn, warned,
605 G_("an enum with different value name"
606 " is defined in another translation unit"));
607 return false;
608 }
609 if (TREE_VALUE (v1) != TREE_VALUE (v2)
610 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
611 DECL_INITIAL (TREE_VALUE (v2)), 0))
612 {
613 warn_odr (t1, t2, NULL, NULL, warn, warned,
614 G_("an enum with different values is defined"
615 " in another translation unit"));
616 return false;
617 }
618 }
619 if (v1 || v2)
620 {
621 warn_odr (t1, t2, NULL, NULL, warn, warned,
622 G_("an enum with mismatching number of values "
623 "is defined in another translation unit"));
624 return false;
625 }
626 }
627
628 /* Non-aggregate types can be handled cheaply. */
629 if (INTEGRAL_TYPE_P (t1)
630 || SCALAR_FLOAT_TYPE_P (t1)
631 || FIXED_POINT_TYPE_P (t1)
632 || TREE_CODE (t1) == VECTOR_TYPE
633 || TREE_CODE (t1) == COMPLEX_TYPE
634 || TREE_CODE (t1) == OFFSET_TYPE
635 || POINTER_TYPE_P (t1))
636 {
637 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
638 {
639 warn_odr (t1, t2, NULL, NULL, warn, warned,
640 G_("a type with different precision is defined "
641 "in another translation unit"));
642 return false;
643 }
644 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
645 {
646 warn_odr (t1, t2, NULL, NULL, warn, warned,
647 G_("a type with different signedness is defined "
648 "in another translation unit"));
649 return false;
650 }
651
652 if (TREE_CODE (t1) == INTEGER_TYPE
653 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
654 {
655 /* char WRT uint_8? */
656 warn_odr (t1, t2, NULL, NULL, warn, warned,
657 G_("a different type is defined in another "
658 "translation unit"));
659 return false;
660 }
661
662 /* For canonical type comparisons we do not want to build SCCs
663 so we cannot compare pointed-to types. But we can, for now,
664 require the same pointed-to type kind and match what
665 useless_type_conversion_p would do. */
666 if (POINTER_TYPE_P (t1))
667 {
668 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
669 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
670 {
671 warn_odr (t1, t2, NULL, NULL, warn, warned,
672 G_("it is defined as a pointer in different address "
673 "space in another translation unit"));
674 return false;
675 }
676
677 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
678 {
679 warn_odr (t1, t2, NULL, NULL, warn, warned,
680 G_("it is defined as a pointer to different type "
681 "in another translation unit"));
682 if (warn && warned)
683 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
684 return false;
685 }
686 }
687
688 /* Tail-recurse to components. */
689 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
690 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
691 {
692 /* Probably specific enough. */
693 warn_odr (t1, t2, NULL, NULL, warn, warned,
694 G_("a different type is defined "
695 "in another translation unit"));
696 if (warn && warned)
697 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
698 return false;
699 }
700
701 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
702 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
703 TYPE_SIZE_UNIT (t2), 0));
704 gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));
705
706 return true;
707 }
708
709 /* Do type-specific comparisons. */
710 switch (TREE_CODE (t1))
711 {
712 case ARRAY_TYPE:
713 {
714 /* Array types are the same if the element types are the same and
715 the number of elements are the same. */
716 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
717 {
718 warn_odr (t1, t2, NULL, NULL, warn, warned,
719 G_("a different type is defined in another "
720 "translation unit"));
721 if (warn && warned)
722 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
723 }
724 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
725 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
726 == TYPE_NONALIASED_COMPONENT (t2));
727
728 tree i1 = TYPE_DOMAIN (t1);
729 tree i2 = TYPE_DOMAIN (t2);
730
731 /* For an incomplete external array, the type domain can be
732 NULL_TREE. Check this condition also. */
733 if (i1 == NULL_TREE || i2 == NULL_TREE)
734 return true;
735
736 tree min1 = TYPE_MIN_VALUE (i1);
737 tree min2 = TYPE_MIN_VALUE (i2);
738 tree max1 = TYPE_MAX_VALUE (i1);
739 tree max2 = TYPE_MAX_VALUE (i2);
740
 741 /* In C++, minimums should always be 0. */
742 gcc_assert (min1 == min2);
743 if (!operand_equal_p (max1, max2, 0))
744 {
745 warn_odr (t1, t2, NULL, NULL, warn, warned,
746 G_("an array of different size is defined "
747 "in another translation unit"));
748 return false;
749 }
750 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
751 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
752 TYPE_SIZE_UNIT (t2), 0));
753 }
754 return true;
755
756 case METHOD_TYPE:
757 case FUNCTION_TYPE:
758 /* Function types are the same if the return type and arguments types
759 are the same. */
760 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
761 {
762 warn_odr (t1, t2, NULL, NULL, warn, warned,
763 G_("has different return value "
764 "in another translation unit"));
765 if (warn && warned)
766 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
767 return false;
768 }
769
770 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
771 return true;
772 else
773 {
774 tree parms1, parms2;
775
776 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
777 parms1 && parms2;
778 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
779 {
780 if (!odr_subtypes_equivalent_p
781 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
782 {
783 warn_odr (t1, t2, NULL, NULL, warn, warned,
784 G_("has different parameters in another "
785 "translation unit"));
786 if (warn && warned)
787 warn_types_mismatch (TREE_VALUE (parms1),
788 TREE_VALUE (parms2));
789 return false;
790 }
791 }
792
793 if (parms1 || parms2)
794 {
795 warn_odr (t1, t2, NULL, NULL, warn, warned,
796 G_("has different parameters "
797 "in another translation unit"));
798 return false;
799 }
800
801 return true;
802 }
803
804 case RECORD_TYPE:
805 case UNION_TYPE:
806 case QUAL_UNION_TYPE:
807 {
808 tree f1, f2;
809
810 /* For aggregate types, all the fields must be the same. */
811 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
812 {
813 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
814 f1 || f2;
815 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
816 {
817 /* Skip non-fields. */
818 while (f1 && TREE_CODE (f1) != FIELD_DECL)
819 f1 = TREE_CHAIN (f1);
820 while (f2 && TREE_CODE (f2) != FIELD_DECL)
821 f2 = TREE_CHAIN (f2);
822 if (!f1 || !f2)
823 break;
824 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
825 break;
826 if (DECL_NAME (f1) != DECL_NAME (f2)
827 && !DECL_ARTIFICIAL (f1))
828 {
829 warn_odr (t1, t2, f1, f2, warn, warned,
830 G_("a field with different name is defined "
831 "in another translation unit"));
832 return false;
833 }
834 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
835 {
836 /* Do not warn about artificial fields and just go into generic
837 field mismatch warning. */
838 if (DECL_ARTIFICIAL (f1))
839 break;
840
841 warn_odr (t1, t2, f1, f2, warn, warned,
842 G_("a field of same name but different type "
843 "is defined in another translation unit"));
844 if (warn && warned)
845 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
846 return false;
847 }
848 if (!gimple_compare_field_offset (f1, f2))
849 {
850 /* Do not warn about artificial fields and just go into generic
851 field mismatch warning. */
852 if (DECL_ARTIFICIAL (f1))
853 break;
854 warn_odr (t1, t2, t1, t2, warn, warned,
 855 G_("fields have different layout "
856 "in another translation unit"));
857 return false;
858 }
859 gcc_assert (DECL_NONADDRESSABLE_P (f1)
860 == DECL_NONADDRESSABLE_P (f2));
861 }
862
863 /* If one aggregate has more fields than the other, they
864 are not the same. */
865 if (f1 || f2)
866 {
867 warn_odr (t1, t2, NULL, NULL, warn, warned,
868 G_("a type with different number of fields "
869 "is defined in another translation unit"));
870 return false;
871 }
872 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
873 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
874 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
875 {
876 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
877 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
878 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
879 {
880 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
881 {
882 warn_odr (t1, t2, f1, f2, warn, warned,
883 G_("a different method of same type "
884 "is defined in another translation unit"));
885 return false;
886 }
887 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
888 {
889 warn_odr (t1, t2, f1, f2, warn, warned,
 890 G_("a definition that differs by virtual "
891 "keyword in another translation unit"));
892 return false;
893 }
894 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
895 {
896 warn_odr (t1, t2, f1, f2, warn, warned,
897 G_("virtual table layout differs in another "
898 "translation unit"));
899 return false;
900 }
 901 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
902 {
903 warn_odr (t1, t2, f1, f2, warn, warned,
904 G_("method with incompatible type is defined "
905 "in another translation unit"));
906 return false;
907 }
908 }
909 if (f1 || f2)
910 {
911 warn_odr (t1, t2, NULL, NULL, warn, warned,
912 G_("a type with different number of methods "
913 "is defined in another translation unit"));
914 return false;
915 }
916 }
917 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
918 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
919 TYPE_SIZE_UNIT (t2), 0));
920 }
921
922 return true;
923 }
924
925 default:
926 gcc_unreachable ();
927 }
928}
929
930/* TYPE is equivalent to VAL by ODR, but its tree representation differs
931 from VAL->type. This may happen in LTO where tree merging did not merge
932 all variants of the same type. It may or may not mean the ODR violation.
933 Add it to the list of duplicates and warn on some violations. */
934
 935static bool
936add_type_duplicate (odr_type val, tree type)
937{
 938 bool build_bases = false;
939 if (!val->types_set)
940 val->types_set = pointer_set_create ();
941
942 /* Always prefer complete type to be the leader. */
943 if (!COMPLETE_TYPE_P (val->type)
944 && COMPLETE_TYPE_P (type))
945 {
946 tree tmp = type;
947
948 build_bases = true;
949 type = val->type;
950 val->type = tmp;
951 }
952
953 /* See if this duplicate is new. */
954 if (!pointer_set_insert (val->types_set, type))
955 {
956 bool merge = true;
957 bool base_mismatch = false;
 958 unsigned int i, j;
959 bool warned = false;
960 pointer_set_t *visited = pointer_set_create ();
 961
962 gcc_assert (in_lto_p);
963 vec_safe_push (val->types, type);
964
965 /* First we compare memory layout. */
966 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
967 &warned, visited))
968 {
969 merge = false;
 970 odr_violation_reported = true;
 971 val->odr_violated = true;
972 if (cgraph_dump_file)
973 {
 974 fprintf (cgraph_dump_file, "ODR violation\n");
975
976 print_node (cgraph_dump_file, "", val->type, 0);
977 putc ('\n',cgraph_dump_file);
978 print_node (cgraph_dump_file, "", type, 0);
979 putc ('\n',cgraph_dump_file);
980 }
981 }
 982 pointer_set_destroy (visited);
983
984 /* Next sanity check that bases are the same. If not, we will end
985 up producing wrong answers. */
986 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
987 && TREE_CODE (val->type) == RECORD_TYPE
988 && TREE_CODE (type) == RECORD_TYPE
989 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
 990 {
991 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
992 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
993 {
994 odr_type base = get_odr_type
995 (BINFO_TYPE
996 (BINFO_BASE_BINFO (TYPE_BINFO (type),
997 i)),
998 true);
999 if (val->bases.length () <= j || val->bases[j] != base)
1000 base_mismatch = true;
1001 j++;
1002 }
1003 if (base_mismatch)
 1004 {
1005 merge = false;
1006 odr_violation_reported = true;
1007
1008 if (!warned && !val->odr_violated)
1009 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1010 "a type with the same name but different bases is "
1011 "defined in another translation unit");
1012 val->odr_violated = true;
1013 if (cgraph_dump_file)
1014 {
 1015 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
1016
1017 print_node (cgraph_dump_file, "", val->type, 0);
1018 putc ('\n',cgraph_dump_file);
1019 print_node (cgraph_dump_file, "", type, 0);
1020 putc ('\n',cgraph_dump_file);
1021 }
1022 }
1023 }
1024
1025 /* Regularize things a little. During LTO same types may come with
1026 different BINFOs. Either because their virtual table was
1027 not merged by tree merging and only later at decl merging or
 1028 because one type comes with an external vtable, while the other
 1029 comes with an internal one. We want to merge equivalent binfos to conserve
1030 memory and streaming overhead.
1031
1032 The external vtables are more harmful: they contain references
1033 to external declarations of methods that may be defined in the
1034 merged LTO unit. For this reason we absolutely need to remove
1035 them and replace by internal variants. Not doing so will lead
1036 to incomplete answers from possible_polymorphic_call_targets. */
1037 if (!flag_ltrans && merge
1038 && TREE_CODE (val->type) == RECORD_TYPE
1039 && TREE_CODE (type) == RECORD_TYPE
1040 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1041 && TYPE_MAIN_VARIANT (type) == type
1042 && TYPE_MAIN_VARIANT (val->type) == val->type
1043 && BINFO_VTABLE (TYPE_BINFO (val->type))
1044 && BINFO_VTABLE (TYPE_BINFO (type)))
1045 {
1046 tree master_binfo = TYPE_BINFO (val->type);
1047 tree v1 = BINFO_VTABLE (master_binfo);
1048 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1049
1050 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1051 {
1052 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1053 && operand_equal_p (TREE_OPERAND (v1, 1),
1054 TREE_OPERAND (v2, 1), 0));
1055 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1056 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1057 }
1058 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1059 == DECL_ASSEMBLER_NAME (v2));
1060
1061 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1062 {
1063 unsigned int i;
1064
 1065 set_type_binfo (val->type, TYPE_BINFO (type));
 1066 for (i = 0; i < val->types->length (); i++)
1067 {
1068 if (TYPE_BINFO ((*val->types)[i])
1069 == master_binfo)
 1070 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
 1071 }
 1072 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1073 }
1074 else
 1075 set_type_binfo (type, master_binfo);
1076 }
1077 }
 1078 return build_bases;
1079}
1080
1081/* Get ODR type hash entry for TYPE. If INSERT is true, create
1082 possibly new entry. */
1083
1084odr_type
1085get_odr_type (tree type, bool insert)
1086{
1087 odr_type_d **slot;
1088 odr_type val;
1089 hashval_t hash;
1090 bool build_bases = false;
1091 bool insert_to_odr_array = false;
1092 int base_id = -1;
1093
1094 type = main_odr_variant (type);
 1095
 1096 hash = hash_type_name (type);
1097 slot
1098 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
1099 if (!slot)
1100 return NULL;
1101
1102 /* See if we already have entry for type. */
1103 if (*slot)
1104 {
1105 val = *slot;
1106
1107 /* With LTO we need to support multiple tree representation of
1108 the same ODR type. */
1109 if (val->type != type)
 1110 build_bases = add_type_duplicate (val, type);
1111 }
1112 else
1113 {
 1114 val = ggc_cleared_alloc<odr_type_d> ();
1115 val->type = type;
1116 val->bases = vNULL;
1117 val->derived_types = vNULL;
 1118 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1119 build_bases = COMPLETE_TYPE_P (val->type);
1120 insert_to_odr_array = true;
1121 }
1122
1123 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1124 && type == TYPE_MAIN_VARIANT (type))
1125 {
1126 tree binfo = TYPE_BINFO (type);
1127 unsigned int i;
1128
 1129 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
1130
 1131 val->all_derivations_known = type_all_derivations_known_p (type);
1132 *slot = val;
1133 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
 1134 /* For now record only polymorphic types. Others are
1135 pointless for devirtualization and we can not precisely
1136 determine ODR equivalency of these during LTO. */
1137 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1138 {
1139 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1140 i)),
1141 true);
 1142 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1143 base->derived_types.safe_push (val);
1144 val->bases.safe_push (base);
1145 if (base->id > base_id)
1146 base_id = base->id;
 1147 }
1148 }
1149 /* Ensure that type always appears after bases. */
1150 if (insert_to_odr_array)
1151 {
 1152 if (odr_types_ptr)
 1153 val->id = odr_types.length ();
1154 vec_safe_push (odr_types_ptr, val);
1155 }
1156 else if (base_id > val->id)
1157 {
1158 odr_types[val->id] = 0;
 1159 /* Be sure we did not record any derived types; these may need
1160 renumbering too. */
1161 gcc_assert (val->derived_types.length() == 0);
1162 if (odr_types_ptr)
1163 val->id = odr_types.length ();
1164 vec_safe_push (odr_types_ptr, val);
1165 }
1166 return val;
1167}
1168
 1169/* Dump ODR type T and all its derived types. INDENT specifies indentation for
 1170 recursive printing. */
1171
1172static void
1173dump_odr_type (FILE *f, odr_type t, int indent=0)
1174{
1175 unsigned int i;
1176 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1177 print_generic_expr (f, t->type, TDF_SLIM);
1178 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1179 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1180 if (TYPE_NAME (t->type))
1181 {
1182 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1183 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1184 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1185 }
 1186 if (t->bases.length ())
1187 {
1188 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
 1189 for (i = 0; i < t->bases.length (); i++)
1190 fprintf (f, " %i", t->bases[i]->id);
1191 fprintf (f, "\n");
1192 }
 1193 if (t->derived_types.length ())
1194 {
1195 fprintf (f, "%*s derived types:\n", indent * 2, "");
 1196 for (i = 0; i < t->derived_types.length (); i++)
1197 dump_odr_type (f, t->derived_types[i], indent + 1);
1198 }
1199 fprintf (f, "\n");
1200}
1201
1202/* Dump the type inheritance graph. */
1203
1204static void
1205dump_type_inheritance_graph (FILE *f)
1206{
1207 unsigned int i;
1208 if (!odr_types_ptr)
1209 return;
 1210 fprintf (f, "\n\nType inheritance graph:\n");
 1211 for (i = 0; i < odr_types.length (); i++)
 1212 {
 1213 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1214 dump_odr_type (f, odr_types[i]);
1215 }
 1216 for (i = 0; i < odr_types.length (); i++)
 1217 {
 1218 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1219 {
1220 unsigned int j;
1221 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1222 print_node (f, "", odr_types[i]->type, 0);
 1223 for (j = 0; j < odr_types[i]->types->length (); j++)
1224 {
1225 tree t;
1226 fprintf (f, "duplicate #%i\n", j);
1227 print_node (f, "", (*odr_types[i]->types)[j], 0);
1228 t = (*odr_types[i]->types)[j];
1229 while (TYPE_P (t) && TYPE_CONTEXT (t))
1230 {
1231 t = TYPE_CONTEXT (t);
1232 print_node (f, "", t, 0);
1233 }
1234 putc ('\n',f);
1235 }
1236 }
1237 }
1238}
1239
1240/* Given method type T, return type of class it belongs to.
1241 Lookup this pointer and get its type. */
1242
 1243tree
 1244method_class_type (const_tree t)
1245{
1246 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
 1247 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1248
1249 return TREE_TYPE (first_parm_type);
1250}
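/* Illustrative sketch, not part of the original sources: for a member
   function such as `int A::foo (int)' (hypothetical names), the METHOD_TYPE
   carries the implicit `this' argument of type `A *' first in
   TYPE_ARG_TYPES, so method_class_type returns the RECORD_TYPE of A.  */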
1251
1252/* Initialize IPA devirt and build inheritance tree graph. */
1253
1254void
1255build_type_inheritance_graph (void)
1256{
 1257 struct symtab_node *n;
1258 FILE *inheritance_dump_file;
1259 int flags;
1260
 1261 if (odr_hash)
1262 return;
1263 timevar_push (TV_IPA_INHERITANCE);
1264 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
 1265 odr_hash = new odr_hash_type (23);
1266
 1267 /* We reconstruct the graph starting from types of all methods seen in
 1268 the unit. */
 1269 FOR_EACH_SYMBOL (n)
 1270 if (is_a <cgraph_node *> (n)
 1271 && DECL_VIRTUAL_P (n->decl)
 1272 && n->real_symbol_p ())
1273 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
1274 true);
1275
1276 /* Look also for virtual tables of types that do not define any methods.
1277
 1278 We need it in the case where class B has a virtual base of class A
 1279 re-defining its virtual method and there is a class C with no virtual
 1280 methods with B as a virtual base.
 1281
 1282 Here we output B's virtual method in two variants - for non-virtual
 1283 and virtual inheritance. B's virtual table has the non-virtual version,
 1284 while C's has the virtual one.
1285
1286 For this reason we need to know about C in order to include both
1287 variants of B. More correctly, record_target_from_binfo should
1288 add both variants of the method when walking B, but we have no
1289 link in between them.
1290
 1291 We rely on the fact that either the method is exported and thus we
 1292 assume it is called externally, or C is in an anonymous namespace and
 1293 thus we will see the vtable. */
1294
 1295 else if (is_a <varpool_node *> (n)
1296 && DECL_VIRTUAL_P (n->decl)
1297 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1298 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1299 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
 1300 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1301 if (inheritance_dump_file)
1302 {
1303 dump_type_inheritance_graph (inheritance_dump_file);
1304 dump_end (TDI_inheritance, inheritance_dump_file);
1305 }
1306 timevar_pop (TV_IPA_INHERITANCE);
1307}
1308
 1309/* Return true if N has a reference from a live virtual table
 1310 (and thus can be a destination of a polymorphic call).
 1311 Be conservatively correct when the callgraph is not built or
 1312 if the method may be referred to externally. */
1313
1314static bool
1315referenced_from_vtable_p (struct cgraph_node *node)
1316{
1317 int i;
1318 struct ipa_ref *ref;
1319 bool found = false;
1320
1321 if (node->externally_visible
1322 || node->used_from_other_partition)
1323 return true;
1324
1325 /* Keep this test constant time.
1326 It is unlikely this can happen except for the case where speculative
1327 devirtualization introduced many speculative edges to this node.
1328 In this case the target is very likely alive anyway. */
1329 if (node->ref_list.referring.length () > 100)
1330 return true;
1331
1332 /* We need references built. */
1333 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
1334 return true;
1335
 1336 for (i = 0; node->iterate_referring (i, ref); i++)
1337
1338 if ((ref->use == IPA_REF_ALIAS
 1339 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1340 || (ref->use == IPA_REF_ADDR
1341 && TREE_CODE (ref->referring->decl) == VAR_DECL
1342 && DECL_VIRTUAL_P (ref->referring->decl)))
1343 {
1344 found = true;
1345 break;
1346 }
1347 return found;
1348}
1349
 1350/* If TARGET has associated node, record it in the NODES array.
 1351 CAN_REFER specifies if the program can refer to the target directly.
 1352 If TARGET is unknown (NULL) or it can not be inserted (for example because
1353 its body was already removed and there is no way to refer to it), clear
1354 COMPLETEP. */
1355
1356static void
1357maybe_record_node (vec <cgraph_node *> &nodes,
 1358 tree target, pointer_set_t *inserted,
 1359 bool can_refer,
 1360 bool *completep)
 1361 {
1362 struct cgraph_node *target_node, *alias_target;
1363 enum availability avail;
1364
1365 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
1366 list of targets; the runtime effect of calling them is undefined.
1367 Only "real" virtual methods should be accounted. */
1368 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
1369 return;
 1370
1371 if (!can_refer)
1372 {
1373 /* The only case when method of anonymous namespace becomes unreferable
1374 is when we completely optimized it out. */
1375 if (flag_ltrans
1376 || !target
 1377 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
1378 *completep = false;
1379 return;
1380 }
1381
 1382 if (!target)
1383 return;
1384
 1385 target_node = cgraph_node::get (target);
 1386
 1387 /* Prefer the alias target over aliases, so we do not get confused by
1388 fake duplicates. */
1389 if (target_node)
1390 {
 1391 alias_target = target_node->ultimate_alias_target (&avail);
1392 if (target_node != alias_target
1393 && avail >= AVAIL_AVAILABLE
 1394 && target_node->get_availability ())
1395 target_node = alias_target;
1396 }
1397
 1398 /* Method can only be called by a polymorphic call if any
 1399 of the vtables referring to it are alive.
1400
1401 While this holds for non-anonymous functions, too, there are
1402 cases where we want to keep them in the list; for example
1403 inline functions with -fno-weak are static, but we still
1404 may devirtualize them when instance comes from other unit.
1405 The same holds for LTO.
1406
1407 Currently we ignore these functions in speculative devirtualization.
1408 ??? Maybe it would make sense to be more aggressive for LTO even
 1409 elsewhere. */
1410 if (!flag_ltrans
1411 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
1412 && (!target_node
1413 || !referenced_from_vtable_p (target_node)))
1414 ;
1415 /* See if TARGET is useful function we can deal with. */
1416 else if (target_node != NULL
1417 && (TREE_PUBLIC (target)
1418 || DECL_EXTERNAL (target)
1419 || target_node->definition)
 1420 && target_node->real_symbol_p ())
 1421 {
 1422 gcc_assert (!target_node->global.inlined_to);
 1423 gcc_assert (target_node->real_symbol_p ());
 1424 if (!pointer_set_insert (inserted, target_node->decl))
1425 {
1426 pointer_set_insert (cached_polymorphic_call_targets,
1427 target_node);
1428 nodes.safe_push (target_node);
1429 }
 1430 }
 1431 else if (completep
1432 && (!type_in_anonymous_namespace_p
1433 (DECL_CONTEXT (target))
1434 || flag_ltrans))
 1435 *completep = false;
1436}
1437
 1438/* See if BINFO's type matches OUTER_TYPE. If so, look up the
 1439 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find the
 1440 method in the vtable and insert the method into the NODES array
 1441 or into BASES_TO_CONSIDER if this array is non-NULL.
1442 Otherwise recurse to base BINFOs.
 1443 This matches what get_binfo_at_offset does, but with the offset
1444 being unknown.
1445
 1446 TYPE_BINFOS is a stack of BINFOs of types with a defined
 1447 virtual table seen on the way from the class type to BINFO.
1448
 1449 MATCHED_VTABLES tracks virtual tables in which we have already looked
 1450 up the virtual function. INSERTED tracks nodes we already
1451 inserted.
1452
 1453 ANONYMOUS is true if BINFO is part of an anonymous namespace.
1454
 1455 Clear COMPLETEP when we hit an unreferable target.
1456 */
1457
1458static void
 1459 record_target_from_binfo (vec <cgraph_node *> &nodes,
 1460 vec <tree> *bases_to_consider,
68377e53
JH
1461 tree binfo,
1462 tree otr_type,
 1463 vec <tree> &type_binfos,
1464 HOST_WIDE_INT otr_token,
1465 tree outer_type,
1466 HOST_WIDE_INT offset,
1467 pointer_set_t *inserted,
1468 pointer_set_t *matched_vtables,
1469 bool anonymous,
1470 bool *completep)
1471{
1472 tree type = BINFO_TYPE (binfo);
1473 int i;
1474 tree base_binfo;
1475
 1476
1477 if (BINFO_VTABLE (binfo))
1478 type_binfos.safe_push (binfo);
 1479 if (types_same_for_odr (type, outer_type))
 1480 {
1481 int i;
1482 tree type_binfo = NULL;
1483
1484 /* Lookup BINFO with virtual table. For normal types it is always last
1485 binfo on stack. */
1486 for (i = type_binfos.length () - 1; i >= 0; i--)
1487 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
1488 {
1489 type_binfo = type_binfos[i];
1490 break;
1491 }
1492 if (BINFO_VTABLE (binfo))
1493 type_binfos.pop ();
1494 /* If this is duplicated BINFO for base shared by virtual inheritance,
1495 we may not have its associated vtable. This is not a problem, since
1496 we will walk it on the other path. */
1497 if (!type_binfo)
 1498 return;
1499 tree inner_binfo = get_binfo_at_offset (type_binfo,
1500 offset, otr_type);
1501 if (!inner_binfo)
1502 {
1503 gcc_assert (odr_violation_reported);
1504 return;
1505 }
1506 /* For types in anonymous namespace first check if the respective vtable
1507 is alive. If not, we know the type can't be called. */
1508 if (!flag_ltrans && anonymous)
1509 {
 1510 tree vtable = BINFO_VTABLE (inner_binfo);
 1511 varpool_node *vnode;
1512
1513 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1514 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1515 vnode = varpool_get_node (vtable);
 1516 if (!vnode || !vnode->definition)
1517 return;
1518 }
 1519 gcc_assert (inner_binfo);
1520 if (bases_to_consider
1521 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
1522 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
 1523 {
1524 bool can_refer;
1525 tree target = gimple_get_virt_method_for_binfo (otr_token,
1526 inner_binfo,
1527 &can_refer);
1528 if (!bases_to_consider)
1529 maybe_record_node (nodes, target, inserted, can_refer, completep);
1530 /* Destructors are never called via construction vtables. */
1531 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1532 bases_to_consider->safe_push (target);
 1533 }
1534 return;
1535 }
1536
1537 /* Walk bases. */
1538 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
 1539 /* Walking bases that have no virtual method is a pointless exercise. */
1540 if (polymorphic_type_binfo_p (base_binfo))
 1541 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
 1542 type_binfos,
 1543 otr_token, outer_type, offset, inserted,
 1544 matched_vtables, anonymous, completep);
1545 if (BINFO_VTABLE (binfo))
1546 type_binfos.pop ();
1547}
1548
1549/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1550 of TYPE, insert them to NODES, recurse into derived nodes.
1551 INSERTED is used to avoid duplicate insertions of methods into NODES.
 1552 MATCHED_VTABLES are used to avoid duplicate walking of vtables.
1553 Clear COMPLETEP if unreferable target is found.
1554
 1555 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1556 all cases where BASE_SKIPPED is true (because the base is abstract
1557 class). */
1558
1559static void
1560possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1561 pointer_set_t *inserted,
1562 pointer_set_t *matched_vtables,
1563 tree otr_type,
1564 odr_type type,
1565 HOST_WIDE_INT otr_token,
1566 tree outer_type,
 1567 HOST_WIDE_INT offset,
1568 bool *completep,
1569 vec <tree> &bases_to_consider,
1570 bool consider_construction)
1571{
1572 tree binfo = TYPE_BINFO (type->type);
1573 unsigned int i;
 1574 vec <tree> type_binfos = vNULL;
1575 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1576
1577 /* We may need to consider types w/o instances because of possible derived
1578 types using their methods either directly or via construction vtables.
1579 We are safe to skip them when all derivations are known, since we will
1580 handle them later.
1581 This is done by recording them to BASES_TO_CONSIDER array. */
1582 if (possibly_instantiated || consider_construction)
1583 {
1584 record_target_from_binfo (nodes,
1585 (!possibly_instantiated
1586 && type_all_derivations_known_p (type->type))
1587 ? &bases_to_consider : NULL,
1588 binfo, otr_type, type_binfos, otr_token,
1589 outer_type, offset,
1590 inserted, matched_vtables,
1591 type->anonymous_namespace, completep);
1592 }
 1593 type_binfos.release ();
 1594 for (i = 0; i < type->derived_types.length (); i++)
1595 possible_polymorphic_call_targets_1 (nodes, inserted,
1596 matched_vtables,
1597 otr_type,
1598 type->derived_types[i],
1599 otr_token, outer_type, offset, completep,
1600 bases_to_consider, consider_construction);
1601}
1602
1603/* Cache of queries for polymorphic call targets.
1604
1605 Enumerating all call targets may get expensive when there are many
1606 polymorphic calls in the program, so we memoize all the previous
1607 queries and avoid duplicated work. */
1608
1609struct polymorphic_call_target_d
1610{
 1611 HOST_WIDE_INT otr_token;
1612 ipa_polymorphic_call_context context;
1613 odr_type type;
 1614 vec <cgraph_node *> targets;
1615 int nonconstruction_targets;
1616 bool complete;
1617};
1618
1619/* Polymorphic call target cache helpers. */
1620
1621struct polymorphic_call_target_hasher
1622{
1623 typedef polymorphic_call_target_d value_type;
1624 typedef polymorphic_call_target_d compare_type;
1625 static inline hashval_t hash (const value_type *);
1626 static inline bool equal (const value_type *, const compare_type *);
1627 static inline void remove (value_type *);
1628};
1629
1630/* Return the computed hashcode for ODR_QUERY. */
1631
1632inline hashval_t
1633polymorphic_call_target_hasher::hash (const value_type *odr_query)
1634{
1635 hashval_t hash;
1636
1637 hash = iterative_hash_host_wide_int
1638 (odr_query->otr_token,
1639 odr_query->type->id);
1640 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
1641 hash);
1642 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
1643 return iterative_hash_hashval_t
1644 (((int)odr_query->context.maybe_in_construction << 1)
1645 | (int)odr_query->context.maybe_derived_type, hash);
eefe9a99
JH
1646}
1647
1648/* Compare cache entries T1 and T2. */
1649
1650inline bool
1651polymorphic_call_target_hasher::equal (const value_type *t1,
1652 const compare_type *t2)
1653{
68377e53
JH
1654 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1655 && t1->context.offset == t2->context.offset
1656 && t1->context.outer_type == t2->context.outer_type
1657 && t1->context.maybe_in_construction
1658 == t2->context.maybe_in_construction
1659 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
eefe9a99
JH
1660}
1661
1662/* Remove entry in polymorphic call target cache hash. */
1663
1664inline void
1665polymorphic_call_target_hasher::remove (value_type *v)
1666{
1667 v->targets.release ();
1668 free (v);
1669}
1670
1671/* Polymorphic call target query cache. */
1672
c203e8a7 1673typedef hash_table<polymorphic_call_target_hasher>
eefe9a99 1674 polymorphic_call_target_hash_type;
c203e8a7 1675static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
eefe9a99
JH
1676
1677/* Destroy polymorphic call target query cache. */
1678
1679static void
1680free_polymorphic_call_targets_hash ()
1681{
0e1474e5
JH
1682 if (cached_polymorphic_call_targets)
1683 {
c203e8a7
TS
1684 delete polymorphic_call_target_hash;
1685 polymorphic_call_target_hash = NULL;
0e1474e5
JH
1686 pointer_set_destroy (cached_polymorphic_call_targets);
1687 cached_polymorphic_call_targets = NULL;
1688 }
eefe9a99
JH
1689}
1690
1691/* When virtual function is removed, we may need to flush the cache. */
1692
1693static void
1694devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1695{
0e1474e5
JH
1696 if (cached_polymorphic_call_targets
1697 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
1698 free_polymorphic_call_targets_hash ();
1699}
1700
d570d364
JH
1701/* Return true when TYPE contains a polymorphic type and thus is interesting
1702 for devirtualization machinery. */
1703
1704bool
1705contains_polymorphic_type_p (const_tree type)
1706{
1707 type = TYPE_MAIN_VARIANT (type);
1708
1709 if (RECORD_OR_UNION_TYPE_P (type))
1710 {
1711 if (TYPE_BINFO (type)
1712 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1713 return true;
1714 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1715 if (TREE_CODE (fld) == FIELD_DECL
1716 && !DECL_ARTIFICIAL (fld)
1717 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1718 return true;
1719 return false;
1720 }
1721 if (TREE_CODE (type) == ARRAY_TYPE)
1722 return contains_polymorphic_type_p (TREE_TYPE (type));
1723 return false;
1724}
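/* As an illustrative sketch (hypothetical C++ types, not from this unit):

     struct Base { virtual ~Base (); };        // polymorphic
     struct Wrap { int tag; Base payload; };   // embeds a polymorphic member
     typedef Base BaseArr[4];                  // array of a polymorphic type
     struct Plain { int x; double y; };        // nothing polymorphic inside

   the predicate above is expected to return true for Base, Wrap and BaseArr
   and false for Plain.  */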
1725
68377e53
JH
1726/* CONTEXT->OUTER_TYPE is the type of a memory object where an object of EXPECTED_TYPE
1727   is contained at CONTEXT->OFFSET.  Walk the memory representation of
1728   CONTEXT->OUTER_TYPE and find the outermost class type that matches
1729   EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update CONTEXT
1730   to represent it.
1731
1732 For example when CONTEXT represents type
1733 class A
1734 {
1735 int a;
1736 class B b;
1737 }
1738 and we look for type at offset sizeof(int), we end up with B and offset 0.
1739 If the same is produced by multiple inheritance, we end up with A and offset
1740 sizeof(int).
1741
1742   If we can not find the corresponding class, give up by setting
1743   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1744   Return true when the lookup was successful.  */
1745
1746static bool
1747get_class_context (ipa_polymorphic_call_context *context,
1748 tree expected_type)
1749{
1750 tree type = context->outer_type;
1751 HOST_WIDE_INT offset = context->offset;
1752
1753 /* Find the sub-object the constant actually refers to and mark whether it is
1754 an artificial one (as opposed to a user-defined one). */
1755 while (true)
1756 {
1757 HOST_WIDE_INT pos, size;
1758 tree fld;
1759
1760 /* On a match, just return what we found. */
1761 if (TREE_CODE (type) == TREE_CODE (expected_type)
1762 && types_same_for_odr (type, expected_type))
1763 {
3b4e93c3
JH
1764	  /* A type can not contain itself at a non-zero offset.  In that case
1765	     just give up.  */
1766 if (offset != 0)
1767 goto give_up;
68377e53
JH
1768 gcc_assert (offset == 0);
1769 return true;
1770 }
1771
1772      /* Walk the fields and find the one at OFFSET.  */
1773 if (TREE_CODE (type) == RECORD_TYPE)
1774 {
1775 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1776 {
1777 if (TREE_CODE (fld) != FIELD_DECL)
1778 continue;
1779
1780 pos = int_bit_position (fld);
1781 size = tree_to_uhwi (DECL_SIZE (fld));
1782 if (pos <= offset && (pos + size) > offset)
1783 break;
1784 }
1785
1786 if (!fld)
1787 goto give_up;
1788
c7e1befa 1789 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
68377e53
JH
1790 offset -= pos;
1791 /* DECL_ARTIFICIAL represents a basetype. */
1792 if (!DECL_ARTIFICIAL (fld))
1793 {
1794 context->outer_type = type;
1795 context->offset = offset;
1796	      /* As soon as we see a field containing the type,
1797		 we know we are not looking for derivations.  */
1798 context->maybe_derived_type = false;
1799 }
1800 }
1801 else if (TREE_CODE (type) == ARRAY_TYPE)
1802 {
c7e1befa 1803 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
68377e53
JH
1804
1805	  /* Give up if we do not know the size of the array element.  */
1806	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1807	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1808 goto give_up;
1809 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1810 type = subtype;
1811 context->outer_type = type;
1812 context->offset = offset;
1813 context->maybe_derived_type = false;
1814 }
1815 /* Give up on anything else. */
1816 else
1817 goto give_up;
1818 }
1819
1820  /* If we failed to find the subtype we were looking for, give up and fall
1821     back to the most generic query.  */
1822give_up:
1823 context->outer_type = expected_type;
1824 context->offset = 0;
1825 context->maybe_derived_type = true;
e400f081
JH
1826 context->maybe_in_construction = true;
1827  /* A POD can be changed into an instance of a polymorphic type by
1828     placement new.  Here we play it safe and assume that any
1829     non-polymorphic type is a POD.  */
1830 if ((TREE_CODE (type) != RECORD_TYPE
1831 || !TYPE_BINFO (type)
1832 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1833 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1834 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1835 tree_to_uhwi (TYPE_SIZE (type)))))
1836 return true;
68377e53
JH
1837 return false;
1838}
1839
1840/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1841
1842static bool
1843contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1844 tree otr_type)
1845{
c7e1befa
JH
1846 ipa_polymorphic_call_context context = {offset,
1847 TYPE_MAIN_VARIANT (outer_type),
68377e53
JH
1848 false, true};
1849 return get_class_context (&context, otr_type);
1850}
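/* A small worked sketch of the two functions above, using hypothetical
   C++ types:

     struct B { virtual void f (); };
     struct A { int a; B b; };

   Given a context whose outer type is A and whose offset is
   offsetof (A, b) * BITS_PER_UNIT, get_class_context steps into the
   field `b', leaving the outer type B at offset 0, and returns true.
   Hence contains_type_p (A, offsetof (A, b) * BITS_PER_UNIT, B) is
   expected to hold.  */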
1851
390675c8
JH
1852/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1853
1854static tree
85942f45
JH
1855subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1856 tree vtable)
390675c8
JH
1857{
1858 tree v = BINFO_VTABLE (binfo);
1859 int i;
1860 tree base_binfo;
85942f45 1861 unsigned HOST_WIDE_INT this_offset;
390675c8 1862
85942f45
JH
1863 if (v)
1864 {
1865 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1866 gcc_unreachable ();
1867
1868 if (offset == this_offset
1869 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1870 return binfo;
1871 }
390675c8 1872
390675c8
JH
1873 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1874 if (polymorphic_type_binfo_p (base_binfo))
1875 {
1876 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1877 if (base_binfo)
1878 return base_binfo;
1879 }
1880 return NULL;
1881}
1882
85942f45
JH
1883/* T is a known constant value of a virtual table pointer.
1884   Store the virtual table to V and its offset to OFFSET.
1885   Return false if T does not look like a virtual table reference.  */
390675c8 1886
85942f45 1887bool
d570d364
JH
1888vtable_pointer_value_to_vtable (const_tree t, tree *v,
1889 unsigned HOST_WIDE_INT *offset)
390675c8
JH
1890{
1891 /* We expect &MEM[(void *)&virtual_table + 16B].
1892 We obtain object's BINFO from the context of the virtual table.
1893     This one contains a pointer to the virtual table represented via
1894     POINTER_PLUS_EXPR.  Verify that this pointer matches what
1895     we propagated through.
1896
1897 In the case of virtual inheritance, the virtual tables may
1898 be nested, i.e. the offset may be different from 16 and we may
1899 need to dive into the type representation. */
85942f45 1900 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
1901 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1902 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1903 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1904 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1905 == VAR_DECL)
1906 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1907 (TREE_OPERAND (t, 0), 0), 0)))
1908 {
85942f45
JH
1909 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1910 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1911 return true;
390675c8 1912 }
85942f45
JH
1913
1914  /* An alternative representation, used by the C++ frontend, is
1915     POINTER_PLUS_EXPR.  We need to handle it when T comes from a static
1916     variable initializer or a BINFO.  */
1917 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1918 {
1919 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1920 t = TREE_OPERAND (t, 0);
1921 }
1922 else
1923 *offset = 0;
1924
1925 if (TREE_CODE (t) != ADDR_EXPR)
1926 return false;
1927 *v = TREE_OPERAND (t, 0);
1928 return true;
1929}
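/* For illustration, the two accepted shapes roughly correspond to
   (assuming a class A whose virtual table is _ZTV1A):

     &MEM[(void *) &_ZTV1A + 16B]	<- the gimple form handled first
     &_ZTV1A p+ 16			<- the POINTER_PLUS_EXPR form

   and both are expected to yield *v == _ZTV1A and *offset == 16.  */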
1930
1931/* T is a known constant value of a virtual table pointer.  Return the BINFO
1932   of the instance type.  */
1933
1934tree
d570d364 1935vtable_pointer_value_to_binfo (const_tree t)
85942f45
JH
1936{
1937 tree vtable;
1938 unsigned HOST_WIDE_INT offset;
1939
1940 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1941 return NULL_TREE;
1942
1943 /* FIXME: for stores of construction vtables we return NULL,
1944 because we do not have BINFO for those. Eventually we should fix
1945 our representation to allow this case to be handled, too.
1946     In the case we see a store of a BINFO we however may assume
1947     that standard folding will be able to cope with it.  */
1948 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1949 offset, vtable);
390675c8
JH
1950}
1951
058d0a90
JH
1952/* We know that the instance is stored in a variable or parameter
1953   (not dynamically allocated) and we want to rule out the possibility
1954   that it is under construction at the invocation of CALL.
1955
1956   For the variable to be in construction we either need to be in the
1957   constructor of the corresponding global variable, or the inline stack
1958   of CALL must contain the constructor.
1959   Check this condition.  This check works safely only before
1960   IPA passes, because inline stacks may become out of date
1961   later.  */
1962
1963bool
1964decl_maybe_in_construction_p (tree base, tree outer_type,
1965 gimple call, tree function)
1966{
1967 outer_type = TYPE_MAIN_VARIANT (outer_type);
1968 gcc_assert (DECL_P (base));
1969
1970 /* After inlining the code unification optimizations may invalidate
1971 inline stacks. Also we need to give up on global variables after
1972 IPA, because addresses of these may have been propagated to their
1973 constructors. */
1974 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
1975 return true;
1976
1977  /* Pure functions can not change the dynamic type;
1978     that requires writing to memory.  */
1979 if (!auto_var_in_fn_p (base, function)
1980 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
1981 return false;
1982
1983 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
1984 block = BLOCK_SUPERCONTEXT (block))
1985 if (BLOCK_ABSTRACT_ORIGIN (block)
1986 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1987 {
1988 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
1989
1990 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1991 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1992	      && !DECL_CXX_DESTRUCTOR_P (fn)))
1993 {
1994 /* Watch for clones where we constant propagated the first
1995 argument (pointer to the instance). */
1996 fn = DECL_ABSTRACT_ORIGIN (fn);
1997 if (!fn
1998 || !is_global_var (base)
1999 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2000 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2001		  && !DECL_CXX_DESTRUCTOR_P (fn)))
2002 continue;
2003 }
2004 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2005 continue;
2006
2007 /* FIXME: this can go away once we have ODR types equivalency on
2008 LTO level. */
2009 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2010 return true;
2011 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2012 if (types_same_for_odr (type, outer_type))
2013 return true;
2014 }
2015
2016 if (TREE_CODE (base) == VAR_DECL
2017 && is_global_var (base))
2018 {
2019 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2020 || (!DECL_CXX_CONSTRUCTOR_P (function)
2021	      && !DECL_CXX_DESTRUCTOR_P (function)))
2022 {
2023 if (!DECL_ABSTRACT_ORIGIN (function))
2024 return false;
2025 /* Watch for clones where we constant propagated the first
2026 argument (pointer to the instance). */
2027 function = DECL_ABSTRACT_ORIGIN (function);
2028 if (!function
2029 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2030 || (!DECL_CXX_CONSTRUCTOR_P (function)
2031		  && !DECL_CXX_DESTRUCTOR_P (function)))
2032 return false;
2033 }
2034 /* FIXME: this can go away once we have ODR types equivalency on
2035 LTO level. */
2036 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2037 return true;
2038 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2039 if (types_same_for_odr (type, outer_type))
2040 return true;
2041 }
2042 return false;
2043}
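/* Illustrative sketch (hypothetical C++ code) of what is being decided:

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };
     static B b;
     void use () { b.f (); }

   For the call in use () the instance is the declared variable `b'; since
   neither use () nor anything on the inline stack of that call is a
   constructor or destructor of a type matching the outer type, the object
   can not be under construction there and the maybe-in-construction flag
   can be dropped.  The same call issued from within B::B () (or inlined
   from it) could not be cleared this way.  */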
2044
5bccb77a
JH
2045/* Produce a polymorphic call context for a call to a method of an instance
2046   that is located within BASE (which is assumed to be a decl) at OFFSET.  */
2047
2048static void
2049get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2050 tree base, HOST_WIDE_INT offset)
2051{
2052 gcc_assert (DECL_P (base));
2053
c7e1befa 2054 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
5bccb77a
JH
2055 context->offset = offset;
2056 /* Make very conservative assumption that all objects
2057 may be in construction.
2058 TODO: ipa-prop already contains code to tell better.
2059 merge it later. */
2060 context->maybe_in_construction = true;
2061 context->maybe_derived_type = false;
2062}
2063
2064/* CST is an invariant (address of decl).  Try to get a meaningful
2065   polymorphic call context for a polymorphic call to a method
2066   of an instance of OTR_TYPE that is located at OFFSET of this invariant.
2067   Return FALSE if nothing meaningful can be found.  */
2068
2069bool
2070get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2071 tree cst,
2072 tree otr_type,
2073 HOST_WIDE_INT offset)
2074{
2075 HOST_WIDE_INT offset2, size, max_size;
2076 tree base;
2077
2078 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 2079 return false;
5bccb77a
JH
2080
2081 cst = TREE_OPERAND (cst, 0);
2082 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
2083 if (!DECL_P (base) || max_size == -1 || max_size != size)
2084 return false;
5bccb77a
JH
2085
2086 /* Only type inconsistent programs can have otr_type that is
2087 not part of outer type. */
79c7de84
EB
2088 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2089 return false;
5bccb77a 2090
79c7de84 2091 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
2092 return true;
2093}
2094
68377e53
JH
2095/* Given a call REF in FNDECL, determine the class of the polymorphic
2096   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
058d0a90
JH
2097   CALL is an optional argument giving the actual statement (usually a call)
2098   where the context is used.
68377e53
JH
2099   Return a pointer to the object described by the context.  */
2100
2101tree
2102get_polymorphic_call_info (tree fndecl,
2103 tree ref,
2104 tree *otr_type,
2105 HOST_WIDE_INT *otr_token,
058d0a90
JH
2106 ipa_polymorphic_call_context *context,
2107 gimple call)
68377e53
JH
2108{
2109 tree base_pointer;
2110 *otr_type = obj_type_ref_class (ref);
2111 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2112
2113 /* Set up basic info in case we find nothing interesting in the analysis. */
c7e1befa 2114 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
68377e53
JH
2115 context->offset = 0;
2116 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2117 context->maybe_derived_type = true;
2d1644bf 2118 context->maybe_in_construction = true;
68377e53
JH
2119
2120 /* Walk SSA for outer object. */
2121 do
2122 {
2123 if (TREE_CODE (base_pointer) == SSA_NAME
2124 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2125 && SSA_NAME_DEF_STMT (base_pointer)
2126 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2127 {
2128 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
2129 STRIP_NOPS (base_pointer);
2130 }
2131 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
2132 {
2133 HOST_WIDE_INT size, max_size;
2134 HOST_WIDE_INT offset2;
2135 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2136 &offset2, &size, &max_size);
2137
2138 /* If this is a varying address, punt. */
2139 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2140 && max_size != -1
2141 && max_size == size)
2142 {
2143	      /* We found a dereference of a pointer.  The type of the pointer
2144		 and of the MEM_REF is meaningless, but we can look further.  */
2145 if (TREE_CODE (base) == MEM_REF)
2146 {
2147 base_pointer = TREE_OPERAND (base, 0);
2148 context->offset
807e902e 2149 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
2150 context->outer_type = NULL;
2151 }
2152 /* We found base object. In this case the outer_type
2153 is known. */
2154 else if (DECL_P (base))
2155 {
7656ee72 2156 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
2157
2158 /* Only type inconsistent programs can have otr_type that is
2159 not part of outer type. */
7656ee72
JH
2160 if (!contains_type_p (TREE_TYPE (base),
2161 context->offset + offset2, *otr_type))
3e86c6a8
JH
2162 {
2163		  /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2164		     code sequences; we arrange for such calls to be redirected to
2165		     builtin_unreachable later.  */
2166 *otr_token = INT_MAX;
2167 return base_pointer;
2168 }
5bccb77a
JH
2169 get_polymorphic_call_info_for_decl (context, base,
2170 context->offset + offset2);
058d0a90
JH
2171 if (context->maybe_in_construction && call)
2172 context->maybe_in_construction
2173 = decl_maybe_in_construction_p (base,
2174 context->outer_type,
2175 call,
2176 current_function_decl);
7656ee72 2177 return NULL;
68377e53
JH
2178 }
2179 else
2180 break;
2181 }
2182 else
2183 break;
2184 }
2185 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2186 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2187 {
2188 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2189 * BITS_PER_UNIT;
2190 base_pointer = TREE_OPERAND (base_pointer, 0);
2191 }
2192 else
2193 break;
2194 }
2195 while (true);
2196
2197 /* Try to determine type of the outer object. */
2198 if (TREE_CODE (base_pointer) == SSA_NAME
2199 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2200 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2201 {
2202 /* See if parameter is THIS pointer of a method. */
2203 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2204 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2205 {
c7e1befa
JH
2206 context->outer_type
2207 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2208 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2209
2210	  /* Dynamic casting has possibly upcasted the type
2211	     in the hierarchy.  In this case the outer type is less
2212	     informative than the inner type and we should forget
2213	     about it.  */
2214 if (!contains_type_p (context->outer_type, context->offset,
2215 *otr_type))
2216 {
2217 context->outer_type = NULL;
2218 return base_pointer;
2219 }
2220
2221	  /* If the function is a constructor or destructor, then
d74db8ff 2222	     the type is possibly in construction, but we know
68377e53
JH
2223	     it is not a derived type.  */
2224 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2225 || DECL_CXX_DESTRUCTOR_P (fndecl))
2226 {
2227 context->maybe_in_construction = true;
2228 context->maybe_derived_type = false;
2229 }
2230 else
2231 {
2232 context->maybe_derived_type = true;
2233 context->maybe_in_construction = false;
2234 }
2235 return base_pointer;
2236 }
2237 /* Non-PODs passed by value are really passed by invisible
2238 reference. In this case we also know the type of the
2239 object. */
2240 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2241 {
c7e1befa
JH
2242 context->outer_type
2243 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
2244 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2245 /* Only type inconsistent programs can have otr_type that is
2246 not part of outer type. */
2247 if (!contains_type_p (context->outer_type, context->offset,
2248 *otr_type))
2249 {
3e86c6a8
JH
2250	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2251		 code sequences; we arrange for such calls to be redirected to
2252		 builtin_unreachable later.  */
2253 *otr_token = INT_MAX;
68377e53
JH
2254 return base_pointer;
2255 }
2256 context->maybe_derived_type = false;
2257 context->maybe_in_construction = false;
2258 return base_pointer;
2259 }
2260 }
2261  /* TODO: There are multiple ways to derive a type.  For instance,
2262     BASE_POINTER may be passed to a constructor call prior to our reference.
2263     We do not do this kind of flow-sensitive analysis yet.  */
2264 return base_pointer;
2265}
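/* As a sketch of the common case (hypothetical C++ code):

     struct A { virtual int f (); int call () { return f (); } };

   the OBJ_TYPE_REF inside A::call () has the default definition of the
   first PARM_DECL of a METHOD_TYPE (the `this' pointer) as its base, so
   the analysis above is expected to produce outer_type == A, offset == 0,
   maybe_derived_type == true and maybe_in_construction == false.  */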
2266
2267/* Walk the bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2268   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2269   and insert them into NODES.
2270
2271   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
2272
2273static void
2274record_targets_from_bases (tree otr_type,
2275 HOST_WIDE_INT otr_token,
2276 tree outer_type,
2277 HOST_WIDE_INT offset,
ec77d61f 2278 vec <cgraph_node *> &nodes,
68377e53
JH
2279 pointer_set_t *inserted,
2280 pointer_set_t *matched_vtables,
2281 bool *completep)
2282{
2283 while (true)
2284 {
2285 HOST_WIDE_INT pos, size;
2286 tree base_binfo;
2287 tree fld;
2288
2289 if (types_same_for_odr (outer_type, otr_type))
2290 return;
2291
2292 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2293 {
2294 if (TREE_CODE (fld) != FIELD_DECL)
2295 continue;
2296
2297 pos = int_bit_position (fld);
2298 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
2299 if (pos <= offset && (pos + size) > offset
2300 /* Do not get confused by zero sized bases. */
2301 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
2302 break;
2303 }
2304      /* Within a class type we should always find the corresponding fields.  */
2305 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2306
2307      /* Non-base types should have been stripped by outer_class_type.  */
2308 gcc_assert (DECL_ARTIFICIAL (fld));
2309
2310 outer_type = TREE_TYPE (fld);
2311 offset -= pos;
2312
2313 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2314 offset, otr_type);
ec77d61f
JH
2315 if (!base_binfo)
2316 {
2317 gcc_assert (odr_violation_reported);
2318 return;
2319 }
68377e53
JH
2320 gcc_assert (base_binfo);
2321 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
2322 {
ec77d61f
JH
2323 bool can_refer;
2324 tree target = gimple_get_virt_method_for_binfo (otr_token,
2325 base_binfo,
2326 &can_refer);
2d1644bf
JH
2327 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2328 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
2329 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
2330 }
2331 }
2332}
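/* Sketch of the situation handled above (hypothetical C++ types):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };

   While B () is running the constructor of its base A, the virtual table
   pointer of the object temporarily refers to a vtable of A (or to a
   construction vtable), so a call that would normally bind to B::f may
   bind to A::f during construction.  This is why methods of the bases are
   recorded when the context says the object may be in construction.  */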
2333
3462aa02
JH
2334/* When virtual table is removed, we may need to flush the cache. */
2335
2336static void
2c8326a5 2337devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
2338 void *d ATTRIBUTE_UNUSED)
2339{
2340 if (cached_polymorphic_call_targets
67348ccc
DM
2341 && DECL_VIRTUAL_P (n->decl)
2342 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
2343 free_polymorphic_call_targets_hash ();
2344}
2345
eefe9a99 2346/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
2347   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2348   If INCLUDE_BASES is true, walk also the base types of OUTER_TYPE containing
2349   OTR_TYPE and include their virtual methods.  This is useful for types
2350   possibly in construction or destruction where the virtual table may
2351   temporarily change to one of the base types.  INCLUDE_DERIVED_TYPES makes
2352   us walk the inheritance graph for all derivations.
2353
3e86c6a8
JH
2354 OTR_TOKEN == INT_MAX is used to mark calls that are provably
2355 undefined and should be redirected to unreachable.
2356
add5c763 2357 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
2358   CACHE_TOKEN (if non-NULL) will get set to a unique ID of the entry
2359   in the target cache.  If the user needs to visit every target list
2360   just once, it can memoize them.
2361
ec77d61f
JH
2362   NONCONSTRUCTION_TARGETS specifies the number of targets under the
2363   assumption that the type is not in construction.  Those targets appear
2364   first in the returned vector.
2365
eefe9a99
JH
2366   The returned vector is placed into the cache.  It is NOT the caller's
2367   responsibility to free it.  The vector can be freed on a cgraph_remove_node
2368   call if the particular node is a virtual function present in the cache.  */
2369
2370vec <cgraph_node *>
2371possible_polymorphic_call_targets (tree otr_type,
2372 HOST_WIDE_INT otr_token,
68377e53
JH
2373 ipa_polymorphic_call_context context,
2374 bool *completep,
ec77d61f
JH
2375 void **cache_token,
2376 int *nonconstruction_targetsp)
eefe9a99
JH
2377{
2378 static struct cgraph_node_hook_list *node_removal_hook_holder;
2379 pointer_set_t *inserted;
2380 pointer_set_t *matched_vtables;
add5c763 2381 vec <cgraph_node *> nodes = vNULL;
2d1644bf 2382 vec <tree> bases_to_consider = vNULL;
68377e53 2383 odr_type type, outer_type;
eefe9a99
JH
2384 polymorphic_call_target_d key;
2385 polymorphic_call_target_d **slot;
2386 unsigned int i;
2387 tree binfo, target;
ec77d61f
JH
2388 bool complete;
2389 bool can_refer;
2d1644bf 2390 bool skipped = false;
eefe9a99 2391
c7e1befa
JH
2392 otr_type = TYPE_MAIN_VARIANT (otr_type);
2393
3e86c6a8 2394 /* If ODR is not initialized, return empty incomplete list. */
c203e8a7 2395 if (!odr_hash)
79c7de84
EB
2396 {
2397 if (completep)
2398 *completep = false;
beb683ab
MJ
2399 if (cache_token)
2400 *cache_token = NULL;
ec77d61f
JH
2401 if (nonconstruction_targetsp)
2402 *nonconstruction_targetsp = 0;
79c7de84
EB
2403 return nodes;
2404 }
add5c763 2405
3e86c6a8
JH
2406 /* If we hit type inconsistency, just return empty list of targets. */
2407 if (otr_token == INT_MAX)
2408 {
2409 if (completep)
2410 *completep = true;
beb683ab
MJ
2411 if (cache_token)
2412 *cache_token = NULL;
3e86c6a8
JH
2413 if (nonconstruction_targetsp)
2414 *nonconstruction_targetsp = 0;
2415 return nodes;
2416 }
2417
68377e53 2418 type = get_odr_type (otr_type, true);
eefe9a99 2419
c7e1befa
JH
2420  /* Recording type variants would waste the results cache.  */
2421 gcc_assert (!context.outer_type
2422 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2423
68377e53 2424 /* Lookup the outer class type we want to walk. */
3e86c6a8
JH
2425 if (context.outer_type
2426 && !get_class_context (&context, otr_type))
2427 {
2428 if (completep)
2429 *completep = false;
beb683ab
MJ
2430 if (cache_token)
2431 *cache_token = NULL;
3e86c6a8
JH
2432 if (nonconstruction_targetsp)
2433 *nonconstruction_targetsp = 0;
2434 return nodes;
2435 }
eefe9a99 2436
c7e1befa
JH
2437 /* Check that get_class_context kept the main variant. */
2438 gcc_assert (!context.outer_type
2439 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2440
79c7de84 2441 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
2442
2443  /* Without an outer type, we have no use for the offset.  Just do the
2444     basic search from the inner type.  */
2445 if (!context.outer_type)
2446 {
2447 context.outer_type = otr_type;
2448 context.offset = 0;
2449 }
2450  /* We need to update our hierarchy if the type does not exist.  */
2451 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 2452  /* If the outer type is final, there are no derivations.  */
68377e53
JH
2453 if (TYPE_FINAL_P (outer_type->type))
2454 context.maybe_derived_type = false;
eefe9a99
JH
2455
2456 /* Initialize query cache. */
2457 if (!cached_polymorphic_call_targets)
2458 {
2459 cached_polymorphic_call_targets = pointer_set_create ();
c203e8a7
TS
2460 polymorphic_call_target_hash
2461 = new polymorphic_call_target_hash_type (23);
eefe9a99 2462 if (!node_removal_hook_holder)
3462aa02
JH
2463 {
2464 node_removal_hook_holder =
2465 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
2466 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
2467 NULL);
2468 }
eefe9a99
JH
2469 }
2470
2471 /* Lookup cached answer. */
2472 key.type = type;
2473 key.otr_token = otr_token;
68377e53 2474 key.context = context;
c203e8a7 2475 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
eefe9a99
JH
2476 if (cache_token)
2477 *cache_token = (void *)*slot;
2478 if (*slot)
68377e53
JH
2479 {
2480 if (completep)
ec77d61f
JH
2481 *completep = (*slot)->complete;
2482 if (nonconstruction_targetsp)
2483 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
68377e53
JH
2484 return (*slot)->targets;
2485 }
2486
ec77d61f 2487 complete = true;
eefe9a99
JH
2488
2489 /* Do actual search. */
2490 timevar_push (TV_IPA_VIRTUAL_CALL);
2491 *slot = XCNEW (polymorphic_call_target_d);
2492 if (cache_token)
68377e53 2493 *cache_token = (void *)*slot;
eefe9a99
JH
2494 (*slot)->type = type;
2495 (*slot)->otr_token = otr_token;
68377e53 2496 (*slot)->context = context;
eefe9a99
JH
2497
2498 inserted = pointer_set_create ();
2499 matched_vtables = pointer_set_create ();
2500
2501 /* First see virtual method of type itself. */
68377e53
JH
2502 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2503 context.offset, otr_type);
ec77d61f
JH
2504 if (binfo)
2505 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2506 &can_refer);
2507 else
68377e53 2508 {
ec77d61f
JH
2509 gcc_assert (odr_violation_reported);
2510 target = NULL;
2511 }
68377e53 2512
2d1644bf
JH
2513 /* Destructors are never called through construction virtual tables,
2514 because the type is always known. */
2515 if (target && DECL_CXX_DESTRUCTOR_P (target))
2516 context.maybe_in_construction = false;
ec77d61f
JH
2517
2518 if (target)
2519 {
2520      /* In the case we get a final method, we don't need
68377e53
JH
2521 to walk derivations. */
2522 if (DECL_FINAL_P (target))
2523 context.maybe_derived_type = false;
2524 }
2d1644bf
JH
2525
2526 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2527 if (type_possibly_instantiated_p (outer_type->type))
2528 maybe_record_node (nodes, target, inserted, can_refer, &complete);
ec77d61f 2529 else
2d1644bf
JH
2530 {
2531 skipped = true;
2532 gcc_assert (in_lto_p || context.maybe_derived_type);
2533 }
79c7de84 2534
549bcbd1
JH
2535 if (binfo)
2536 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
eefe9a99 2537
ec77d61f 2538 /* Next walk recursively all derived types. */
68377e53
JH
2539 if (context.maybe_derived_type)
2540 {
2541      /* For anonymous namespace types we can attempt to build the full type.
2542	 All derivations must be in this unit (unless we see a partial unit).  */
2d1644bf 2543 if (!type->all_derivations_known)
ec77d61f 2544 complete = false;
68377e53
JH
2545 for (i = 0; i < outer_type->derived_types.length(); i++)
2546 possible_polymorphic_call_targets_1 (nodes, inserted,
2547 matched_vtables,
79c7de84
EB
2548 otr_type,
2549 outer_type->derived_types[i],
68377e53 2550 otr_token, outer_type->type,
2d1644bf
JH
2551 context.offset, &complete,
2552 bases_to_consider,
2553 context.maybe_in_construction);
68377e53 2554 }
79c7de84 2555
ec77d61f
JH
2556 /* Finally walk bases, if asked to. */
2557 (*slot)->nonconstruction_targets = nodes.length();
2d1644bf
JH
2558
2559 /* Destructors are never called through construction virtual tables,
2560     because the type is always known.  One of the entries may be
2561     cxa_pure_virtual, so look at at least two of them.  */
2562  if (context.maybe_in_construction)
2563    for (i = 0; i < MIN (nodes.length (), 2); i++)
2564 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2565 context.maybe_in_construction = false;
ec77d61f 2566 if (context.maybe_in_construction)
2d1644bf
JH
2567 {
2568 if (type != outer_type
2569 && (!skipped
2570 || (context.maybe_derived_type
2571 && !type_all_derivations_known_p (outer_type->type))))
2572 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2573 context.offset, nodes, inserted,
2574 matched_vtables, &complete);
2575 if (skipped)
2576 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2577 for (i = 0; i < bases_to_consider.length(); i++)
2578 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
2579 }
2580 bases_to_consider.release();
ec77d61f 2581
eefe9a99 2582 (*slot)->targets = nodes;
ec77d61f 2583 (*slot)->complete = complete;
68377e53 2584 if (completep)
ec77d61f
JH
2585 *completep = complete;
2586 if (nonconstruction_targetsp)
2587 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
eefe9a99
JH
2588
2589 pointer_set_destroy (inserted);
2590 pointer_set_destroy (matched_vtables);
2591 timevar_pop (TV_IPA_VIRTUAL_CALL);
2592 return nodes;
2593}
2594
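/* A typical use of the query above looks roughly like this (a sketch
   simplified from the IPA callers; `final' and `targets' are locals):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final);
     if (final && targets.length () == 1)
       ;  // the call may be devirtualized to targets[0]
     else if (targets.length ())
       ;  // at best a speculative edge can be added

   The returned vector is owned by the cache and must not be released by
   the caller.  */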
2595/* Dump all possible targets of a polymorphic call. */
2596
2597void
2598dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
2599 tree otr_type,
2600 HOST_WIDE_INT otr_token,
2601 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
2602{
2603 vec <cgraph_node *> targets;
2604 bool final;
549bcbd1 2605 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
eefe9a99 2606 unsigned int i;
ec77d61f 2607 int nonconstruction;
eefe9a99
JH
2608
2609 if (!type)
2610 return;
2611 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 2612 ctx,
ec77d61f 2613 &final, NULL, &nonconstruction);
68377e53 2614 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 2615 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
2616 fprintf (f, " token %i\n", (int)otr_token);
2617 if (ctx.outer_type || ctx.offset)
2618 {
2619 fprintf (f, " Contained in type:");
2620 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2621 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2622 ctx.offset);
2623 }
2624
2625 fprintf (f, " %s%s%s\n ",
2626 final ? "This is a complete list." :
68377e53
JH
2627	   "This is a partial list; extra targets may be defined in other units.",
2628 ctx.maybe_in_construction ? " (base types included)" : "",
2629 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 2630 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
2631 {
2632 char *name = NULL;
2633 if (i == (unsigned)nonconstruction)
2634 fprintf (f, "\n If the type is in construction,"
2635		 " then additional targets are:\n"
2636 " ");
2637 if (in_lto_p)
2638 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2639 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2640 if (in_lto_p)
2641 free (name);
2642 if (!targets[i]->definition)
2643 fprintf (f, " (no definition%s)",
2644 DECL_DECLARED_INLINE_P (targets[i]->decl)
2645 ? " inline" : "");
2646 }
68377e53 2647 fprintf (f, "\n\n");
eefe9a99
JH
2648}
2649
0e1474e5
JH
2650
2651/* Return true if N can possibly be a target of a polymorphic call of
2652 OTR_TYPE/OTR_TOKEN. */
2653
2654bool
2655possible_polymorphic_call_target_p (tree otr_type,
2656 HOST_WIDE_INT otr_token,
68377e53 2657 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
2658 struct cgraph_node *n)
2659{
2660 vec <cgraph_node *> targets;
2661 unsigned int i;
68377e53 2662 enum built_in_function fcode;
450ad0cd 2663 bool final;
0e1474e5 2664
68377e53
JH
2665 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2666 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2667 == BUILT_IN_UNREACHABLE
2668 || fcode == BUILT_IN_TRAP))
2669 return true;
2670
c203e8a7 2671 if (!odr_hash)
0e1474e5 2672 return true;
68377e53 2673 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 2674 for (i = 0; i < targets.length (); i++)
d52f5295 2675 if (n->semantically_equivalent_p (targets[i]))
0e1474e5 2676 return true;
450ad0cd
JH
2677
2678 /* At a moment we allow middle end to dig out new external declarations
2679 as a targets of polymorphic calls. */
67348ccc 2680 if (!final && !n->definition)
450ad0cd 2681 return true;
0e1474e5
JH
2682 return false;
2683}
2684
2685
2686/* After callgraph construction new external nodes may appear.
2687 Add them into the graph. */
2688
2689void
2690update_type_inheritance_graph (void)
2691{
2692 struct cgraph_node *n;
2693
c203e8a7 2694 if (!odr_hash)
0e1474e5
JH
2695 return;
2696 free_polymorphic_call_targets_hash ();
2697 timevar_push (TV_IPA_INHERITANCE);
68377e53 2698 /* We reconstruct the graph starting from types of all methods seen in the
0e1474e5
JH
2699     unit.  */
2700 FOR_EACH_FUNCTION (n)
67348ccc
DM
2701 if (DECL_VIRTUAL_P (n->decl)
2702 && !n->definition
d52f5295 2703 && n->real_symbol_p ())
549bcbd1
JH
2704 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2705 true);
0e1474e5
JH
2706 timevar_pop (TV_IPA_INHERITANCE);
2707}
bbc9396b
JH
2708
2709
2710/* Return true if N looks like a likely target of a polymorphic call.
2711   Rule out cxa_pure_virtual, noreturns, functions declared cold and
2712   other obvious cases.  */
2713
2714bool
2715likely_target_p (struct cgraph_node *n)
2716{
2717 int flags;
2718 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 2719 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 2720 return false;
67348ccc 2721 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
2722 if (flags & ECF_NORETURN)
2723 return false;
2724 if (lookup_attribute ("cold",
67348ccc 2725 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
2726 return false;
2727 if (n->frequency < NODE_FREQUENCY_NORMAL)
2728 return false;
ccb05ef2
JH
2729  /* If there are no live virtual tables referring to the target,
2730     the only way the target can be called is via an instance coming from
2731     another compilation unit; speculative devirtualization is built around
2732     the assumption that this won't happen.  */
2733 if (!referenced_from_vtable_p (n))
2734 return false;
bbc9396b
JH
2735 return true;
2736}
2737
2738/* The ipa-devirt pass.
3462aa02
JH
2739   When a polymorphic call has only one likely target in the unit,
2740   turn it into a speculative call.  */
bbc9396b
JH
2741
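/* For instance (an illustrative sketch, not code from this unit), given

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };
     int call (A *a) { return a->f (); }

   when B::f is the only likely target visible in this unit, the indirect
   call is turned into a speculative form along the lines of

     if (a->vptr[slot] == &B::f)
       res = B::f (a);	// direct call, may be inlined
     else
       res = a->f ();	// original polymorphic call

   which later optimizations may confirm or undo.  */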
2742static unsigned int
2743ipa_devirt (void)
2744{
2745 struct cgraph_node *n;
2746 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2747 struct cgraph_edge *e;
2748
2749 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2750 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
570215f9 2751 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
bbc9396b
JH
2752
2753 FOR_EACH_DEFINED_FUNCTION (n)
2754 {
2755 bool update = false;
2756 if (dump_file && n->indirect_calls)
2757	fprintf (dump_file, "\n\nProcessing function %s/%i\n",
fec39fa6 2758 n->name (), n->order);
bbc9396b
JH
2759 for (e = n->indirect_calls; e; e = e->next_callee)
2760 if (e->indirect_info->polymorphic)
2761 {
2762 struct cgraph_node *likely_target = NULL;
2763 void *cache_token;
2764 bool final;
ec77d61f 2765 int nonconstruction_targets;
bbc9396b
JH
2766 vec <cgraph_node *>targets
2767 = possible_polymorphic_call_targets
ec77d61f 2768 (e, &final, &cache_token, &nonconstruction_targets);
bbc9396b
JH
2769 unsigned int i;
2770
2771 if (dump_file)
2772 dump_possible_polymorphic_call_targets
2773 (dump_file, e);
3462aa02 2774
bbc9396b
JH
2775 npolymorphic++;
2776
bbc9396b
JH
2777 if (!cgraph_maybe_hot_edge_p (e))
2778 {
2779 if (dump_file)
ec77d61f 2780 fprintf (dump_file, "Call is cold\n\n");
bbc9396b
JH
2781 ncold++;
2782 continue;
2783 }
2784 if (e->speculative)
2785 {
2786 if (dump_file)
ec77d61f 2787 fprintf (dump_file, "Call is aready speculated\n\n");
bbc9396b
JH
2788 nspeculated++;
2789
2790 /* When dumping see if we agree with speculation. */
2791 if (!dump_file)
2792 continue;
2793 }
2794 if (pointer_set_contains (bad_call_targets,
2795 cache_token))
2796 {
2797 if (dump_file)
ec77d61f 2798 fprintf (dump_file, "Target list is known to be useless\n\n");
bbc9396b
JH
2799 nmultiple++;
2800 continue;
2801 }
c3284718 2802 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
2803 if (likely_target_p (targets[i]))
2804 {
2805 if (likely_target)
2806 {
ec77d61f
JH
2807 if (i < (unsigned) nonconstruction_targets)
2808 {
2809 likely_target = NULL;
2810 if (dump_file)
2811 fprintf (dump_file, "More than one likely target\n\n");
2812 nmultiple++;
2813 }
bbc9396b
JH
2814 break;
2815 }
2816 likely_target = targets[i];
2817 }
2818 if (!likely_target)
2819 {
2820 pointer_set_insert (bad_call_targets, cache_token);
2821 continue;
2822 }
2823 /* This is reached only when dumping; check if we agree or disagree
2824 with the speculation. */
2825 if (e->speculative)
2826 {
2827 struct cgraph_edge *e2;
2828 struct ipa_ref *ref;
2829 cgraph_speculative_call_info (e, e2, e, ref);
d52f5295
ML
2830 if (e2->callee->ultimate_alias_target ()
2831 == likely_target->ultimate_alias_target ())
bbc9396b 2832 {
ec77d61f 2833 fprintf (dump_file, "We agree with speculation\n\n");
bbc9396b
JH
2834 nok++;
2835 }
2836 else
2837 {
ec77d61f 2838 fprintf (dump_file, "We disagree with speculation\n\n");
bbc9396b
JH
2839 nwrong++;
2840 }
2841 continue;
2842 }
67348ccc 2843 if (!likely_target->definition)
bbc9396b
JH
2844 {
2845 if (dump_file)
ec77d61f 2846 fprintf (dump_file, "Target is not an definition\n\n");
bbc9396b
JH
2847 nnotdefined++;
2848 continue;
2849 }
2850	  /* Do not introduce new references to external symbols.  While we
2851	     can handle these just fine, it is common for programs to be built
2852	     incorrectly, with headers defining methods that they are not
2853	     actually linked with.  */
67348ccc 2854 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
2855 {
2856 if (dump_file)
ec77d61f 2857 fprintf (dump_file, "Target is external\n\n");
bbc9396b
JH
2858 nexternal++;
2859 continue;
2860 }
570215f9
JM
2861 /* Don't use an implicitly-declared destructor (c++/58678). */
2862 struct cgraph_node *non_thunk_target
d52f5295 2863 = likely_target->function_symbol ();
570215f9
JM
2864 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2865 && DECL_COMDAT (non_thunk_target->decl))
2866 {
2867 if (dump_file)
2868 fprintf (dump_file, "Target is artificial\n\n");
2869 nartificial++;
2870 continue;
2871 }
d52f5295
ML
2872 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2873 && likely_target->can_be_discarded_p ())
bbc9396b
JH
2874 {
2875 if (dump_file)
ec77d61f 2876 fprintf (dump_file, "Target is overwritable\n\n");
bbc9396b
JH
2877 noverwritable++;
2878 continue;
2879 }
2b5f0895 2880 else if (dbg_cnt (devirt))
bbc9396b 2881 {
2b5f0895
XDL
2882 if (dump_enabled_p ())
2883 {
807b7d62 2884 location_t locus = gimple_location_safe (e->call_stmt);
2b5f0895
XDL
2885 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2886 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2887 n->name (), n->order,
2888 likely_target->name (),
2889 likely_target->order);
2890 }
d52f5295 2891 if (!likely_target->can_be_discarded_p ())
5b79657a
JH
2892 {
2893 cgraph_node *alias;
d52f5295 2894 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
5b79657a
JH
2895 if (alias)
2896 likely_target = alias;
2897 }
bbc9396b
JH
2898 nconverted++;
2899 update = true;
2900 cgraph_turn_edge_to_speculative
2901 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2902 }
2903 }
2904 if (update)
2905 inline_update_overall_summary (n);
2906 }
2907 pointer_set_destroy (bad_call_targets);
2908
2909 if (dump_file)
2910 fprintf (dump_file,
2911 "%i polymorphic calls, %i devirtualized,"
2912 " %i speculatively devirtualized, %i cold\n"
2913 "%i have multiple targets, %i overwritable,"
2914 " %i already speculated (%i agree, %i disagree),"
570215f9 2915 " %i external, %i not defined, %i artificial\n",
bbc9396b
JH
2916 npolymorphic, ndevirtualized, nconverted, ncold,
2917 nmultiple, noverwritable, nspeculated, nok, nwrong,
570215f9 2918 nexternal, nnotdefined, nartificial);
bbc9396b
JH
2919 return ndevirtualized ? TODO_remove_functions : 0;
2920}
2921
bbc9396b
JH
2922namespace {
2923
2924const pass_data pass_data_ipa_devirt =
2925{
2926 IPA_PASS, /* type */
2927 "devirt", /* name */
2928 OPTGROUP_NONE, /* optinfo_flags */
bbc9396b
JH
2929 TV_IPA_DEVIRT, /* tv_id */
2930 0, /* properties_required */
2931 0, /* properties_provided */
2932 0, /* properties_destroyed */
2933 0, /* todo_flags_start */
2934 ( TODO_dump_symtab ), /* todo_flags_finish */
2935};
2936
2937class pass_ipa_devirt : public ipa_opt_pass_d
2938{
2939public:
c3284718
RS
2940 pass_ipa_devirt (gcc::context *ctxt)
2941 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2942 NULL, /* generate_summary */
2943 NULL, /* write_summary */
2944 NULL, /* read_summary */
2945 NULL, /* write_optimization_summary */
2946 NULL, /* read_optimization_summary */
2947 NULL, /* stmt_fixup */
2948 0, /* function_transform_todo_flags_start */
2949 NULL, /* function_transform */
2950 NULL) /* variable_transform */
bbc9396b
JH
2951 {}
2952
2953 /* opt_pass methods: */
1a3d085c
TS
2954 virtual bool gate (function *)
2955 {
2956 return (flag_devirtualize
2957 && flag_devirtualize_speculatively
2958 && optimize);
2959 }
2960
be55bfe6 2961 virtual unsigned int execute (function *) { return ipa_devirt (); }
bbc9396b
JH
2962
2963}; // class pass_ipa_devirt
2964
2965} // anon namespace
2966
2967ipa_opt_pass_d *
2968make_pass_ipa_devirt (gcc::context *ctxt)
2969{
2970 return new pass_ipa_devirt (ctxt);
2971}
2972
eefe9a99 2973#include "gt-ipa-devirt.h"