1 /* Basic IPA utilities for type inheritance graph construction and
3 Copyright (C) 2013 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs from BINFO_VTABLE of
65 the binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
84 build_type_inheritance_graph triggers a construction of the type inheritance
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
110 #include "coretypes.h"
113 #include "print-tree.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
132 /* Dummy polymorphic call context. */
134 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
135 = {0, NULL
, false, true};
137 /* Pointer set of all call targets appearing in the cache. */
138 static pointer_set_t
*cached_polymorphic_call_targets
;
140 /* The node of type inheritance graph. For each type unique in
141 One Definition Rule (ODR) sense, we produce one node linking all
142 main variants of types equivalent to it, bases and derived types. */
144 struct GTY(()) odr_type_d
149 vec
<odr_type
> GTY((skip
)) bases
;
150 /* All derived types with virtual methods seen in the unit. */
151 vec
<odr_type
> GTY((skip
)) derived_types
;
153 /* All equivalent types, if more than one. */
154 vec
<tree
, va_gc
> *types
;
155 /* Set of all equivalent types, if NON-NULL. */
156 pointer_set_t
* GTY((skip
)) types_set
;
158 /* Unique ID indexing the type in odr_types array. */
160 /* Is it in anonymous namespace? */
161 bool anonymous_namespace
;
165 /* Return true if BINFO corresponds to a type with virtual methods.
167 Every type has several BINFOs. One is the BINFO associated by the type
168 while other represents bases of derived types. The BINFOs representing
169 bases do not have BINFO_VTABLE pointer set when this is the single
170 inheritance (because vtables are shared). Look up the BINFO of type
171 and check presence of its vtable. */
174 polymorphic_type_binfo_p (tree binfo
)
176 /* See if BINFO's type has a virtual table associated with it. */
177 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo
)));
180 /* One Definition Rule hashtable helpers. */
184 typedef odr_type_d value_type
;
185 typedef union tree_node compare_type
;
186 static inline hashval_t
hash (const value_type
*);
187 static inline bool equal (const value_type
*, const compare_type
*);
188 static inline void remove (value_type
*);
191 /* Produce hash based on type name. */
194 hash_type_name (tree t
)
196 gcc_checking_assert (TYPE_MAIN_VARIANT (t
) == t
);
198 /* If not in LTO, all main variants are unique, so we can do
201 return htab_hash_pointer (t
);
203 /* Anonymous types are unique. */
204 if (type_in_anonymous_namespace_p (t
))
205 return htab_hash_pointer (t
);
207 /* For polymorphic types, we can simply hash the virtual table. */
208 if (TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)))
210 tree v
= BINFO_VTABLE (TYPE_BINFO (t
));
213 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
215 hash
= TREE_INT_CST_LOW (TREE_OPERAND (v
, 1));
216 v
= TREE_OPERAND (TREE_OPERAND (v
, 0), 0);
219 v
= DECL_ASSEMBLER_NAME (v
);
220 hash
= iterative_hash_hashval_t (hash
, htab_hash_pointer (v
));
224 /* Rest is not implemented yet. */
228 /* Return the computed hashcode for ODR_TYPE. */
231 odr_hasher::hash (const value_type
*odr_type
)
233 return hash_type_name (odr_type
->type
);
236 /* Compare types T1 and T2 and return true if they are
240 odr_hasher::equal (const value_type
*t1
, const compare_type
*ct2
)
242 tree t2
= const_cast <tree
> (ct2
);
244 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2
) == ct2
);
249 return types_same_for_odr (t1
->type
, t2
);
252 /* Free ODR type V. */
255 odr_hasher::remove (value_type
*v
)
258 v
->derived_types
.release ();
260 pointer_set_destroy (v
->types_set
);
264 /* ODR type hash used to lookup ODR type based on tree type node. */
266 typedef hash_table
<odr_hasher
> odr_hash_type
;
267 static odr_hash_type odr_hash
;
269 /* ODR types are also stored into ODR_TYPE vector to allow consistent
270 walking. Bases appear before derived types. Vector is garbage collected
271 so we won't end up visiting empty types. */
273 static GTY(()) vec
<odr_type
, va_gc
> *odr_types_ptr
;
274 #define odr_types (*odr_types_ptr)
276 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
277 from VAL->type. This may happen in LTO where tree merging did not merge
278 all variants of the same type. It may or may not mean the ODR violation.
279 Add it to the list of duplicates and warn on some violations. */
282 add_type_duplicate (odr_type val
, tree type
)
285 val
->types_set
= pointer_set_create ();
287 /* See if this duplicate is new. */
288 if (!pointer_set_insert (val
->types_set
, type
))
291 bool base_mismatch
= false;
292 gcc_assert (in_lto_p
);
293 vec_safe_push (val
->types
, type
);
296 /* First we compare memory layout. */
297 if (!types_compatible_p (val
->type
, type
))
300 if (BINFO_VTABLE (TYPE_BINFO (val
->type
))
301 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type
)), 0,
302 "type %qD violates one definition rule ",
304 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
305 "a type with the same name but different layout is "
306 "defined in another translation unit");
307 if (cgraph_dump_file
)
309 fprintf (cgraph_dump_file
, "ODR violation or merging or ODR type bug?\n");
311 print_node (cgraph_dump_file
, "", val
->type
, 0);
312 putc ('\n',cgraph_dump_file
);
313 print_node (cgraph_dump_file
, "", type
, 0);
314 putc ('\n',cgraph_dump_file
);
318 /* Next sanity check that bases are the same. If not, we will end
319 up producing wrong answers. */
320 for (j
= 0, i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
321 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type
), i
)))
323 odr_type base
= get_odr_type
325 (BINFO_BASE_BINFO (TYPE_BINFO (type
),
328 if (val
->bases
.length () <= j
|| val
->bases
[j
] != base
)
329 base_mismatch
= true;
336 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type
)), 0,
337 "type %qD violates one definition rule ",
339 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
340 "a type with the same name but different bases is "
341 "defined in another translation unit");
342 if (cgraph_dump_file
)
344 fprintf (cgraph_dump_file
, "ODR bse violation or merging bug?\n");
346 print_node (cgraph_dump_file
, "", val
->type
, 0);
347 putc ('\n',cgraph_dump_file
);
348 print_node (cgraph_dump_file
, "", type
, 0);
349 putc ('\n',cgraph_dump_file
);
353 /* Regularize things a little. During LTO same types may come with
354 different BINFOs. Either because their virtual table was
355 not merged by tree merging and only later at decl merging or
356 because one type comes with external vtable, while other
357 with internal. We want to merge equivalent binfos to conserve
358 memory and streaming overhead.
360 The external vtables are more harmful: they contain references
361 to external declarations of methods that may be defined in the
362 merged LTO unit. For this reason we absolutely need to remove
363 them and replace by internal variants. Not doing so will lead
364 to incomplete answers from possible_polymorphic_call_targets. */
365 if (!flag_ltrans
&& merge
)
367 tree master_binfo
= TYPE_BINFO (val
->type
);
368 tree v1
= BINFO_VTABLE (master_binfo
);
369 tree v2
= BINFO_VTABLE (TYPE_BINFO (type
));
371 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
373 gcc_assert (TREE_CODE (v2
) == POINTER_PLUS_EXPR
374 && operand_equal_p (TREE_OPERAND (v1
, 1),
375 TREE_OPERAND (v2
, 1), 0));
376 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
377 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
379 gcc_assert (DECL_ASSEMBLER_NAME (v1
)
380 == DECL_ASSEMBLER_NAME (v2
));
382 if (DECL_EXTERNAL (v1
) && !DECL_EXTERNAL (v2
))
386 TYPE_BINFO (val
->type
) = TYPE_BINFO (type
);
387 for (i
= 0; i
< val
->types
->length (); i
++)
389 if (TYPE_BINFO ((*val
->types
)[i
])
391 TYPE_BINFO ((*val
->types
)[i
]) = TYPE_BINFO (type
);
395 TYPE_BINFO (type
) = master_binfo
;
400 /* Get ODR type hash entry for TYPE. If INSERT is true, create
401 possibly new entry. */
404 get_odr_type (tree type
, bool insert
)
410 type
= TYPE_MAIN_VARIANT (type
);
411 gcc_checking_assert (TYPE_MAIN_VARIANT (type
) == type
);
412 hash
= hash_type_name (type
);
413 slot
= odr_hash
.find_slot_with_hash (type
, hash
, insert
? INSERT
: NO_INSERT
);
417 /* See if we already have entry for type. */
422 /* With LTO we need to support multiple tree representation of
423 the same ODR type. */
424 if (val
->type
!= type
)
425 add_type_duplicate (val
, type
);
429 tree binfo
= TYPE_BINFO (type
);
432 val
= ggc_alloc_cleared_odr_type_d ();
435 val
->derived_types
= vNULL
;
436 val
->anonymous_namespace
= type_in_anonymous_namespace_p (type
);
438 for (i
= 0; i
< BINFO_N_BASE_BINFOS (binfo
); i
++)
439 /* For now record only polymorphic types. Others are
440 pointless for devirtualization and we cannot precisely
441 determine ODR equivalency of these during LTO. */
442 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo
, i
)))
444 odr_type base
= get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo
,
447 base
->derived_types
.safe_push (val
);
448 val
->bases
.safe_push (base
);
450 /* First record bases, then add into array so ids are increasing. */
452 val
->id
= odr_types
.length ();
453 vec_safe_push (odr_types_ptr
, val
);
458 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
459 recursive printing. */
462 dump_odr_type (FILE *f
, odr_type t
, int indent
=0)
465 fprintf (f
, "%*s type %i: ", indent
* 2, "", t
->id
);
466 print_generic_expr (f
, t
->type
, TDF_SLIM
);
467 fprintf (f
, "%s\n", t
->anonymous_namespace
? " (anonymous namespace)":"");
468 if (TYPE_NAME (t
->type
))
470 fprintf (f
, "%*s defined at: %s:%i\n", indent
* 2, "",
471 DECL_SOURCE_FILE (TYPE_NAME (t
->type
)),
472 DECL_SOURCE_LINE (TYPE_NAME (t
->type
)));
474 if (t
->bases
.length ())
476 fprintf (f
, "%*s base odr type ids: ", indent
* 2, "");
477 for (i
= 0; i
< t
->bases
.length (); i
++)
478 fprintf (f
, " %i", t
->bases
[i
]->id
);
481 if (t
->derived_types
.length ())
483 fprintf (f
, "%*s derived types:\n", indent
* 2, "");
484 for (i
= 0; i
< t
->derived_types
.length (); i
++)
485 dump_odr_type (f
, t
->derived_types
[i
], indent
+ 1);
490 /* Dump the type inheritance graph. */
493 dump_type_inheritance_graph (FILE *f
)
498 fprintf (f
, "\n\nType inheritance graph:\n");
499 for (i
= 0; i
< odr_types
.length (); i
++)
501 if (odr_types
[i
]->bases
.length () == 0)
502 dump_odr_type (f
, odr_types
[i
]);
504 for (i
= 0; i
< odr_types
.length (); i
++)
506 if (odr_types
[i
]->types
&& odr_types
[i
]->types
->length ())
509 fprintf (f
, "Duplicate tree types for odr type %i\n", i
);
510 print_node (f
, "", odr_types
[i
]->type
, 0);
511 for (j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
514 fprintf (f
, "duplicate #%i\n", j
);
515 print_node (f
, "", (*odr_types
[i
]->types
)[j
], 0);
516 t
= (*odr_types
[i
]->types
)[j
];
517 while (TYPE_P (t
) && TYPE_CONTEXT (t
))
519 t
= TYPE_CONTEXT (t
);
520 print_node (f
, "", t
, 0);
528 /* Given method type T, return type of class it belongs to.
529 Lookup this pointer and get its type. */
532 method_class_type (tree t
)
534 tree first_parm_type
= TREE_VALUE (TYPE_ARG_TYPES (t
));
535 gcc_assert (TREE_CODE (t
) == METHOD_TYPE
);
537 return TREE_TYPE (first_parm_type
);
540 /* Initialize IPA devirt and build inheritance tree graph. */
543 build_type_inheritance_graph (void)
545 struct cgraph_node
*n
;
546 FILE *inheritance_dump_file
;
549 if (odr_hash
.is_created ())
551 timevar_push (TV_IPA_INHERITANCE
);
552 inheritance_dump_file
= dump_begin (TDI_inheritance
, &flags
);
553 odr_hash
.create (23);
555 /* We reconstruct the graph starting of types of all methods seen in the
557 FOR_EACH_FUNCTION (n
)
558 if (DECL_VIRTUAL_P (n
->decl
)
559 && symtab_real_symbol_p (n
))
560 get_odr_type (method_class_type (TREE_TYPE (n
->decl
)), true);
561 if (inheritance_dump_file
)
563 dump_type_inheritance_graph (inheritance_dump_file
);
564 dump_end (TDI_inheritance
, inheritance_dump_file
);
566 timevar_pop (TV_IPA_INHERITANCE
);
569 /* If TARGET has associated node, record it in the NODES array.
570 If TARGET cannot be inserted (for example because its body was
571 already removed and there is no way to refer to it), clear COMPLETEP. */
574 maybe_record_node (vec
<cgraph_node
*> &nodes
,
575 tree target
, pointer_set_t
*inserted
,
578 struct cgraph_node
*target_node
;
579 enum built_in_function fcode
;
582 /* Those are used to mark impossible scenarios. */
583 || (fcode
= DECL_FUNCTION_CODE (target
))
584 == BUILT_IN_UNREACHABLE
585 || fcode
== BUILT_IN_TRAP
)
588 target_node
= cgraph_get_node (target
);
590 if (target_node
!= NULL
591 && (TREE_PUBLIC (target
)
592 || target_node
->definition
)
593 && symtab_real_symbol_p (target_node
))
595 gcc_assert (!target_node
->global
.inlined_to
);
596 gcc_assert (symtab_real_symbol_p (target_node
));
597 if (!pointer_set_insert (inserted
, target
))
599 pointer_set_insert (cached_polymorphic_call_targets
,
601 nodes
.safe_push (target_node
);
605 && !type_in_anonymous_namespace_p
606 (method_class_type (TREE_TYPE (target
))))
610 /* See if BINFO's type matches OUTER_TYPE. If so, look up
611 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
612 method in vtable and insert method to NODES array.
613 Otherwise recurse to base BINFOs.
614 This matches what get_binfo_at_offset does, but with offset
617 TYPE_BINFO is the binfo holding a virtual table matching
618 BINFO's type. In the case of single inheritance, this
619 is binfo of BINFO's type ancestor (vtable is shared),
620 otherwise it is binfo of BINFO's type.
622 MATCHED_VTABLES tracks virtual tables we already did lookup
623 for virtual function in. INSERTED tracks nodes we already
626 ANONYMOUS is true if BINFO is part of anonymous namespace.
630 record_target_from_binfo (vec
<cgraph_node
*> &nodes
,
634 HOST_WIDE_INT otr_token
,
636 HOST_WIDE_INT offset
,
637 pointer_set_t
*inserted
,
638 pointer_set_t
*matched_vtables
,
641 tree type
= BINFO_TYPE (binfo
);
645 gcc_checking_assert (BINFO_VTABLE (type_binfo
));
647 if (types_same_for_odr (type
, outer_type
))
649 tree inner_binfo
= get_binfo_at_offset (type_binfo
,
651 /* For types in anonymous namespace first check if the respective vtable
652 is alive. If not, we know the type can't be called. */
653 if (!flag_ltrans
&& anonymous
)
655 tree vtable
= BINFO_VTABLE (inner_binfo
);
656 struct varpool_node
*vnode
;
658 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
659 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
660 vnode
= varpool_get_node (vtable
);
661 if (!vnode
|| !vnode
->definition
)
664 gcc_assert (inner_binfo
);
665 if (!pointer_set_insert (matched_vtables
, BINFO_VTABLE (inner_binfo
)))
667 tree target
= gimple_get_virt_method_for_binfo (otr_token
, inner_binfo
);
669 maybe_record_node (nodes
, target
, inserted
, NULL
);
675 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
676 /* Walking bases that have no virtual method is a pointless exercise. */
677 if (polymorphic_type_binfo_p (base_binfo
))
678 record_target_from_binfo (nodes
, base_binfo
, otr_type
,
679 /* In the case of single inheritance,
680 the virtual table is shared with
682 BINFO_VTABLE (base_binfo
) ? base_binfo
: type_binfo
,
683 otr_token
, outer_type
, offset
, inserted
,
684 matched_vtables
, anonymous
);
687 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
688 of TYPE, insert them to NODES, recurse into derived nodes.
689 INSERTED is used to avoid duplicate insertions of methods into NODES.
690 MATCHED_VTABLES are used to avoid duplicate walking vtables. */
693 possible_polymorphic_call_targets_1 (vec
<cgraph_node
*> &nodes
,
694 pointer_set_t
*inserted
,
695 pointer_set_t
*matched_vtables
,
698 HOST_WIDE_INT otr_token
,
700 HOST_WIDE_INT offset
)
702 tree binfo
= TYPE_BINFO (type
->type
);
705 record_target_from_binfo (nodes
, binfo
, otr_type
, binfo
, otr_token
,
707 inserted
, matched_vtables
,
708 type
->anonymous_namespace
);
709 for (i
= 0; i
< type
->derived_types
.length (); i
++)
710 possible_polymorphic_call_targets_1 (nodes
, inserted
,
713 type
->derived_types
[i
],
714 otr_token
, outer_type
, offset
);
717 /* Cache of queries for polymorphic call targets.
719 Enumerating all call targets may get expensive when there are many
720 polymorphic calls in the program, so we memoize all the previous
721 queries and avoid duplicated work. */
723 struct polymorphic_call_target_d
725 HOST_WIDE_INT otr_token
;
726 ipa_polymorphic_call_context context
;
728 vec
<cgraph_node
*> targets
;
732 /* Polymorphic call target cache helpers. */
734 struct polymorphic_call_target_hasher
736 typedef polymorphic_call_target_d value_type
;
737 typedef polymorphic_call_target_d compare_type
;
738 static inline hashval_t
hash (const value_type
*);
739 static inline bool equal (const value_type
*, const compare_type
*);
740 static inline void remove (value_type
*);
743 /* Return the computed hashcode for ODR_QUERY. */
746 polymorphic_call_target_hasher::hash (const value_type
*odr_query
)
750 hash
= iterative_hash_host_wide_int
751 (odr_query
->otr_token
,
752 odr_query
->type
->id
);
753 hash
= iterative_hash_hashval_t (TYPE_UID (odr_query
->context
.outer_type
),
755 hash
= iterative_hash_host_wide_int (odr_query
->context
.offset
, hash
);
756 return iterative_hash_hashval_t
757 (((int)odr_query
->context
.maybe_in_construction
<< 1)
758 | (int)odr_query
->context
.maybe_derived_type
, hash
);
761 /* Compare cache entries T1 and T2. */
764 polymorphic_call_target_hasher::equal (const value_type
*t1
,
765 const compare_type
*t2
)
767 return (t1
->type
== t2
->type
&& t1
->otr_token
== t2
->otr_token
768 && t1
->context
.offset
== t2
->context
.offset
769 && t1
->context
.outer_type
== t2
->context
.outer_type
770 && t1
->context
.maybe_in_construction
771 == t2
->context
.maybe_in_construction
772 && t1
->context
.maybe_derived_type
== t2
->context
.maybe_derived_type
);
775 /* Remove entry in polymorphic call target cache hash. */
778 polymorphic_call_target_hasher::remove (value_type
*v
)
780 v
->targets
.release ();
784 /* Polymorphic call target query cache. */
786 typedef hash_table
<polymorphic_call_target_hasher
>
787 polymorphic_call_target_hash_type
;
788 static polymorphic_call_target_hash_type polymorphic_call_target_hash
;
790 /* Destroy polymorphic call target query cache. */
793 free_polymorphic_call_targets_hash ()
795 if (cached_polymorphic_call_targets
)
797 polymorphic_call_target_hash
.dispose ();
798 pointer_set_destroy (cached_polymorphic_call_targets
);
799 cached_polymorphic_call_targets
= NULL
;
803 /* When virtual function is removed, we may need to flush the cache. */
806 devirt_node_removal_hook (struct cgraph_node
*n
, void *d ATTRIBUTE_UNUSED
)
808 if (cached_polymorphic_call_targets
809 && pointer_set_contains (cached_polymorphic_call_targets
, n
))
810 free_polymorphic_call_targets_hash ();
813 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
814 is contained at CONTEXT->OFFSET. Walk the memory representation of
815 CONTEXT->OUTER_TYPE and find the outermost class type that match
816 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
819 For example when CONTEXT represents type
825 and we look for type at offset sizeof(int), we end up with B and offset 0.
826 If the same is produced by multiple inheritance, we end up with A and offset
829 If we can not find corresponding class, give up by setting
830 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
831 Return true when the lookup was successful. */
834 get_class_context (ipa_polymorphic_call_context
*context
,
837 tree type
= context
->outer_type
;
838 HOST_WIDE_INT offset
= context
->offset
;
840 /* Find the sub-object the constant actually refers to and mark whether it is
841 an artificial one (as opposed to a user-defined one). */
844 HOST_WIDE_INT pos
, size
;
847 /* On a match, just return what we found. */
848 if (TREE_CODE (type
) == TREE_CODE (expected_type
)
849 && types_same_for_odr (type
, expected_type
))
851 gcc_assert (offset
== 0);
855 /* Walk fields and find the corresponding one at OFFSET. */
856 if (TREE_CODE (type
) == RECORD_TYPE
)
858 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
860 if (TREE_CODE (fld
) != FIELD_DECL
)
863 pos
= int_bit_position (fld
);
864 size
= tree_to_uhwi (DECL_SIZE (fld
));
865 if (pos
<= offset
&& (pos
+ size
) > offset
)
872 type
= TREE_TYPE (fld
);
874 /* DECL_ARTIFICIAL represents a basetype. */
875 if (!DECL_ARTIFICIAL (fld
))
877 context
->outer_type
= type
;
878 context
->offset
= offset
;
879 /* As soon as we see a field containing the type,
880 we know we are not looking for derivations. */
881 context
->maybe_derived_type
= false;
884 else if (TREE_CODE (type
) == ARRAY_TYPE
)
886 tree subtype
= TREE_TYPE (type
);
888 /* Give up if we don't know array size. */
889 if (!tree_fits_shwi_p (TYPE_SIZE (subtype
))
890 || !tree_to_shwi (TYPE_SIZE (subtype
)) <= 0)
892 offset
= offset
% tree_to_shwi (TYPE_SIZE (subtype
));
894 context
->outer_type
= type
;
895 context
->offset
= offset
;
896 context
->maybe_derived_type
= false;
898 /* Give up on anything else. */
903 /* If we failed to find subtype we look for, give up and fall back to the
904 most generic query. */
906 context
->outer_type
= expected_type
;
908 context
->maybe_derived_type
= true;
912 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
915 contains_type_p (tree outer_type
, HOST_WIDE_INT offset
,
918 ipa_polymorphic_call_context context
= {offset
, outer_type
,
920 return get_class_context (&context
, otr_type
);
923 /* Given REF call in FNDECL, determine class of the polymorphic
924 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
925 Return pointer to object described by the context */
928 get_polymorphic_call_info (tree fndecl
,
931 HOST_WIDE_INT
*otr_token
,
932 ipa_polymorphic_call_context
*context
)
935 *otr_type
= obj_type_ref_class (ref
);
936 *otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref
));
938 /* Set up basic info in case we find nothing interesting in the analysis. */
939 context
->outer_type
= *otr_type
;
941 base_pointer
= OBJ_TYPE_REF_OBJECT (ref
);
942 context
->maybe_derived_type
= true;
943 context
->maybe_in_construction
= false;
945 /* Walk SSA for outer object. */
948 if (TREE_CODE (base_pointer
) == SSA_NAME
949 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
950 && SSA_NAME_DEF_STMT (base_pointer
)
951 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer
)))
953 base_pointer
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer
));
954 STRIP_NOPS (base_pointer
);
956 else if (TREE_CODE (base_pointer
) == ADDR_EXPR
)
958 HOST_WIDE_INT size
, max_size
;
959 HOST_WIDE_INT offset2
;
960 tree base
= get_ref_base_and_extent (TREE_OPERAND (base_pointer
, 0),
961 &offset2
, &size
, &max_size
);
963 /* If this is a varying address, punt. */
964 if ((TREE_CODE (base
) == MEM_REF
|| DECL_P (base
))
968 /* We found dereference of a pointer. Type of the pointer
969 and MEM_REF is meaningless, but we can look further. */
970 if (TREE_CODE (base
) == MEM_REF
)
972 base_pointer
= TREE_OPERAND (base
, 0);
974 += offset2
+ mem_ref_offset (base
).low
* BITS_PER_UNIT
;
975 context
->outer_type
= NULL
;
977 /* We found base object. In this case the outer_type
979 else if (DECL_P (base
))
981 context
->outer_type
= TREE_TYPE (base
);
982 gcc_assert (!POINTER_TYPE_P (context
->outer_type
));
984 /* Only type inconsistent programs can have otr_type that is
985 not part of outer type. */
986 if (!contains_type_p (context
->outer_type
,
987 context
->offset
, *otr_type
))
989 context
->offset
+= offset2
;
991 /* Make very conservative assumption that all objects
992 may be in construction.
993 TODO: ipa-prop already contains code to tell better.
995 context
->maybe_in_construction
= true;
996 context
->maybe_derived_type
= false;
1005 else if (TREE_CODE (base_pointer
) == POINTER_PLUS_EXPR
1006 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer
, 1)))
1008 context
->offset
+= tree_to_shwi (TREE_OPERAND (base_pointer
, 1))
1010 base_pointer
= TREE_OPERAND (base_pointer
, 0);
1017 /* Try to determine type of the outer object. */
1018 if (TREE_CODE (base_pointer
) == SSA_NAME
1019 && SSA_NAME_IS_DEFAULT_DEF (base_pointer
)
1020 && TREE_CODE (SSA_NAME_VAR (base_pointer
)) == PARM_DECL
)
1022 /* See if parameter is THIS pointer of a method. */
1023 if (TREE_CODE (TREE_TYPE (fndecl
)) == METHOD_TYPE
1024 && SSA_NAME_VAR (base_pointer
) == DECL_ARGUMENTS (fndecl
))
1026 context
->outer_type
= TREE_TYPE (TREE_TYPE (base_pointer
));
1027 gcc_assert (TREE_CODE (context
->outer_type
) == RECORD_TYPE
);
1029 /* Dynamic casting has possibly upcasted the type
1030 in the hierarchy. In this case the outer type is less
1031 informative than inner type and we should forget
1033 if (!contains_type_p (context
->outer_type
, context
->offset
,
1036 context
->outer_type
= NULL
;
1037 return base_pointer
;
1040 /* If the function is constructor or destructor, then
1041 the type is possibly in construction, but we know
1042 it is not derived type. */
1043 if (DECL_CXX_CONSTRUCTOR_P (fndecl
)
1044 || DECL_CXX_DESTRUCTOR_P (fndecl
))
1046 context
->maybe_in_construction
= true;
1047 context
->maybe_derived_type
= false;
1051 context
->maybe_derived_type
= true;
1052 context
->maybe_in_construction
= false;
1054 return base_pointer
;
1056 /* Non-PODs passed by value are really passed by invisible
1057 reference. In this case we also know the type of the
1059 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer
)))
1061 context
->outer_type
= TREE_TYPE (TREE_TYPE (base_pointer
));
1062 gcc_assert (!POINTER_TYPE_P (context
->outer_type
));
1063 /* Only type inconsistent programs can have otr_type that is
1064 not part of outer type. */
1065 if (!contains_type_p (context
->outer_type
, context
->offset
,
1068 context
->outer_type
= NULL
;
1070 return base_pointer
;
1072 context
->maybe_derived_type
= false;
1073 context
->maybe_in_construction
= false;
1074 return base_pointer
;
1077 /* TODO: There are multiple ways to derive a type. For instance
1078 if BASE_POINTER is passed to a constructor call prior to our reference.
1079 We do not make this type of flow sensitive analysis yet. */
1080 return base_pointer
;
1083 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1084 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1085 and insert them to NODES.
1087 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1090 record_targets_from_bases (tree otr_type
,
1091 HOST_WIDE_INT otr_token
,
1093 HOST_WIDE_INT offset
,
1094 vec
<cgraph_node
*> nodes
,
1095 pointer_set_t
*inserted
,
1096 pointer_set_t
*matched_vtables
,
1101 HOST_WIDE_INT pos
, size
;
1105 if (types_same_for_odr (outer_type
, otr_type
))
1108 for (fld
= TYPE_FIELDS (outer_type
); fld
; fld
= DECL_CHAIN (fld
))
1110 if (TREE_CODE (fld
) != FIELD_DECL
)
1113 pos
= int_bit_position (fld
);
1114 size
= tree_to_shwi (DECL_SIZE (fld
));
1115 if (pos
<= offset
&& (pos
+ size
) > offset
)
1118 /* Within a class type we should always find corresponding fields. */
1119 gcc_assert (fld
&& TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
);
1121 /* Nonbasetypes should have been stripped by outer_class_type. */
1122 gcc_assert (DECL_ARTIFICIAL (fld
));
1124 outer_type
= TREE_TYPE (fld
);
1127 base_binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
),
1129 gcc_assert (base_binfo
);
1130 if (!pointer_set_insert (matched_vtables
, BINFO_VTABLE (base_binfo
)))
1132 tree target
= gimple_get_virt_method_for_binfo (otr_token
, base_binfo
);
1134 maybe_record_node (nodes
, target
, inserted
, completep
);
1135 /* The only way method in anonymous namespace can become unreferable
1136 is that it has been fully optimized out. */
1137 else if (flag_ltrans
|| !type_in_anonymous_namespace_p (outer_type
))
1139 pointer_set_insert (matched_vtables
, BINFO_VTABLE (base_binfo
));
1144 /* When virtual table is removed, we may need to flush the cache. */
1147 devirt_variable_node_removal_hook (struct varpool_node
*n
,
1148 void *d ATTRIBUTE_UNUSED
)
1150 if (cached_polymorphic_call_targets
1151 && DECL_VIRTUAL_P (n
->decl
)
1152 && type_in_anonymous_namespace_p (DECL_CONTEXT (n
->decl
)))
1153 free_polymorphic_call_targets_hash ();
1156 /* Return vector containing possible targets of polymorphic call of type
1157 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1158 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1159 OTR_TYPE and include their virtual method. This is useful for types
1160 possibly in construction or destruction where the virtual table may
1161 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1162 us to walk the inheritance graph for all derivations.
1164 If COMPLETEP is non-NULL, store true if the list is complette.
1165 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1166 in the target cache. If user needs to visit every target list
1167 just once, it can memoize them.
1169 Returned vector is placed into cache. It is NOT caller's responsibility
1170 to free it. The vector can be freed on cgraph_remove_node call if
1171 the particular node is a virtual function present in the cache. */
1174 possible_polymorphic_call_targets (tree otr_type
,
1175 HOST_WIDE_INT otr_token
,
1176 ipa_polymorphic_call_context context
,
1180 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
1181 pointer_set_t
*inserted
;
1182 pointer_set_t
*matched_vtables
;
1183 vec
<cgraph_node
*> nodes
=vNULL
;
1184 odr_type type
, outer_type
;
1185 polymorphic_call_target_d key
;
1186 polymorphic_call_target_d
**slot
;
1191 type
= get_odr_type (otr_type
, true);
1193 /* Lookup the outer class type we want to walk. */
1194 if (context
.outer_type
)
1195 get_class_context (&context
, otr_type
);
1197 /* We now canonicalize our query, so we do not need extra hashtable entries. */
1199 /* Without outer type, we have no use for offset. Just do the
1200 basic search from innter type */
1201 if (!context
.outer_type
)
1203 context
.outer_type
= otr_type
;
1206 /* We need to update our hiearchy if the type does not exist. */
1207 outer_type
= get_odr_type (context
.outer_type
, true);
1208 /* If outer and inner type match, there are no bases to see. */
1209 if (type
== outer_type
)
1210 context
.maybe_in_construction
= false;
1211 /* If the type is final, there are no derivations. */
1212 if (TYPE_FINAL_P (outer_type
->type
))
1213 context
.maybe_derived_type
= false;
1215 /* Initialize query cache. */
1216 if (!cached_polymorphic_call_targets
)
1218 cached_polymorphic_call_targets
= pointer_set_create ();
1219 polymorphic_call_target_hash
.create (23);
1220 if (!node_removal_hook_holder
)
1222 node_removal_hook_holder
=
1223 cgraph_add_node_removal_hook (&devirt_node_removal_hook
, NULL
);
1224 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook
,
1229 /* Lookup cached answer. */
1231 key
.otr_token
= otr_token
;
1232 key
.context
= context
;
1233 slot
= polymorphic_call_target_hash
.find_slot (&key
, INSERT
);
1235 *cache_token
= (void *)*slot
;
1239 *completep
= (*slot
)->final
;
1240 return (*slot
)->targets
;
1245 /* Do actual search. */
1246 timevar_push (TV_IPA_VIRTUAL_CALL
);
1247 *slot
= XCNEW (polymorphic_call_target_d
);
1249 *cache_token
= (void *)*slot
;
1250 (*slot
)->type
= type
;
1251 (*slot
)->otr_token
= otr_token
;
1252 (*slot
)->context
= context
;
1254 inserted
= pointer_set_create ();
1255 matched_vtables
= pointer_set_create ();
1257 /* First see virtual method of type itself. */
1259 binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
->type
),
1260 context
.offset
, otr_type
);
1261 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
);
1264 maybe_record_node (nodes
, target
, inserted
, &final
);
1266 /* In the case we get final method, we don't need
1267 to walk derivations. */
1268 if (DECL_FINAL_P (target
))
1269 context
.maybe_derived_type
= false;
1271 /* The only way method in anonymous namespace can become unreferable
1272 is that it has been fully optimized out. */
1273 else if (flag_ltrans
|| !type
->anonymous_namespace
)
1275 pointer_set_insert (matched_vtables
, BINFO_VTABLE (binfo
));
1277 /* Next walk bases, if asked to. */
1278 if (context
.maybe_in_construction
)
1279 record_targets_from_bases (otr_type
, otr_token
, outer_type
->type
,
1280 context
.offset
, nodes
, inserted
,
1281 matched_vtables
, &final
);
1283 /* Finally walk recursively all derived types. */
1284 if (context
.maybe_derived_type
)
1286 /* For anonymous namespace types we can attempt to build full type.
1287 All derivations must be in this unit (unless we see partial unit). */
1288 if (!type
->anonymous_namespace
|| flag_ltrans
)
1290 for (i
= 0; i
< outer_type
->derived_types
.length(); i
++)
1291 possible_polymorphic_call_targets_1 (nodes
, inserted
,
1293 otr_type
, outer_type
->derived_types
[i
],
1294 otr_token
, outer_type
->type
,
1297 (*slot
)->targets
= nodes
;
1298 (*slot
)->final
= final
;
1302 pointer_set_destroy (inserted
);
1303 pointer_set_destroy (matched_vtables
);
1304 timevar_pop (TV_IPA_VIRTUAL_CALL
);
1308 /* Dump all possible targets of a polymorphic call. */
1311 dump_possible_polymorphic_call_targets (FILE *f
,
1313 HOST_WIDE_INT otr_token
,
1314 const ipa_polymorphic_call_context
&ctx
)
1316 vec
<cgraph_node
*> targets
;
1318 odr_type type
= get_odr_type (otr_type
, false);
1323 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
1326 fprintf (f
, " Targets of polymorphic call of type %i:", type
->id
);
1327 print_generic_expr (f
, type
->type
, TDF_SLIM
);
1328 fprintf (f
, " token %i\n"
1329 " Contained in type:",
1331 print_generic_expr (f
, ctx
.outer_type
, TDF_SLIM
);
1332 fprintf (f
, " at offset "HOST_WIDE_INT_PRINT_DEC
"\n"
1335 final
? "This is full list." :
1336 "This is partial list; extra targets may be defined in other units.",
1337 ctx
.maybe_in_construction
? " (base types included)" : "",
1338 ctx
.maybe_derived_type
? " (derived types included)" : "");
1339 for (i
= 0; i
< targets
.length (); i
++)
1340 fprintf (f
, " %s/%i", targets
[i
]->name (),
1342 fprintf (f
, "\n\n");
1346 /* Return true if N can be possibly target of a polymorphic call of
1347 OTR_TYPE/OTR_TOKEN. */
1350 possible_polymorphic_call_target_p (tree otr_type
,
1351 HOST_WIDE_INT otr_token
,
1352 const ipa_polymorphic_call_context
&ctx
,
1353 struct cgraph_node
*n
)
1355 vec
<cgraph_node
*> targets
;
1357 enum built_in_function fcode
;
1360 if (TREE_CODE (TREE_TYPE (n
->decl
)) == FUNCTION_TYPE
1361 && ((fcode
= DECL_FUNCTION_CODE (n
->decl
))
1362 == BUILT_IN_UNREACHABLE
1363 || fcode
== BUILT_IN_TRAP
))
1366 if (!odr_hash
.is_created ())
1368 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
, ctx
, &final
);
1369 for (i
= 0; i
< targets
.length (); i
++)
1370 if (symtab_semantically_equivalent_p (n
, targets
[i
]))
1373 /* At a moment we allow middle end to dig out new external declarations
1374 as a targets of polymorphic calls. */
1375 if (!final
&& !n
->definition
)
1381 /* After callgraph construction new external nodes may appear.
1382 Add them into the graph. */
1385 update_type_inheritance_graph (void)
1387 struct cgraph_node
*n
;
1389 if (!odr_hash
.is_created ())
1391 free_polymorphic_call_targets_hash ();
1392 timevar_push (TV_IPA_INHERITANCE
);
1393 /* We reconstruct the graph starting from types of all methods seen in the
1395 FOR_EACH_FUNCTION (n
)
1396 if (DECL_VIRTUAL_P (n
->decl
)
1398 && symtab_real_symbol_p (n
))
1399 get_odr_type (method_class_type (TREE_TYPE (n
->decl
)), true);
1400 timevar_pop (TV_IPA_INHERITANCE
);
1404 /* Return true if N looks like likely target of a polymorphic call.
1405 Rule out cxa_pure_virtual, noreturns, function declared cold and
1406 other obvious cases. */
1409 likely_target_p (struct cgraph_node
*n
)
1412 /* cxa_pure_virtual and similar things are not likely. */
1413 if (TREE_CODE (TREE_TYPE (n
->decl
)) != METHOD_TYPE
)
1415 flags
= flags_from_decl_or_type (n
->decl
);
1416 if (flags
& ECF_NORETURN
)
1418 if (lookup_attribute ("cold",
1419 DECL_ATTRIBUTES (n
->decl
)))
1421 if (n
->frequency
< NODE_FREQUENCY_NORMAL
)
1426 /* The ipa-devirt pass.
1427 When polymorphic call has only one likely target in the unit,
1428 turn it into speculative call. */
1433 struct cgraph_node
*n
;
1434 struct pointer_set_t
*bad_call_targets
= pointer_set_create ();
1435 struct cgraph_edge
*e
;
1437 int npolymorphic
= 0, nspeculated
= 0, nconverted
= 0, ncold
= 0;
1438 int nmultiple
= 0, noverwritable
= 0, ndevirtualized
= 0, nnotdefined
= 0;
1439 int nwrong
= 0, nok
= 0, nexternal
= 0;;
1441 FOR_EACH_DEFINED_FUNCTION (n
)
1443 bool update
= false;
1444 if (dump_file
&& n
->indirect_calls
)
1445 fprintf (dump_file
, "\n\nProcesing function %s/%i\n",
1446 n
->name (), n
->order
);
1447 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
1448 if (e
->indirect_info
->polymorphic
)
1450 struct cgraph_node
*likely_target
= NULL
;
1453 vec
<cgraph_node
*>targets
1454 = possible_polymorphic_call_targets
1455 (e
, &final
, &cache_token
);
1459 dump_possible_polymorphic_call_targets
1464 if (!cgraph_maybe_hot_edge_p (e
))
1467 fprintf (dump_file
, "Call is cold\n");
1474 fprintf (dump_file
, "Call is aready speculated\n");
1477 /* When dumping see if we agree with speculation. */
1481 if (pointer_set_contains (bad_call_targets
,
1485 fprintf (dump_file
, "Target list is known to be useless\n");
1489 for (i
= 0; i
< targets
.length (); i
++)
1490 if (likely_target_p (targets
[i
]))
1494 likely_target
= NULL
;
1496 fprintf (dump_file
, "More than one likely target\n");
1500 likely_target
= targets
[i
];
1504 pointer_set_insert (bad_call_targets
, cache_token
);
1507 /* This is reached only when dumping; check if we agree or disagree
1508 with the speculation. */
1511 struct cgraph_edge
*e2
;
1512 struct ipa_ref
*ref
;
1513 cgraph_speculative_call_info (e
, e2
, e
, ref
);
1514 if (cgraph_function_or_thunk_node (e2
->callee
, NULL
)
1515 == cgraph_function_or_thunk_node (likely_target
, NULL
))
1517 fprintf (dump_file
, "We agree with speculation\n");
1522 fprintf (dump_file
, "We disagree with speculation\n");
1527 if (!likely_target
->definition
)
1530 fprintf (dump_file
, "Target is not an definition\n");
1534 /* Do not introduce new references to external symbols. While we
1535 can handle these just well, it is common for programs to
1536 incorrectly with headers defining methods they are linked
1538 if (DECL_EXTERNAL (likely_target
->decl
))
1541 fprintf (dump_file
, "Target is external\n");
1545 if (cgraph_function_body_availability (likely_target
)
1546 <= AVAIL_OVERWRITABLE
1547 && symtab_can_be_discarded (likely_target
))
1550 fprintf (dump_file
, "Target is overwritable\n");
1558 "Speculatively devirtualizing call in %s/%i to %s/%i\n",
1559 n
->name (), n
->order
,
1560 likely_target
->name (),
1561 likely_target
->order
);
1562 if (!symtab_can_be_discarded (likely_target
))
1565 alias
= cgraph (symtab_nonoverwritable_alias
1568 likely_target
= alias
;
1572 cgraph_turn_edge_to_speculative
1573 (e
, likely_target
, e
->count
* 8 / 10, e
->frequency
* 8 / 10);
1577 inline_update_overall_summary (n
);
1579 pointer_set_destroy (bad_call_targets
);
1583 "%i polymorphic calls, %i devirtualized,"
1584 " %i speculatively devirtualized, %i cold\n"
1585 "%i have multiple targets, %i overwritable,"
1586 " %i already speculated (%i agree, %i disagree),"
1587 " %i external, %i not defined\n",
1588 npolymorphic
, ndevirtualized
, nconverted
, ncold
,
1589 nmultiple
, noverwritable
, nspeculated
, nok
, nwrong
,
1590 nexternal
, nnotdefined
);
1591 return ndevirtualized
? TODO_remove_functions
: 0;
1594 /* Gate for IPCP optimization. */
1597 gate_ipa_devirt (void)
1599 return flag_devirtualize_speculatively
&& optimize
;
1604 const pass_data pass_data_ipa_devirt
=
1606 IPA_PASS
, /* type */
1607 "devirt", /* name */
1608 OPTGROUP_NONE
, /* optinfo_flags */
1609 true, /* has_gate */
1610 true, /* has_execute */
1611 TV_IPA_DEVIRT
, /* tv_id */
1612 0, /* properties_required */
1613 0, /* properties_provided */
1614 0, /* properties_destroyed */
1615 0, /* todo_flags_start */
1616 ( TODO_dump_symtab
), /* todo_flags_finish */
1619 class pass_ipa_devirt
: public ipa_opt_pass_d
1622 pass_ipa_devirt (gcc::context
*ctxt
)
1623 : ipa_opt_pass_d (pass_data_ipa_devirt
, ctxt
,
1624 NULL
, /* generate_summary */
1625 NULL
, /* write_summary */
1626 NULL
, /* read_summary */
1627 NULL
, /* write_optimization_summary */
1628 NULL
, /* read_optimization_summary */
1629 NULL
, /* stmt_fixup */
1630 0, /* function_transform_todo_flags_start */
1631 NULL
, /* function_transform */
1632 NULL
) /* variable_transform */
1635 /* opt_pass methods: */
1636 bool gate () { return gate_ipa_devirt (); }
1637 unsigned int execute () { return ipa_devirt (); }
1639 }; // class pass_ipa_devirt
1644 make_pass_ipa_devirt (gcc::context
*ctxt
)
1646 return new pass_ipa_devirt (ctxt
);
1649 #include "gt-ipa-devirt.h"