/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
	In short, the ODR states that:
	1 In any translation unit, a template, type, function, or object can
	  have no more than one definition.  Some of these can have any number
	  of declarations.  A definition provides an instance.
	2 In the entire program, an object or non-inline function cannot have
	  more than one definition; if an object or function is used, it must
	  have exactly one definition.  You can declare an object or function
	  that is never used, in which case you don't have to provide
	  a definition.  In no event can there be more than one definition.
	3 Some things, like types, templates, and extern inline functions, can
	  be defined in more than one translation unit.  For a given entity,
	  each definition must be the same.  Non-extern objects and functions
	  in different translation units are different entities, even if their
	  names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of type information of a polymorphic call.
       It contains two parameters:
	 otr_type is the type of the class whose method is called.
	 otr_token is the index into the virtual table where the address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from the
       virtual table of the base type.  Also BINFO_OFFSET specifies the
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of the base BINFOs (which differs from BINFO_VTABLE of
       the BINFO associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of a virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found in
  an indirect polymorphic edge, all possible polymorphic call targets of the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
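
/* Illustrative sketch (editorial note, not part of the original sources;
   the classes below are made up):

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };

     int call (A *a) { return a->f (); }

   The call a->f () is represented by an OBJ_TYPE_REF whose otr_type is A
   and whose otr_token is the index of f in A's virtual table.  The type
   inheritance graph records that B derives from A, so
   possible_polymorphic_call_targets can enumerate A::f and B::f as the
   possible targets of the call.  */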

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "target.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
  = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache.  */
static pointer_set_t *cached_polymorphic_call_targets;

/* The node of the type inheritance graph.  For each type unique in
   the One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in the unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in the odr_types array.  */
  int id;
  /* Is it in an anonymous namespace?  */
  bool anonymous_namespace;
};

/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   while others represent bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of the type
   and check the presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}
182
183/* One Definition Rule hashtable helpers. */
184
185struct odr_hasher
186{
187 typedef odr_type_d value_type;
188 typedef union tree_node compare_type;
189 static inline hashval_t hash (const value_type *);
190 static inline bool equal (const value_type *, const compare_type *);
191 static inline void remove (value_type *);
192};
193
194/* Produce hash based on type name. */
195
196hashval_t
197hash_type_name (tree t)
198{
199 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
200
201 /* If not in LTO, all main variants are unique, so we can do
202 pointer hash. */
203 if (!in_lto_p)
204 return htab_hash_pointer (t);
205
206 /* Anonymous types are unique. */
207 if (type_in_anonymous_namespace_p (t))
208 return htab_hash_pointer (t);
209
61a74079
JH
210 /* For polymorphic types, we can simply hash the virtual table. */
211 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
212 {
213 tree v = BINFO_VTABLE (TYPE_BINFO (t));
214 hashval_t hash = 0;
215
216 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
217 {
218 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
219 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
220 }
221
222 v = DECL_ASSEMBLER_NAME (v);
61a74079
JH
223 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
224 return hash;
225 }
226
eefe9a99
JH
227 /* Rest is not implemented yet. */
228 gcc_unreachable ();
229}
230
231/* Return the computed hashcode for ODR_TYPE. */
232
233inline hashval_t
234odr_hasher::hash (const value_type *odr_type)
235{
236 return hash_type_name (odr_type->type);
237}
238
/* Compare types T1 and T2 and return true if they are
   equivalent.  */
241
242inline bool
243odr_hasher::equal (const value_type *t1, const compare_type *ct2)
244{
245 tree t2 = const_cast <tree> (ct2);
246
247 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
248 if (t1->type == t2)
249 return true;
250 if (!in_lto_p)
251 return false;
252 return types_same_for_odr (t1->type, t2);
253}
254
/* Free ODR type V.  */
256
257inline void
258odr_hasher::remove (value_type *v)
259{
260 v->bases.release ();
261 v->derived_types.release ();
61a74079
JH
262 if (v->types_set)
263 pointer_set_destroy (v->types_set);
eefe9a99
JH
264 ggc_free (v);
265}
266
267/* ODR type hash used to lookup ODR type based on tree type node. */
268
269typedef hash_table <odr_hasher> odr_hash_type;
270static odr_hash_type odr_hash;
271
272/* ODR types are also stored into ODR_TYPE vector to allow consistent
273 walking. Bases appear before derived types. Vector is garbage collected
274 so we won't end up visiting empty types. */
275
276static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
277#define odr_types (*odr_types_ptr)
278
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */
283
284static void
285add_type_duplicate (odr_type val, tree type)
286{
287 if (!val->types_set)
288 val->types_set = pointer_set_create ();
289
290 /* See if this duplicate is new. */
291 if (!pointer_set_insert (val->types_set, type))
292 {
293 bool merge = true;
294 bool base_mismatch = false;
295 gcc_assert (in_lto_p);
296 vec_safe_push (val->types, type);
297 unsigned int i,j;
298
299 /* First we compare memory layout. */
300 if (!types_compatible_p (val->type, type))
301 {
302 merge = false;
ec77d61f 303 odr_violation_reported = true;
61a74079
JH
304 if (BINFO_VTABLE (TYPE_BINFO (val->type))
305 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
306 "type %qD violates one definition rule ",
307 type))
308 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
309 "a type with the same name but different layout is "
310 "defined in another translation unit");
61a74079
JH
311 if (cgraph_dump_file)
312 {
313 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
314
315 print_node (cgraph_dump_file, "", val->type, 0);
316 putc ('\n',cgraph_dump_file);
317 print_node (cgraph_dump_file, "", type, 0);
318 putc ('\n',cgraph_dump_file);
319 }
320 }
321
322 /* Next sanity check that bases are the same. If not, we will end
323 up producing wrong answers. */
324 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
325 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
326 {
327 odr_type base = get_odr_type
328 (BINFO_TYPE
329 (BINFO_BASE_BINFO (TYPE_BINFO (type),
330 i)),
331 true);
332 if (val->bases.length () <= j || val->bases[j] != base)
333 base_mismatch = true;
334 j++;
335 }
336 if (base_mismatch)
337 {
338 merge = false;
ec77d61f 339 odr_violation_reported = true;
61a74079
JH
340
341 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
342 "type %qD violates one definition rule ",
343 type))
344 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
345 "a type with the same name but different bases is "
346 "defined in another translation unit");
347 if (cgraph_dump_file)
348 {
	      fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
350
351 print_node (cgraph_dump_file, "", val->type, 0);
352 putc ('\n',cgraph_dump_file);
353 print_node (cgraph_dump_file, "", type, 0);
354 putc ('\n',cgraph_dump_file);
355 }
356 }
357
      /* Regularize things a little.  During LTO the same types may come with
	 different BINFOs, either because their virtual table was
	 not merged by tree merging and only later at decl merging, or
	 because one type comes with an external vtable while the other
	 comes with an internal one.  We want to merge equivalent BINFOs
	 to conserve memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace them by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
370 if (!flag_ltrans && merge)
371 {
372 tree master_binfo = TYPE_BINFO (val->type);
373 tree v1 = BINFO_VTABLE (master_binfo);
374 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
375
376 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
377 {
378 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
379 && operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0));
381 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
382 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
383 }
384 gcc_assert (DECL_ASSEMBLER_NAME (v1)
385 == DECL_ASSEMBLER_NAME (v2));
386
387 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
388 {
389 unsigned int i;
390
391 TYPE_BINFO (val->type) = TYPE_BINFO (type);
c3284718 392 for (i = 0; i < val->types->length (); i++)
61a74079
JH
393 {
394 if (TYPE_BINFO ((*val->types)[i])
395 == master_binfo)
396 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
397 }
398 }
399 else
400 TYPE_BINFO (type) = master_binfo;
401 }
402 }
403}
404
eefe9a99
JH
405/* Get ODR type hash entry for TYPE. If INSERT is true, create
406 possibly new entry. */
407
408odr_type
409get_odr_type (tree type, bool insert)
410{
411 odr_type_d **slot;
412 odr_type val;
413 hashval_t hash;
414
415 type = TYPE_MAIN_VARIANT (type);
416 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
417 hash = hash_type_name (type);
418 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
419 if (!slot)
420 return NULL;
421
422 /* See if we already have entry for type. */
423 if (*slot)
424 {
425 val = *slot;
426
      /* With LTO we need to support multiple tree representations of
	 the same ODR type.  */
429 if (val->type != type)
430 add_type_duplicate (val, type);
eefe9a99
JH
431 }
432 else
433 {
434 tree binfo = TYPE_BINFO (type);
435 unsigned int i;
436
437 val = ggc_alloc_cleared_odr_type_d ();
438 val->type = type;
439 val->bases = vNULL;
440 val->derived_types = vNULL;
0e1474e5 441 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
eefe9a99
JH
442 *slot = val;
443 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
447 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
448 {
449 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
450 i)),
451 true);
452 base->derived_types.safe_push (val);
453 val->bases.safe_push (base);
454 }
455 /* First record bases, then add into array so ids are increasing. */
456 if (odr_types_ptr)
c3284718 457 val->id = odr_types.length ();
eefe9a99
JH
458 vec_safe_push (odr_types_ptr, val);
459 }
460 return val;
461}
462
/* Dump ODR type T and all its derived types.  INDENT specifies indentation for
   recursive printing.  */
465
466static void
467dump_odr_type (FILE *f, odr_type t, int indent=0)
468{
469 unsigned int i;
470 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
471 print_generic_expr (f, t->type, TDF_SLIM);
0e1474e5 472 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
eefe9a99
JH
473 if (TYPE_NAME (t->type))
474 {
475 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
476 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
477 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
478 }
c3284718 479 if (t->bases.length ())
eefe9a99
JH
480 {
481 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 482 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
483 fprintf (f, " %i", t->bases[i]->id);
484 fprintf (f, "\n");
485 }
c3284718 486 if (t->derived_types.length ())
eefe9a99
JH
487 {
488 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 489 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
490 dump_odr_type (f, t->derived_types[i], indent + 1);
491 }
492 fprintf (f, "\n");
493}
494
495/* Dump the type inheritance graph. */
496
497static void
498dump_type_inheritance_graph (FILE *f)
499{
500 unsigned int i;
0e1474e5
JH
501 if (!odr_types_ptr)
502 return;
eefe9a99 503 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 504 for (i = 0; i < odr_types.length (); i++)
eefe9a99 505 {
c3284718 506 if (odr_types[i]->bases.length () == 0)
eefe9a99
JH
507 dump_odr_type (f, odr_types[i]);
508 }
c3284718 509 for (i = 0; i < odr_types.length (); i++)
61a74079 510 {
c3284718 511 if (odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
512 {
513 unsigned int j;
514 fprintf (f, "Duplicate tree types for odr type %i\n", i);
515 print_node (f, "", odr_types[i]->type, 0);
c3284718 516 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
517 {
518 tree t;
519 fprintf (f, "duplicate #%i\n", j);
520 print_node (f, "", (*odr_types[i]->types)[j], 0);
521 t = (*odr_types[i]->types)[j];
522 while (TYPE_P (t) && TYPE_CONTEXT (t))
523 {
524 t = TYPE_CONTEXT (t);
525 print_node (f, "", t, 0);
526 }
527 putc ('\n',f);
528 }
529 }
530 }
eefe9a99
JH
531}
532
/* Given method type T, return the type of the class it belongs to.
   Look up the THIS pointer and get its type.  */
535
64cbf23d 536tree
eefe9a99
JH
537method_class_type (tree t)
538{
539 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 540 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
541
542 return TREE_TYPE (first_parm_type);
543}
544
/* Initialize IPA devirt and build the type inheritance graph.  */
546
547void
548build_type_inheritance_graph (void)
549{
b270b096 550 struct symtab_node *n;
eefe9a99
JH
551 FILE *inheritance_dump_file;
552 int flags;
553
554 if (odr_hash.is_created ())
555 return;
556 timevar_push (TV_IPA_INHERITANCE);
557 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
558 odr_hash.create (23);
559
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
b270b096
JH
562 FOR_EACH_SYMBOL (n)
563 if (is_a <cgraph_node> (n)
564 && DECL_VIRTUAL_P (n->decl)
67348ccc
DM
565 && symtab_real_symbol_p (n))
566 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
b270b096
JH
567
  /* Look also for virtual tables of types that do not define any methods.

     We need it in the case where class B has a virtual base of class A
     redefining its virtual method and there is a class C with no virtual
     methods with B as a virtual base.

     Here we output B's virtual method in two variants - for non-virtual
     and virtual inheritance.  B's virtual table has the non-virtual version,
     while C's has the virtual one.

     For this reason we need to know about C in order to include both
     variants of B.  More correctly, record_target_from_binfo should
     add both variants of the method when walking B, but we have no
     link in between them; see the illustrative sketch below.

     We rely on the fact that either the method is exported and thus we
     assume it is called externally, or C is in an anonymous namespace and
     thus we will see the vtable.  */

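  /* Illustrative sketch of the scenario above (editorial note, not part of
     the original sources; the class names are made up):

	struct A { virtual void f (); };
	struct B : virtual A { virtual void f (); };	/* B redefines A::f.  */
	struct C : virtual B { };			/* no virtual methods  */

     Here B::f is emitted in two variants, and only C's virtual table refers
     to the variant used under virtual inheritance, so C's vtable must be
     seen in order to record that target.  */
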
587 else if (is_a <varpool_node> (n)
588 && DECL_VIRTUAL_P (n->decl)
589 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
590 && TYPE_BINFO (DECL_CONTEXT (n->decl))
591 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
592 get_odr_type (DECL_CONTEXT (n->decl), true);
eefe9a99
JH
593 if (inheritance_dump_file)
594 {
595 dump_type_inheritance_graph (inheritance_dump_file);
596 dump_end (TDI_inheritance, inheritance_dump_file);
597 }
598 timevar_pop (TV_IPA_INHERITANCE);
599}
600
/* If TARGET has an associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target directly.
   If TARGET is unknown (NULL) or if it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */
eefe9a99
JH
606
607static void
608maybe_record_node (vec <cgraph_node *> &nodes,
68377e53 609 tree target, pointer_set_t *inserted,
ec77d61f 610 bool can_refer,
68377e53 611 bool *completep)
eefe9a99
JH
612{
613 struct cgraph_node *target_node;
614 enum built_in_function fcode;
615
ec77d61f
JH
616 if (!can_refer)
617 {
      /* The only case when a method of an anonymous namespace becomes
	 unreferable is when we completely optimized it out.  */
620 if (flag_ltrans
621 || !target
622 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
623 *completep = false;
624 return;
625 }
626
68377e53 627 if (!target
eefe9a99 628 /* Those are used to mark impossible scenarios. */
68377e53
JH
629 || (fcode = DECL_FUNCTION_CODE (target))
630 == BUILT_IN_UNREACHABLE
631 || fcode == BUILT_IN_TRAP)
632 return;
633
634 target_node = cgraph_get_node (target);
635
636 if (target_node != NULL
3462aa02 637 && (TREE_PUBLIC (target)
67348ccc
DM
638 || target_node->definition)
639 && symtab_real_symbol_p (target_node))
0e1474e5 640 {
68377e53
JH
641 gcc_assert (!target_node->global.inlined_to);
642 gcc_assert (symtab_real_symbol_p (target_node));
643 if (!pointer_set_insert (inserted, target))
644 {
645 pointer_set_insert (cached_polymorphic_call_targets,
646 target_node);
647 nodes.safe_push (target_node);
648 }
0e1474e5 649 }
68377e53
JH
650 else if (completep
651 && !type_in_anonymous_namespace_p
652 (method_class_type (TREE_TYPE (target))))
653 *completep = true;
eefe9a99
JH
654}
655
/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert the method into the NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with the offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with a defined
   virtual table seen on the way from the class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did the lookup
   of the virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of an anonymous namespace.

   Clear COMPLETEP when we hit an unreferable target.  */
674
675static void
68377e53
JH
676record_target_from_binfo (vec <cgraph_node *> &nodes,
677 tree binfo,
678 tree otr_type,
a3788dde 679 vec <tree> &type_binfos,
68377e53
JH
680 HOST_WIDE_INT otr_token,
681 tree outer_type,
682 HOST_WIDE_INT offset,
683 pointer_set_t *inserted,
684 pointer_set_t *matched_vtables,
ec77d61f
JH
685 bool anonymous,
686 bool *completep)
eefe9a99
JH
687{
688 tree type = BINFO_TYPE (binfo);
689 int i;
690 tree base_binfo;
691
eefe9a99 692
a3788dde
JH
693 if (BINFO_VTABLE (binfo))
694 type_binfos.safe_push (binfo);
68377e53 695 if (types_same_for_odr (type, outer_type))
eefe9a99 696 {
a3788dde
JH
697 int i;
698 tree type_binfo = NULL;
699
      /* Look up the BINFO with the virtual table.  For normal types it is
	 always the last BINFO on the stack.  */
702 for (i = type_binfos.length () - 1; i >= 0; i--)
703 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
704 {
705 type_binfo = type_binfos[i];
706 break;
707 }
708 if (BINFO_VTABLE (binfo))
709 type_binfos.pop ();
710 /* If this is duplicated BINFO for base shared by virtual inheritance,
711 we may not have its associated vtable. This is not a problem, since
712 we will walk it on the other path. */
713 if (!type_binfo)
6d6af792 714 return;
68377e53
JH
715 tree inner_binfo = get_binfo_at_offset (type_binfo,
716 offset, otr_type);
ec77d61f
JH
717 if (!inner_binfo)
718 {
719 gcc_assert (odr_violation_reported);
720 return;
721 }
3462aa02
JH
722 /* For types in anonymous namespace first check if the respective vtable
723 is alive. If not, we know the type can't be called. */
724 if (!flag_ltrans && anonymous)
725 {
68377e53 726 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 727 varpool_node *vnode;
3462aa02
JH
728
729 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
730 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
731 vnode = varpool_get_node (vtable);
67348ccc 732 if (!vnode || !vnode->definition)
3462aa02
JH
733 return;
734 }
68377e53
JH
735 gcc_assert (inner_binfo);
736 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
737 {
ec77d61f
JH
738 bool can_refer;
739 tree target = gimple_get_virt_method_for_binfo (otr_token,
740 inner_binfo,
741 &can_refer);
742 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53 743 }
eefe9a99
JH
744 return;
745 }
746
747 /* Walk bases. */
748 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
750 if (polymorphic_type_binfo_p (base_binfo))
68377e53 751 record_target_from_binfo (nodes, base_binfo, otr_type,
a3788dde 752 type_binfos,
68377e53 753 otr_token, outer_type, offset, inserted,
ec77d61f 754 matched_vtables, anonymous, completep);
a3788dde
JH
755 if (BINFO_VTABLE (binfo))
756 type_binfos.pop ();
eefe9a99
JH
757}
758
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them into NODES, and recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES is used to avoid walking the same vtables twice.
   Clear COMPLETEP if an unreferable target is found.  */
eefe9a99
JH
764
765static void
766possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
767 pointer_set_t *inserted,
768 pointer_set_t *matched_vtables,
769 tree otr_type,
770 odr_type type,
68377e53
JH
771 HOST_WIDE_INT otr_token,
772 tree outer_type,
ec77d61f
JH
773 HOST_WIDE_INT offset,
774 bool *completep)
eefe9a99
JH
775{
776 tree binfo = TYPE_BINFO (type->type);
777 unsigned int i;
a3788dde 778 vec <tree> type_binfos = vNULL;
eefe9a99 779
a3788dde 780 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
68377e53
JH
781 outer_type, offset,
782 inserted, matched_vtables,
ec77d61f 783 type->anonymous_namespace, completep);
a3788dde 784 type_binfos.release ();
c3284718 785 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
786 possible_polymorphic_call_targets_1 (nodes, inserted,
787 matched_vtables,
788 otr_type,
789 type->derived_types[i],
ec77d61f 790 otr_token, outer_type, offset, completep);
eefe9a99
JH
791}
792
793/* Cache of queries for polymorphic call targets.
794
795 Enumerating all call targets may get expensive when there are many
796 polymorphic calls in the program, so we memoize all the previous
797 queries and avoid duplicated work. */
798
799struct polymorphic_call_target_d
800{
eefe9a99 801 HOST_WIDE_INT otr_token;
68377e53
JH
802 ipa_polymorphic_call_context context;
803 odr_type type;
eefe9a99 804 vec <cgraph_node *> targets;
ec77d61f
JH
805 int nonconstruction_targets;
806 bool complete;
eefe9a99
JH
807};
808
809/* Polymorphic call target cache helpers. */
810
811struct polymorphic_call_target_hasher
812{
813 typedef polymorphic_call_target_d value_type;
814 typedef polymorphic_call_target_d compare_type;
815 static inline hashval_t hash (const value_type *);
816 static inline bool equal (const value_type *, const compare_type *);
817 static inline void remove (value_type *);
818};
819
820/* Return the computed hashcode for ODR_QUERY. */
821
822inline hashval_t
823polymorphic_call_target_hasher::hash (const value_type *odr_query)
824{
68377e53
JH
825 hashval_t hash;
826
827 hash = iterative_hash_host_wide_int
828 (odr_query->otr_token,
829 odr_query->type->id);
830 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
831 hash);
832 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
833 return iterative_hash_hashval_t
834 (((int)odr_query->context.maybe_in_construction << 1)
835 | (int)odr_query->context.maybe_derived_type, hash);
eefe9a99
JH
836}
837
838/* Compare cache entries T1 and T2. */
839
840inline bool
841polymorphic_call_target_hasher::equal (const value_type *t1,
842 const compare_type *t2)
843{
68377e53
JH
844 return (t1->type == t2->type && t1->otr_token == t2->otr_token
845 && t1->context.offset == t2->context.offset
846 && t1->context.outer_type == t2->context.outer_type
847 && t1->context.maybe_in_construction
848 == t2->context.maybe_in_construction
849 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
eefe9a99
JH
850}
851
852/* Remove entry in polymorphic call target cache hash. */
853
854inline void
855polymorphic_call_target_hasher::remove (value_type *v)
856{
857 v->targets.release ();
858 free (v);
859}
860
861/* Polymorphic call target query cache. */
862
863typedef hash_table <polymorphic_call_target_hasher>
864 polymorphic_call_target_hash_type;
865static polymorphic_call_target_hash_type polymorphic_call_target_hash;
eefe9a99
JH
866
867/* Destroy polymorphic call target query cache. */
868
869static void
870free_polymorphic_call_targets_hash ()
871{
0e1474e5
JH
872 if (cached_polymorphic_call_targets)
873 {
874 polymorphic_call_target_hash.dispose ();
875 pointer_set_destroy (cached_polymorphic_call_targets);
876 cached_polymorphic_call_targets = NULL;
877 }
eefe9a99
JH
878}
879
/* When a virtual function is removed, we may need to flush the cache.  */
881
882static void
883devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
884{
0e1474e5
JH
885 if (cached_polymorphic_call_targets
886 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
887 free_polymorphic_call_targets_hash ();
888}
889
/* CONTEXT->OUTER_TYPE is the type of the memory object where an object of
   EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
   representation of CONTEXT->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
   CONTEXT to represent it.

   For example when CONTEXT represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for the type at offset sizeof(int), we end up with B and
   offset 0.  If the same is produced by multiple inheritance, we end up
   with A and offset sizeof(int).

   If we cannot find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when the lookup was successful.  */
909
910static bool
911get_class_context (ipa_polymorphic_call_context *context,
912 tree expected_type)
913{
914 tree type = context->outer_type;
915 HOST_WIDE_INT offset = context->offset;
916
917 /* Find the sub-object the constant actually refers to and mark whether it is
918 an artificial one (as opposed to a user-defined one). */
919 while (true)
920 {
921 HOST_WIDE_INT pos, size;
922 tree fld;
923
924 /* On a match, just return what we found. */
925 if (TREE_CODE (type) == TREE_CODE (expected_type)
926 && types_same_for_odr (type, expected_type))
927 {
	  /* A type cannot contain itself at a non-zero offset.  In that case
	     just give up.  */
930 if (offset != 0)
931 goto give_up;
68377e53
JH
932 gcc_assert (offset == 0);
933 return true;
934 }
935
      /* Walk fields and find the corresponding one at OFFSET.  */
937 if (TREE_CODE (type) == RECORD_TYPE)
938 {
939 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
940 {
941 if (TREE_CODE (fld) != FIELD_DECL)
942 continue;
943
944 pos = int_bit_position (fld);
945 size = tree_to_uhwi (DECL_SIZE (fld));
946 if (pos <= offset && (pos + size) > offset)
947 break;
948 }
949
950 if (!fld)
951 goto give_up;
952
953 type = TREE_TYPE (fld);
954 offset -= pos;
955 /* DECL_ARTIFICIAL represents a basetype. */
956 if (!DECL_ARTIFICIAL (fld))
957 {
958 context->outer_type = type;
959 context->offset = offset;
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
962 context->maybe_derived_type = false;
963 }
964 }
965 else if (TREE_CODE (type) == ARRAY_TYPE)
966 {
967 tree subtype = TREE_TYPE (type);
968
969 /* Give up if we don't know array size. */
	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
972 goto give_up;
973 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
974 type = subtype;
975 context->outer_type = type;
976 context->offset = offset;
977 context->maybe_derived_type = false;
978 }
979 /* Give up on anything else. */
980 else
981 goto give_up;
982 }
983
  /* If we failed to find the subtype we look for, give up and fall back to
     the most generic query.  */
986give_up:
987 context->outer_type = expected_type;
988 context->offset = 0;
989 context->maybe_derived_type = true;
990 return false;
991}
992
993/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
994
995static bool
996contains_type_p (tree outer_type, HOST_WIDE_INT offset,
997 tree otr_type)
998{
999 ipa_polymorphic_call_context context = {offset, outer_type,
1000 false, true};
1001 return get_class_context (&context, otr_type);
1002}
1003
390675c8
JH
1004/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1005
1006static tree
85942f45
JH
1007subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1008 tree vtable)
390675c8
JH
1009{
1010 tree v = BINFO_VTABLE (binfo);
1011 int i;
1012 tree base_binfo;
85942f45 1013 unsigned HOST_WIDE_INT this_offset;
390675c8 1014
85942f45
JH
1015 if (v)
1016 {
1017 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1018 gcc_unreachable ();
1019
1020 if (offset == this_offset
1021 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1022 return binfo;
1023 }
390675c8 1024
390675c8
JH
1025 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1026 if (polymorphic_type_binfo_p (base_binfo))
1027 {
1028 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1029 if (base_binfo)
1030 return base_binfo;
1031 }
1032 return NULL;
1033}
1034
85942f45
JH
1035/* T is known constant value of virtual table pointer.
1036 Store virtual table to V and its offset to OFFSET.
1037 Return false if T does not look like virtual table reference. */
390675c8 1038
85942f45
JH
1039bool
1040vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
390675c8
JH
1041{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain the object's BINFO from the context of the virtual table.
     This one contains a pointer to the virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.
1047
1048 In the case of virtual inheritance, the virtual tables may
1049 be nested, i.e. the offset may be different from 16 and we may
1050 need to dive into the type representation. */
85942f45 1051 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
1052 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1053 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1054 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1055 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1056 == VAR_DECL)
1057 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1058 (TREE_OPERAND (t, 0), 0), 0)))
1059 {
85942f45
JH
1060 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1061 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1062 return true;
390675c8 1063 }
85942f45
JH
1064
1065 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1066 We need to handle it when T comes from static variable initializer or
1067 BINFO. */
1068 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1069 {
1070 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1071 t = TREE_OPERAND (t, 0);
1072 }
1073 else
1074 *offset = 0;
1075
1076 if (TREE_CODE (t) != ADDR_EXPR)
1077 return false;
1078 *v = TREE_OPERAND (t, 0);
1079 return true;
1080}
1081
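/* Illustrative note (editorial, not from the original sources): for a class A
   whose vtable is mangled _ZTV1A, the two forms accepted above roughly look
   like

     &MEM[(void *) &_ZTV1A + 16B]		(ADDR_EXPR of a MEM_REF)
     a POINTER_PLUS_EXPR of &_ZTV1A and 16	(static initializers, BINFOs)

   and both yield *v == _ZTV1A and *offset == 16.  */
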
1082/* T is known constant value of virtual table pointer. Return BINFO of the
1083 instance type. */
1084
1085tree
1086vtable_pointer_value_to_binfo (tree t)
1087{
1088 tree vtable;
1089 unsigned HOST_WIDE_INT offset;
1090
1091 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1092 return NULL_TREE;
1093
  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have a BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see a store of a BINFO we however may assume
     that standard folding will be able to cope with it.  */
1099 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1100 offset, vtable);
390675c8
JH
1101}
1102
5bccb77a
JH
/* Produce a polymorphic call context for a call of a method of an instance
   that is located within BASE (which is assumed to be a decl) at OFFSET.  */
1105
1106static void
1107get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1108 tree base, HOST_WIDE_INT offset)
1109{
1110 gcc_assert (DECL_P (base));
1111
1112 context->outer_type = TREE_TYPE (base);
1113 context->offset = offset;
1114 /* Make very conservative assumption that all objects
1115 may be in construction.
1116 TODO: ipa-prop already contains code to tell better.
1117 merge it later. */
1118 context->maybe_in_construction = true;
1119 context->maybe_derived_type = false;
1120}
1121
/* CST is an invariant (address of decl); try to get a meaningful
   polymorphic call context for a polymorphic call of a method
   of an instance of OTR_TYPE that is located at OFFSET of this invariant.
   Return FALSE if nothing meaningful can be found.  */
1126
1127bool
1128get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1129 tree cst,
1130 tree otr_type,
1131 HOST_WIDE_INT offset)
1132{
1133 HOST_WIDE_INT offset2, size, max_size;
1134 tree base;
1135
1136 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 1137 return false;
5bccb77a
JH
1138
1139 cst = TREE_OPERAND (cst, 0);
1140 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
1141 if (!DECL_P (base) || max_size == -1 || max_size != size)
1142 return false;
5bccb77a
JH
1143
1144 /* Only type inconsistent programs can have otr_type that is
1145 not part of outer type. */
79c7de84
EB
1146 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1147 return false;
5bccb77a 1148
79c7de84 1149 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
1150 return true;
1151}
1152
68377e53
JH
/* Given the call REF in FNDECL, determine the class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return a pointer to the object described by the context.  */
1156
1157tree
1158get_polymorphic_call_info (tree fndecl,
1159 tree ref,
1160 tree *otr_type,
1161 HOST_WIDE_INT *otr_token,
1162 ipa_polymorphic_call_context *context)
1163{
1164 tree base_pointer;
1165 *otr_type = obj_type_ref_class (ref);
1166 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1167
1168 /* Set up basic info in case we find nothing interesting in the analysis. */
1169 context->outer_type = *otr_type;
1170 context->offset = 0;
1171 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1172 context->maybe_derived_type = true;
1173 context->maybe_in_construction = false;
1174
1175 /* Walk SSA for outer object. */
1176 do
1177 {
1178 if (TREE_CODE (base_pointer) == SSA_NAME
1179 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1180 && SSA_NAME_DEF_STMT (base_pointer)
1181 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1182 {
1183 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1184 STRIP_NOPS (base_pointer);
1185 }
1186 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1187 {
1188 HOST_WIDE_INT size, max_size;
1189 HOST_WIDE_INT offset2;
1190 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1191 &offset2, &size, &max_size);
1192
1193 /* If this is a varying address, punt. */
1194 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1195 && max_size != -1
1196 && max_size == size)
1197 {
	      /* We found a dereference of a pointer.  The type of the pointer
		 and of the MEM_REF is meaningless, but we can look further.  */
1200 if (TREE_CODE (base) == MEM_REF)
1201 {
1202 base_pointer = TREE_OPERAND (base, 0);
1203 context->offset
1204 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
1205 context->outer_type = NULL;
1206 }
1207 /* We found base object. In this case the outer_type
1208 is known. */
1209 else if (DECL_P (base))
1210 {
7656ee72 1211 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
1212
1213 /* Only type inconsistent programs can have otr_type that is
1214 not part of outer type. */
7656ee72
JH
1215 if (!contains_type_p (TREE_TYPE (base),
1216 context->offset + offset2, *otr_type))
3e86c6a8
JH
1217 {
1218 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1219 code sequences; we arrange the calls to be builtin_unreachable
1220 later. */
1221 *otr_token = INT_MAX;
1222 return base_pointer;
1223 }
5bccb77a
JH
1224 get_polymorphic_call_info_for_decl (context, base,
1225 context->offset + offset2);
7656ee72 1226 return NULL;
68377e53
JH
1227 }
1228 else
1229 break;
1230 }
1231 else
1232 break;
1233 }
1234 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1235 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1236 {
1237 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1238 * BITS_PER_UNIT;
1239 base_pointer = TREE_OPERAND (base_pointer, 0);
1240 }
1241 else
1242 break;
1243 }
1244 while (true);
1245
1246 /* Try to determine type of the outer object. */
1247 if (TREE_CODE (base_pointer) == SSA_NAME
1248 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1249 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1250 {
1251 /* See if parameter is THIS pointer of a method. */
1252 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1253 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1254 {
1255 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1256 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1257
	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case the outer type is less
	     informative than the inner type and we should forget
	     about it.  */
1262 if (!contains_type_p (context->outer_type, context->offset,
1263 *otr_type))
1264 {
1265 context->outer_type = NULL;
1266 return base_pointer;
1267 }
1268
1269 /* If the function is constructor or destructor, then
d74db8ff 1270 the type is possibly in construction, but we know
68377e53
JH
1271 it is not derived type. */
1272 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1273 || DECL_CXX_DESTRUCTOR_P (fndecl))
1274 {
1275 context->maybe_in_construction = true;
1276 context->maybe_derived_type = false;
1277 }
1278 else
1279 {
1280 context->maybe_derived_type = true;
1281 context->maybe_in_construction = false;
1282 }
1283 return base_pointer;
1284 }
1285 /* Non-PODs passed by value are really passed by invisible
1286 reference. In this case we also know the type of the
1287 object. */
1288 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1289 {
1290 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1291 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1292 /* Only type inconsistent programs can have otr_type that is
1293 not part of outer type. */
1294 if (!contains_type_p (context->outer_type, context->offset,
1295 *otr_type))
1296 {
3e86c6a8
JH
1297 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1298 code sequences; we arrange the calls to be builtin_unreachable
1299 later. */
1300 *otr_token = INT_MAX;
68377e53
JH
1301 return base_pointer;
1302 }
1303 context->maybe_derived_type = false;
1304 context->maybe_in_construction = false;
1305 return base_pointer;
1306 }
1307 }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this kind of flow-sensitive analysis yet.  */
1311 return base_pointer;
1312}
1313
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them into NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
1319
1320static void
1321record_targets_from_bases (tree otr_type,
1322 HOST_WIDE_INT otr_token,
1323 tree outer_type,
1324 HOST_WIDE_INT offset,
ec77d61f 1325 vec <cgraph_node *> &nodes,
68377e53
JH
1326 pointer_set_t *inserted,
1327 pointer_set_t *matched_vtables,
1328 bool *completep)
1329{
1330 while (true)
1331 {
1332 HOST_WIDE_INT pos, size;
1333 tree base_binfo;
1334 tree fld;
1335
1336 if (types_same_for_odr (outer_type, otr_type))
1337 return;
1338
1339 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1340 {
1341 if (TREE_CODE (fld) != FIELD_DECL)
1342 continue;
1343
1344 pos = int_bit_position (fld);
1345 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
1346 if (pos <= offset && (pos + size) > offset
1347 /* Do not get confused by zero sized bases. */
1348 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
1349 break;
1350 }
      /* Within a class type we should always find the corresponding fields.  */
1352 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1353
      /* Non-base types should have been stripped by outer_class_type.  */
1355 gcc_assert (DECL_ARTIFICIAL (fld));
1356
1357 outer_type = TREE_TYPE (fld);
1358 offset -= pos;
1359
1360 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1361 offset, otr_type);
ec77d61f
JH
1362 if (!base_binfo)
1363 {
1364 gcc_assert (odr_violation_reported);
1365 return;
1366 }
68377e53
JH
1367 gcc_assert (base_binfo);
1368 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1369 {
ec77d61f
JH
1370 bool can_refer;
1371 tree target = gimple_get_virt_method_for_binfo (otr_token,
1372 base_binfo,
1373 &can_refer);
1374 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
1375 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1376 }
1377 }
1378}
1379
/* When a virtual table is removed, we may need to flush the cache.  */
1381
1382static void
2c8326a5 1383devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
1384 void *d ATTRIBUTE_UNUSED)
1385{
1386 if (cached_polymorphic_call_targets
67348ccc
DM
1387 && DECL_VIRTUAL_P (n->decl)
1388 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
1389 free_polymorphic_call_targets_hash ();
1390}
1391
/* Return a vector containing possible targets of a polymorphic call of type
   OTR_TYPE calling a method with OTR_TOKEN within a type of OTR_OUTER_TYPE
   and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual methods.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of the base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
   in the target cache.  If the user needs to visit every target list
   just once, it can memoize them.

   NONCONSTRUCTION_TARGETS specifies the number of targets under the
   assumption that the type is not in construction.  Those targets appear
   first in the returned vector.

   The returned vector is placed into the cache.  It is NOT the caller's
   responsibility to free it.  The vector can be freed on a
   cgraph_remove_node call if the particular node is a virtual function
   present in the cache.  */
1415
1416vec <cgraph_node *>
1417possible_polymorphic_call_targets (tree otr_type,
1418 HOST_WIDE_INT otr_token,
68377e53
JH
1419 ipa_polymorphic_call_context context,
1420 bool *completep,
ec77d61f
JH
1421 void **cache_token,
1422 int *nonconstruction_targetsp)
eefe9a99
JH
1423{
1424 static struct cgraph_node_hook_list *node_removal_hook_holder;
1425 pointer_set_t *inserted;
1426 pointer_set_t *matched_vtables;
add5c763 1427 vec <cgraph_node *> nodes = vNULL;
68377e53 1428 odr_type type, outer_type;
eefe9a99
JH
1429 polymorphic_call_target_d key;
1430 polymorphic_call_target_d **slot;
1431 unsigned int i;
1432 tree binfo, target;
ec77d61f
JH
1433 bool complete;
1434 bool can_refer;
eefe9a99 1435
3e86c6a8 1436 /* If ODR is not initialized, return empty incomplete list. */
79c7de84
EB
1437 if (!odr_hash.is_created ())
1438 {
1439 if (completep)
1440 *completep = false;
ec77d61f
JH
1441 if (nonconstruction_targetsp)
1442 *nonconstruction_targetsp = 0;
79c7de84
EB
1443 return nodes;
1444 }
add5c763 1445
3e86c6a8
JH
1446 /* If we hit type inconsistency, just return empty list of targets. */
1447 if (otr_token == INT_MAX)
1448 {
1449 if (completep)
1450 *completep = true;
1451 if (nonconstruction_targetsp)
1452 *nonconstruction_targetsp = 0;
1453 return nodes;
1454 }
1455
68377e53 1456 type = get_odr_type (otr_type, true);
eefe9a99 1457
68377e53 1458 /* Lookup the outer class type we want to walk. */
3e86c6a8
JH
1459 if (context.outer_type
1460 && !get_class_context (&context, otr_type))
1461 {
1462 if (completep)
1463 *completep = false;
1464 if (nonconstruction_targetsp)
1465 *nonconstruction_targetsp = 0;
1466 return nodes;
1467 }
eefe9a99 1468
79c7de84 1469 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
1470
  /* Without an outer type, we have no use for the offset.  Just do the
     basic search from the inner type.  */
1473 if (!context.outer_type)
1474 {
1475 context.outer_type = otr_type;
1476 context.offset = 0;
1477 }
  /* We need to update our hierarchy if the type does not exist.  */
1479 outer_type = get_odr_type (context.outer_type, true);
1480 /* If outer and inner type match, there are no bases to see. */
1481 if (type == outer_type)
1482 context.maybe_in_construction = false;
ec77d61f 1483 /* If the type is complete, there are no derivations. */
68377e53
JH
1484 if (TYPE_FINAL_P (outer_type->type))
1485 context.maybe_derived_type = false;
eefe9a99
JH
1486
1487 /* Initialize query cache. */
1488 if (!cached_polymorphic_call_targets)
1489 {
1490 cached_polymorphic_call_targets = pointer_set_create ();
1491 polymorphic_call_target_hash.create (23);
1492 if (!node_removal_hook_holder)
3462aa02
JH
1493 {
1494 node_removal_hook_holder =
1495 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1496 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1497 NULL);
1498 }
eefe9a99
JH
1499 }
1500
1501 /* Lookup cached answer. */
1502 key.type = type;
1503 key.otr_token = otr_token;
68377e53 1504 key.context = context;
eefe9a99
JH
1505 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1506 if (cache_token)
1507 *cache_token = (void *)*slot;
1508 if (*slot)
68377e53
JH
1509 {
1510 if (completep)
ec77d61f
JH
1511 *completep = (*slot)->complete;
1512 if (nonconstruction_targetsp)
1513 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
68377e53
JH
1514 return (*slot)->targets;
1515 }
1516
ec77d61f 1517 complete = true;
eefe9a99
JH
1518
1519 /* Do actual search. */
1520 timevar_push (TV_IPA_VIRTUAL_CALL);
1521 *slot = XCNEW (polymorphic_call_target_d);
1522 if (cache_token)
68377e53 1523 *cache_token = (void *)*slot;
eefe9a99
JH
1524 (*slot)->type = type;
1525 (*slot)->otr_token = otr_token;
68377e53 1526 (*slot)->context = context;
eefe9a99
JH
1527
1528 inserted = pointer_set_create ();
1529 matched_vtables = pointer_set_create ();
1530
1531 /* First see virtual method of type itself. */
68377e53
JH
1532 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1533 context.offset, otr_type);
ec77d61f
JH
1534 if (binfo)
1535 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1536 &can_refer);
1537 else
68377e53 1538 {
ec77d61f
JH
1539 gcc_assert (odr_violation_reported);
1540 target = NULL;
1541 }
68377e53 1542
ec77d61f
JH
1543 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1544
1545 if (target)
1546 {
      /* In the case we get a complete (final) method, we don't need
	 to walk derivations.  */
1549 if (DECL_FINAL_P (target))
1550 context.maybe_derived_type = false;
1551 }
ec77d61f
JH
1552 else
1553 gcc_assert (!complete);
79c7de84 1554
eefe9a99
JH
1555 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1556
ec77d61f 1557 /* Next walk recursively all derived types. */
68377e53
JH
1558 if (context.maybe_derived_type)
1559 {
1560 /* For anonymous namespace types we can attempt to build full type.
1561 All derivations must be in this unit (unless we see partial unit). */
1562 if (!type->anonymous_namespace || flag_ltrans)
ec77d61f 1563 complete = false;
68377e53
JH
1564 for (i = 0; i < outer_type->derived_types.length(); i++)
1565 possible_polymorphic_call_targets_1 (nodes, inserted,
1566 matched_vtables,
79c7de84
EB
1567 otr_type,
1568 outer_type->derived_types[i],
68377e53 1569 otr_token, outer_type->type,
ec77d61f 1570 context.offset, &complete);
68377e53 1571 }
79c7de84 1572
ec77d61f
JH
1573 /* Finally walk bases, if asked to. */
1574 (*slot)->nonconstruction_targets = nodes.length();
1575 if (context.maybe_in_construction)
1576 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1577 context.offset, nodes, inserted,
1578 matched_vtables, &complete);
1579
eefe9a99 1580 (*slot)->targets = nodes;
ec77d61f 1581 (*slot)->complete = complete;
68377e53 1582 if (completep)
ec77d61f
JH
1583 *completep = complete;
1584 if (nonconstruction_targetsp)
1585 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
eefe9a99
JH
1586
1587 pointer_set_destroy (inserted);
1588 pointer_set_destroy (matched_vtables);
1589 timevar_pop (TV_IPA_VIRTUAL_CALL);
1590 return nodes;
1591}
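
/* Illustrative usage sketch (editorial note, not part of the original
   sources; OTR_TYPE, OTR_TOKEN and CTX stand for values taken from a
   polymorphic call site):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, ctx,
					    &final, NULL, NULL);
     if (final && targets.length () == 1)
       (...) the call has exactly one possible target, targets[0].

   The returned vector is owned by the cache; the caller must not release
   it.  */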
1592
1593/* Dump all possible targets of a polymorphic call. */
1594
1595void
1596dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
1597 tree otr_type,
1598 HOST_WIDE_INT otr_token,
1599 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
1600{
1601 vec <cgraph_node *> targets;
1602 bool final;
1603 odr_type type = get_odr_type (otr_type, false);
1604 unsigned int i;
ec77d61f 1605 int nonconstruction;
eefe9a99
JH
1606
1607 if (!type)
1608 return;
1609 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 1610 ctx,
ec77d61f 1611 &final, NULL, &nonconstruction);
68377e53 1612 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 1613 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
1614 fprintf (f, " token %i\n", (int)otr_token);
1615 if (ctx.outer_type || ctx.offset)
1616 {
1617 fprintf (f, " Contained in type:");
1618 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1619 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1620 ctx.offset);
1621 }
1622
1623 fprintf (f, " %s%s%s\n ",
1624 final ? "This is a complete list." :
68377e53
JH
1625 "This is partial list; extra targets may be defined in other units.",
1626 ctx.maybe_in_construction ? " (base types included)" : "",
1627 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 1628 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
1629 {
1630 char *name = NULL;
1631 if (i == (unsigned)nonconstruction)
	fprintf (f, "\n If the type is in construction,"
		 " then additional targets are:\n"
		 " ");
1635 if (in_lto_p)
1636 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1637 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1638 if (in_lto_p)
1639 free (name);
1640 if (!targets[i]->definition)
1641 fprintf (f, " (no definition%s)",
1642 DECL_DECLARED_INLINE_P (targets[i]->decl)
1643 ? " inline" : "");
1644 }
68377e53 1645 fprintf (f, "\n\n");
eefe9a99
JH
1646}
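
/* For illustration, the dump produced above looks roughly like this
   (a sketch; type ids, symbol orders and names depend on the unit
   being compiled):

     Targets of polymorphic call of type 4:struct Base token 2
       This is a complete list. (derived types included)
        virtual Derived::foo/21 virtual Base::foo/17 (no definition)

   Targets printed after the "If the type is in construction" marker are
   possible only while the object is being constructed or destructed.  */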


/* Return true if N can possibly be a target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
                                    HOST_WIDE_INT otr_token,
                                    const ipa_polymorphic_call_context &ctx,
                                    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* __builtin_unreachable and __builtin_trap are used as stand-ins for
     targets that can never be called at run time, so accept them
     unconditionally.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
          == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At the moment we allow the middle end to dig out new external
     declarations as targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}


/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
        && !n->definition
        && symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}


/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturn functions, functions declared cold
   and other obvious cases.  */

bool
likely_target_p (struct cgraph_node *n)
{
  int flags;
  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
                        DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  return true;
}

/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
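
/* For illustration only (a sketch, not the exact code this pass emits):
   a polymorphic call

       obj->foo ();

   whose only likely target is A::foo becomes a speculative call edge
   that is later expanded to roughly

       if (callee == &A::foo)
         A::foo (obj);          // direct call, visible to the inliner
       else
         obj->foo ();           // original indirect call kept as fallback

   where CALLEE stands for the function pointer loaded from the vtable.
   The conversion itself is done by cgraph_turn_edge_to_speculative
   below.  */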

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
        fprintf (dump_file, "\n\nProcessing function %s/%i\n",
                 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
        if (e->indirect_info->polymorphic)
          {
            struct cgraph_node *likely_target = NULL;
            void *cache_token;
            bool final;
            int nonconstruction_targets;
            vec <cgraph_node *> targets
               = possible_polymorphic_call_targets
                    (e, &final, &cache_token, &nonconstruction_targets);
            unsigned int i;

            if (dump_file)
              dump_possible_polymorphic_call_targets
                (dump_file, e);

            npolymorphic++;

            if (!cgraph_maybe_hot_edge_p (e))
              {
                if (dump_file)
                  fprintf (dump_file, "Call is cold\n\n");
                ncold++;
                continue;
              }
            if (e->speculative)
              {
                if (dump_file)
                  fprintf (dump_file, "Call is already speculated\n\n");
                nspeculated++;

                /* When dumping, see if we agree with the speculation.  */
                if (!dump_file)
                  continue;
              }
            if (pointer_set_contains (bad_call_targets,
                                      cache_token))
              {
                if (dump_file)
                  fprintf (dump_file, "Target list is known to be useless\n\n");
                nmultiple++;
                continue;
              }
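            /* Look for a unique likely target among the possible ones.
               A second likely target within the first NONCONSTRUCTION_TARGETS
               entries disqualifies the call; extra matches that are possible
               only while the object is still in construction do not.  */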
            for (i = 0; i < targets.length (); i++)
              if (likely_target_p (targets[i]))
                {
                  if (likely_target)
                    {
                      if (i < (unsigned) nonconstruction_targets)
                        {
                          likely_target = NULL;
                          if (dump_file)
                            fprintf (dump_file, "More than one likely target\n\n");
                          nmultiple++;
                        }
                      break;
                    }
                  likely_target = targets[i];
                }
            if (!likely_target)
              {
                pointer_set_insert (bad_call_targets, cache_token);
                continue;
              }
            /* This is reached only when dumping; check if we agree or disagree
               with the speculation.  */
            if (e->speculative)
              {
                struct cgraph_edge *e2;
                struct ipa_ref *ref;
                cgraph_speculative_call_info (e, e2, e, ref);
                if (cgraph_function_or_thunk_node (e2->callee, NULL)
                    == cgraph_function_or_thunk_node (likely_target, NULL))
                  {
                    fprintf (dump_file, "We agree with speculation\n\n");
                    nok++;
                  }
                else
                  {
                    fprintf (dump_file, "We disagree with speculation\n\n");
                    nwrong++;
                  }
                continue;
              }
            if (!likely_target->definition)
              {
                if (dump_file)
                  fprintf (dump_file, "Target is not a definition\n\n");
                nnotdefined++;
                continue;
              }
            /* Do not introduce new references to external symbols.  While we
               could handle these just fine, it is common for programs to use
               headers that define methods they are never actually linked
               with.  */
            if (DECL_EXTERNAL (likely_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is external\n\n");
                nexternal++;
                continue;
              }
            /* Don't use an implicitly-declared destructor (c++/58678).  */
            struct cgraph_node *non_thunk_target
              = cgraph_function_node (likely_target);
            if (DECL_ARTIFICIAL (non_thunk_target->decl)
                && DECL_COMDAT (non_thunk_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is artificial\n\n");
                nartificial++;
                continue;
              }
            if (cgraph_function_body_availability (likely_target)
                  <= AVAIL_OVERWRITABLE
                && symtab_can_be_discarded (likely_target))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is overwritable\n\n");
                noverwritable++;
                continue;
              }
            else
              {
                if (dump_file)
                  fprintf (dump_file,
                           "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
                           n->name (), n->order,
                           likely_target->name (),
                           likely_target->order);
                if (!symtab_can_be_discarded (likely_target))
                  {
                    cgraph_node *alias;
                    alias = cgraph (symtab_nonoverwritable_alias
                                     (likely_target));
                    if (alias)
                      likely_target = alias;
                  }
                nconverted++;
                update = true;
                /* Turn the indirect edge into a speculative one, assigning
                   the direct call 80% of the original count and frequency.  */
                cgraph_turn_edge_to_speculative
                  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
              }
          }
      if (update)
        inline_update_overall_summary (n);
    }
  pointer_set_destroy (bad_call_targets);

  if (dump_file)
    fprintf (dump_file,
             "%i polymorphic calls, %i devirtualized,"
             " %i speculatively devirtualized, %i cold\n"
             "%i have multiple targets, %i overwritable,"
             " %i already speculated (%i agree, %i disagree),"
             " %i external, %i not defined, %i artificial\n",
             npolymorphic, ndevirtualized, nconverted, ncold,
             nmultiple, noverwritable, nspeculated, nok, nwrong,
             nexternal, nnotdefined, nartificial);
  return ndevirtualized ? TODO_remove_functions : 0;
}

/* Gate for speculative IPA devirtualization optimization.  */

static bool
gate_ipa_devirt (void)
{
  return (flag_devirtualize
          && flag_devirtualize_speculatively
          && optimize);
}
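
/* Note: -fdevirtualize and -fdevirtualize-speculatively are both enabled
   by default at -O2 and above, so the pass normally runs for optimized
   builds unless one of the flags is turned off explicitly.  */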

namespace {

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}

#include "gt-ipa-devirt.h"