]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-devirt.c
rm a bunch of _stat allocation functions
[thirdparty/gcc.git] / gcc / ipa-devirt.c
CommitLineData
eefe9a99
JH
1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
23a5b65a 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
eefe9a99
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
 22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
0e1474e5 40 This is the Gimple representation of type information of a polymorphic call.
eefe9a99
JH
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
0e1474e5 43 otr_token is the index into virtual table where address is taken.
eefe9a99
JH
44
45 BINFO
46 This is the type inheritance information attached to each tree
 47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
0e1474e5 58 virtual table of the base type. Also BINFO_OFFSET specifies
eefe9a99
JH
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
 63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
0e1474e5 75 or from DECL_VINDEX of a given virtual table.
eefe9a99
JH
76
77 polymorphic (indirect) call
 78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
0e1474e5 89 inserted into the graph. Also types without virtual methods are not
eefe9a99
JH
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
0e1474e5 98 Edges are represented by odr_type->base and odr_type->derived_types.
eefe9a99
JH
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
bbc9396b
JH
104
105 pass_ipa_devirt performs simple speculative devirtualization.
eefe9a99
JH
106*/
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
4d648807 112#include "tree.h"
d8a2d370
DN
113#include "print-tree.h"
114#include "calls.h"
eefe9a99 115#include "cgraph.h"
d8a2d370 116#include "expr.h"
eefe9a99 117#include "tree-pass.h"
eefe9a99
JH
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
2fb9a547
AM
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
eefe9a99 127#include "gimple.h"
bbc9396b 128#include "ipa-inline.h"
61a74079 129#include "diagnostic.h"
68377e53 130#include "tree-dfa.h"
ec77d61f
JH
131#include "demangle.h"
132
133static bool odr_violation_reported = false;
68377e53
JH
134
135/* Dummy polymorphic call context. */
136
137const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL, false, true};
eefe9a99 139
0e1474e5
JH
140/* Pointer set of all call targets appearing in the cache. */
141static pointer_set_t *cached_polymorphic_call_targets;
142
eefe9a99
JH
143/* The node of type inheritance graph. For each type unique in
144 One Defintion Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
146
147struct GTY(()) odr_type_d
148{
eefe9a99
JH
149 /* leader type. */
150 tree type;
151 /* All bases. */
152 vec<odr_type> GTY((skip)) bases;
153 /* All derrived types with virtual methods seen in unit. */
154 vec<odr_type> GTY((skip)) derived_types;
0e1474e5 155
61a74079
JH
156 /* All equivalent types, if more than one. */
157 vec<tree, va_gc> *types;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t * GTY((skip)) types_set;
160
0e1474e5
JH
161 /* Unique ID indexing the type in odr_types array. */
162 int id;
eefe9a99
JH
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace;
2d1644bf
JH
165 /* Do we know about all derivations of given type? */
166 bool all_derivations_known;
eefe9a99
JH
167};
168
169
0e1474e5
JH
170/* Return true if BINFO corresponds to a type with virtual methods.
171
172 Every type has several BINFOs. One is the BINFO associated by the type
173 while other represents bases of derived types. The BINFOs representing
174 bases do not have BINFO_VTABLE pointer set when this is the single
175 inheritance (because vtables are shared). Look up the BINFO of type
176 and check presence of its vtable. */
eefe9a99
JH
177
178static inline bool
179polymorphic_type_binfo_p (tree binfo)
180{
181 /* See if BINFO's type has an virtual table associtated with it. */
182 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
183}
184
2d1644bf
JH
185/* Return TRUE if all derived types of T are known and thus
186 we may consider the walk of derived type complete.
187
188 This is typically true only for final anonymous namespace types and types
189 defined within functions (that may be COMDAT and thus shared across units,
190 but with the same set of derived types). */
191
192static bool
193type_all_derivations_known_p (tree t)
194{
195 if (TYPE_FINAL_P (t))
196 return true;
197 if (flag_ltrans)
198 return false;
199 if (type_in_anonymous_namespace_p (t))
200 return true;
201 return (decl_function_context (TYPE_NAME (t)) != NULL);
202}
203
204/* Return TURE if type's constructors are all visible. */
205
206static bool
207type_all_ctors_visible_p (tree t)
208{
209 return !flag_ltrans
210 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
211 /* We can not always use type_all_derivations_known_p.
212 For function local types we must assume case where
213 the function is COMDAT and shared in between units.
214
215 TODO: These cases are quite easy to get, but we need
216 to keep track of C++ privatizing via -Wno-weak
217 as well as the IPA privatizing. */
218 && type_in_anonymous_namespace_p (t);
219}
220
221/* Return TRUE if type may have instance. */
222
223static bool
224type_possibly_instantiated_p (tree t)
225{
226 tree vtable;
227 varpool_node *vnode;
228
229 /* TODO: Add abstract types here. */
230 if (!type_all_ctors_visible_p (t))
231 return true;
232
233 vtable = BINFO_VTABLE (TYPE_BINFO (t));
234 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
235 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
236 vnode = varpool_get_node (vtable);
237 return vnode && vnode->definition;
238}
239
eefe9a99
JH
240/* One Definition Rule hashtable helpers. */
241
242struct odr_hasher
243{
244 typedef odr_type_d value_type;
245 typedef union tree_node compare_type;
246 static inline hashval_t hash (const value_type *);
247 static inline bool equal (const value_type *, const compare_type *);
248 static inline void remove (value_type *);
249};
250
251/* Produce hash based on type name. */
252
253hashval_t
254hash_type_name (tree t)
255{
256 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
257
258 /* If not in LTO, all main variants are unique, so we can do
259 pointer hash. */
260 if (!in_lto_p)
261 return htab_hash_pointer (t);
262
263 /* Anonymous types are unique. */
264 if (type_in_anonymous_namespace_p (t))
265 return htab_hash_pointer (t);
266
61a74079
JH
267 /* For polymorphic types, we can simply hash the virtual table. */
268 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
269 {
270 tree v = BINFO_VTABLE (TYPE_BINFO (t));
271 hashval_t hash = 0;
272
273 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
274 {
275 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
276 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
277 }
278
279 v = DECL_ASSEMBLER_NAME (v);
61a74079
JH
280 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
281 return hash;
282 }
283
eefe9a99
JH
284 /* Rest is not implemented yet. */
285 gcc_unreachable ();
286}
287
288/* Return the computed hashcode for ODR_TYPE. */
289
290inline hashval_t
291odr_hasher::hash (const value_type *odr_type)
292{
293 return hash_type_name (odr_type->type);
294}
295
0e1474e5 296/* Compare types T1 and T2 and return true if they are
eefe9a99
JH
297 equivalent. */
298
299inline bool
300odr_hasher::equal (const value_type *t1, const compare_type *ct2)
301{
302 tree t2 = const_cast <tree> (ct2);
303
304 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
305 if (t1->type == t2)
306 return true;
307 if (!in_lto_p)
308 return false;
309 return types_same_for_odr (t1->type, t2);
310}
311
0e1474e5 312/* Free ODR type V. */
eefe9a99
JH
313
314inline void
315odr_hasher::remove (value_type *v)
316{
317 v->bases.release ();
318 v->derived_types.release ();
61a74079
JH
319 if (v->types_set)
320 pointer_set_destroy (v->types_set);
eefe9a99
JH
321 ggc_free (v);
322}
323
324/* ODR type hash used to lookup ODR type based on tree type node. */
325
326typedef hash_table <odr_hasher> odr_hash_type;
327static odr_hash_type odr_hash;
328
329/* ODR types are also stored into ODR_TYPE vector to allow consistent
330 walking. Bases appear before derived types. Vector is garbage collected
331 so we won't end up visiting empty types. */
332
333static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
334#define odr_types (*odr_types_ptr)
335
61a74079
JH
336/* TYPE is equivalent to VAL by ODR, but its tree representation differs
337 from VAL->type. This may happen in LTO where tree merging did not merge
338 all variants of the same type. It may or may not mean the ODR violation.
339 Add it to the list of duplicates and warn on some violations. */
340
341static void
342add_type_duplicate (odr_type val, tree type)
343{
344 if (!val->types_set)
345 val->types_set = pointer_set_create ();
346
347 /* See if this duplicate is new. */
348 if (!pointer_set_insert (val->types_set, type))
349 {
350 bool merge = true;
351 bool base_mismatch = false;
352 gcc_assert (in_lto_p);
353 vec_safe_push (val->types, type);
354 unsigned int i,j;
355
356 /* First we compare memory layout. */
357 if (!types_compatible_p (val->type, type))
358 {
359 merge = false;
ec77d61f 360 odr_violation_reported = true;
61a74079
JH
361 if (BINFO_VTABLE (TYPE_BINFO (val->type))
362 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
363 "type %qD violates one definition rule ",
364 type))
365 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
366 "a type with the same name but different layout is "
367 "defined in another translation unit");
61a74079
JH
368 if (cgraph_dump_file)
369 {
370 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
371
372 print_node (cgraph_dump_file, "", val->type, 0);
373 putc ('\n',cgraph_dump_file);
374 print_node (cgraph_dump_file, "", type, 0);
375 putc ('\n',cgraph_dump_file);
376 }
377 }
378
379 /* Next sanity check that bases are the same. If not, we will end
380 up producing wrong answers. */
381 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
382 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
383 {
384 odr_type base = get_odr_type
385 (BINFO_TYPE
386 (BINFO_BASE_BINFO (TYPE_BINFO (type),
387 i)),
388 true);
389 if (val->bases.length () <= j || val->bases[j] != base)
390 base_mismatch = true;
391 j++;
392 }
393 if (base_mismatch)
394 {
395 merge = false;
ec77d61f 396 odr_violation_reported = true;
61a74079
JH
397
398 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
399 "type %qD violates one definition rule ",
400 type))
401 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
402 "a type with the same name but different bases is "
403 "defined in another translation unit");
404 if (cgraph_dump_file)
405 {
406 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
407
408 print_node (cgraph_dump_file, "", val->type, 0);
409 putc ('\n',cgraph_dump_file);
410 print_node (cgraph_dump_file, "", type, 0);
411 putc ('\n',cgraph_dump_file);
412 }
413 }
414
415 /* Regularize things a little. During LTO same types may come with
416 different BINFOs. Either because their virtual table was
417 not merged by tree merging and only later at decl merging or
418 because one type comes with external vtable, while other
419 with internal. We want to merge equivalent binfos to conserve
420 memory and streaming overhead.
421
422 The external vtables are more harmful: they contain references
423 to external declarations of methods that may be defined in the
424 merged LTO unit. For this reason we absolutely need to remove
425 them and replace by internal variants. Not doing so will lead
426 to incomplete answers from possible_polymorphic_call_targets. */
427 if (!flag_ltrans && merge)
428 {
429 tree master_binfo = TYPE_BINFO (val->type);
430 tree v1 = BINFO_VTABLE (master_binfo);
431 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
432
433 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
434 {
435 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
436 && operand_equal_p (TREE_OPERAND (v1, 1),
437 TREE_OPERAND (v2, 1), 0));
438 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
439 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
440 }
441 gcc_assert (DECL_ASSEMBLER_NAME (v1)
442 == DECL_ASSEMBLER_NAME (v2));
443
444 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
445 {
446 unsigned int i;
447
448 TYPE_BINFO (val->type) = TYPE_BINFO (type);
c3284718 449 for (i = 0; i < val->types->length (); i++)
61a74079
JH
450 {
451 if (TYPE_BINFO ((*val->types)[i])
452 == master_binfo)
453 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
454 }
455 }
456 else
457 TYPE_BINFO (type) = master_binfo;
458 }
459 }
460}
461
eefe9a99
JH
462/* Get ODR type hash entry for TYPE. If INSERT is true, create
463 possibly new entry. */
464
465odr_type
466get_odr_type (tree type, bool insert)
467{
468 odr_type_d **slot;
469 odr_type val;
470 hashval_t hash;
471
472 type = TYPE_MAIN_VARIANT (type);
473 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
474 hash = hash_type_name (type);
475 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
476 if (!slot)
477 return NULL;
478
479 /* See if we already have entry for type. */
480 if (*slot)
481 {
482 val = *slot;
483
61a74079
JH
484 /* With LTO we need to support multiple tree representation of
485 the same ODR type. */
486 if (val->type != type)
487 add_type_duplicate (val, type);
eefe9a99
JH
488 }
489 else
490 {
491 tree binfo = TYPE_BINFO (type);
492 unsigned int i;
493
494 val = ggc_alloc_cleared_odr_type_d ();
495 val->type = type;
496 val->bases = vNULL;
497 val->derived_types = vNULL;
0e1474e5 498 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
2d1644bf 499 val->all_derivations_known = type_all_derivations_known_p (type);
eefe9a99
JH
500 *slot = val;
501 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
502 /* For now record only polymorphic types. other are
503 pointless for devirtualization and we can not precisely
504 determine ODR equivalency of these during LTO. */
505 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
506 {
507 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
508 i)),
509 true);
510 base->derived_types.safe_push (val);
511 val->bases.safe_push (base);
512 }
513 /* First record bases, then add into array so ids are increasing. */
514 if (odr_types_ptr)
c3284718 515 val->id = odr_types.length ();
eefe9a99
JH
516 vec_safe_push (odr_types_ptr, val);
517 }
518 return val;
519}
520
521/* Dump ODR type T and all its derrived type. INDENT specify indentation for
522 recusive printing. */
523
524static void
525dump_odr_type (FILE *f, odr_type t, int indent=0)
526{
527 unsigned int i;
528 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
529 print_generic_expr (f, t->type, TDF_SLIM);
2d1644bf
JH
530 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
531 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
eefe9a99
JH
532 if (TYPE_NAME (t->type))
533 {
534 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
535 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
536 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
537 }
c3284718 538 if (t->bases.length ())
eefe9a99
JH
539 {
540 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 541 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
542 fprintf (f, " %i", t->bases[i]->id);
543 fprintf (f, "\n");
544 }
c3284718 545 if (t->derived_types.length ())
eefe9a99
JH
546 {
547 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 548 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
549 dump_odr_type (f, t->derived_types[i], indent + 1);
550 }
551 fprintf (f, "\n");
552}
553
554/* Dump the type inheritance graph. */
555
556static void
557dump_type_inheritance_graph (FILE *f)
558{
559 unsigned int i;
0e1474e5
JH
560 if (!odr_types_ptr)
561 return;
eefe9a99 562 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 563 for (i = 0; i < odr_types.length (); i++)
eefe9a99 564 {
c3284718 565 if (odr_types[i]->bases.length () == 0)
eefe9a99
JH
566 dump_odr_type (f, odr_types[i]);
567 }
c3284718 568 for (i = 0; i < odr_types.length (); i++)
61a74079 569 {
c3284718 570 if (odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
571 {
572 unsigned int j;
573 fprintf (f, "Duplicate tree types for odr type %i\n", i);
574 print_node (f, "", odr_types[i]->type, 0);
c3284718 575 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
576 {
577 tree t;
578 fprintf (f, "duplicate #%i\n", j);
579 print_node (f, "", (*odr_types[i]->types)[j], 0);
580 t = (*odr_types[i]->types)[j];
581 while (TYPE_P (t) && TYPE_CONTEXT (t))
582 {
583 t = TYPE_CONTEXT (t);
584 print_node (f, "", t, 0);
585 }
586 putc ('\n',f);
587 }
588 }
589 }
eefe9a99
JH
590}
591
592/* Given method type T, return type of class it belongs to.
593 Lookup this pointer and get its type. */
594
64cbf23d 595tree
eefe9a99
JH
596method_class_type (tree t)
597{
598 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 599 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
600
601 return TREE_TYPE (first_parm_type);
602}
603
604/* Initialize IPA devirt and build inheritance tree graph. */
605
606void
607build_type_inheritance_graph (void)
608{
b270b096 609 struct symtab_node *n;
eefe9a99
JH
610 FILE *inheritance_dump_file;
611 int flags;
612
613 if (odr_hash.is_created ())
614 return;
615 timevar_push (TV_IPA_INHERITANCE);
616 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
617 odr_hash.create (23);
618
619 /* We reconstruct the graph starting of types of all methods seen in the
620 the unit. */
b270b096 621 FOR_EACH_SYMBOL (n)
7de90a6c 622 if (is_a <cgraph_node *> (n)
b270b096 623 && DECL_VIRTUAL_P (n->decl)
67348ccc
DM
624 && symtab_real_symbol_p (n))
625 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
b270b096
JH
626
627 /* Look also for virtual tables of types that do not define any methods.
628
629 We need it in a case where class B has virtual base of class A
630 re-defining its virtual method and there is class C with no virtual
631 methods with B as virtual base.
632
633 Here we output B's virtual method in two variant - for non-virtual
634 and virtual inheritance. B's virtual table has non-virtual version,
635 while C's has virtual.
636
637 For this reason we need to know about C in order to include both
638 variants of B. More correctly, record_target_from_binfo should
639 add both variants of the method when walking B, but we have no
640 link in between them.
641
642 We rely on fact that either the method is exported and thus we
643 assume it is called externally or C is in anonymous namespace and
644 thus we will see the vtable. */
645
7de90a6c 646 else if (is_a <varpool_node *> (n)
b270b096
JH
647 && DECL_VIRTUAL_P (n->decl)
648 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
649 && TYPE_BINFO (DECL_CONTEXT (n->decl))
650 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
651 get_odr_type (DECL_CONTEXT (n->decl), true);
eefe9a99
JH
652 if (inheritance_dump_file)
653 {
654 dump_type_inheritance_graph (inheritance_dump_file);
655 dump_end (TDI_inheritance, inheritance_dump_file);
656 }
657 timevar_pop (TV_IPA_INHERITANCE);
658}
659
ccb05ef2
JH
660/* Return true if N has reference from live virtual table
661 (and thus can be a destination of polymorphic call).
662 Be conservatively correct when callgraph is not built or
663 if the method may be referred externally. */
664
665static bool
666referenced_from_vtable_p (struct cgraph_node *node)
667{
668 int i;
669 struct ipa_ref *ref;
670 bool found = false;
671
672 if (node->externally_visible
673 || node->used_from_other_partition)
674 return true;
675
676 /* Keep this test constant time.
677 It is unlikely this can happen except for the case where speculative
678 devirtualization introduced many speculative edges to this node.
679 In this case the target is very likely alive anyway. */
680 if (node->ref_list.referring.length () > 100)
681 return true;
682
683 /* We need references built. */
684 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
685 return true;
686
687 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
688 i, ref); i++)
689
690 if ((ref->use == IPA_REF_ALIAS
691 && referenced_from_vtable_p (cgraph (ref->referring)))
692 || (ref->use == IPA_REF_ADDR
693 && TREE_CODE (ref->referring->decl) == VAR_DECL
694 && DECL_VIRTUAL_P (ref->referring->decl)))
695 {
696 found = true;
697 break;
698 }
699 return found;
700}
701
68377e53 702/* If TARGET has associated node, record it in the NODES array.
ec77d61f
JH
703 CAN_REFER specify if program can refer to the target directly.
704 if TARGET is unknown (NULL) or it can not be inserted (for example because
705 its body was already removed and there is no way to refer to it), clear
706 COMPLETEP. */
eefe9a99
JH
707
708static void
709maybe_record_node (vec <cgraph_node *> &nodes,
68377e53 710 tree target, pointer_set_t *inserted,
ec77d61f 711 bool can_refer,
68377e53 712 bool *completep)
eefe9a99
JH
713{
714 struct cgraph_node *target_node;
88f592e3
JH
715
716 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
717 list of targets; the runtime effect of calling them is undefined.
718 Only "real" virtual methods should be accounted. */
719 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
720 return;
eefe9a99 721
ec77d61f
JH
722 if (!can_refer)
723 {
724 /* The only case when method of anonymous namespace becomes unreferable
725 is when we completely optimized it out. */
726 if (flag_ltrans
727 || !target
88f592e3 728 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
ec77d61f
JH
729 *completep = false;
730 return;
731 }
732
88f592e3 733 if (!target)
68377e53
JH
734 return;
735
736 target_node = cgraph_get_node (target);
737
ccb05ef2
JH
738 /* Method can only be called by polymorphic call if any
739 of vtables refering to it are alive.
740
741 While this holds for non-anonymous functions, too, there are
742 cases where we want to keep them in the list; for example
743 inline functions with -fno-weak are static, but we still
744 may devirtualize them when instance comes from other unit.
745 The same holds for LTO.
746
747 Currently we ignore these functions in speculative devirtualization.
748 ??? Maybe it would make sense to be more aggressive for LTO even
749 eslewhere. */
750 if (!flag_ltrans
751 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
752 && (!target_node
753 || !referenced_from_vtable_p (target_node)))
754 ;
755 /* See if TARGET is useful function we can deal with. */
756 else if (target_node != NULL
757 && (TREE_PUBLIC (target)
758 || DECL_EXTERNAL (target)
759 || target_node->definition)
760 && symtab_real_symbol_p (target_node))
0e1474e5 761 {
68377e53
JH
762 gcc_assert (!target_node->global.inlined_to);
763 gcc_assert (symtab_real_symbol_p (target_node));
764 if (!pointer_set_insert (inserted, target))
765 {
766 pointer_set_insert (cached_polymorphic_call_targets,
767 target_node);
768 nodes.safe_push (target_node);
769 }
0e1474e5 770 }
68377e53 771 else if (completep
2d1644bf
JH
772 && (!type_in_anonymous_namespace_p
773 (DECL_CONTEXT (target))
774 || flag_ltrans))
0439a947 775 *completep = false;
eefe9a99
JH
776}
777
68377e53
JH
778/* See if BINFO's type match OUTER_TYPE. If so, lookup
779 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2d1644bf
JH
780 method in vtable and insert method to NODES array
781 or BASES_TO_CONSIDER if this array is non-NULL.
eefe9a99
JH
782 Otherwise recurse to base BINFOs.
783 This match what get_binfo_at_offset does, but with offset
784 being unknown.
785
a3788dde
JH
786 TYPE_BINFOS is a stack of BINFOS of types with defined
787 virtual table seen on way from class type to BINFO.
eefe9a99
JH
788
789 MATCHED_VTABLES tracks virtual tables we already did lookup
68377e53
JH
790 for virtual function in. INSERTED tracks nodes we already
791 inserted.
3462aa02
JH
792
793 ANONYMOUS is true if BINFO is part of anonymous namespace.
ec77d61f
JH
794
795 Clear COMPLETEP when we hit unreferable target.
eefe9a99
JH
796 */
797
798static void
68377e53 799record_target_from_binfo (vec <cgraph_node *> &nodes,
2d1644bf 800 vec <tree> *bases_to_consider,
68377e53
JH
801 tree binfo,
802 tree otr_type,
a3788dde 803 vec <tree> &type_binfos,
68377e53
JH
804 HOST_WIDE_INT otr_token,
805 tree outer_type,
806 HOST_WIDE_INT offset,
807 pointer_set_t *inserted,
808 pointer_set_t *matched_vtables,
ec77d61f
JH
809 bool anonymous,
810 bool *completep)
eefe9a99
JH
811{
812 tree type = BINFO_TYPE (binfo);
813 int i;
814 tree base_binfo;
815
eefe9a99 816
a3788dde
JH
817 if (BINFO_VTABLE (binfo))
818 type_binfos.safe_push (binfo);
68377e53 819 if (types_same_for_odr (type, outer_type))
eefe9a99 820 {
a3788dde
JH
821 int i;
822 tree type_binfo = NULL;
823
824 /* Lookup BINFO with virtual table. For normal types it is always last
825 binfo on stack. */
826 for (i = type_binfos.length () - 1; i >= 0; i--)
827 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
828 {
829 type_binfo = type_binfos[i];
830 break;
831 }
832 if (BINFO_VTABLE (binfo))
833 type_binfos.pop ();
834 /* If this is duplicated BINFO for base shared by virtual inheritance,
835 we may not have its associated vtable. This is not a problem, since
836 we will walk it on the other path. */
837 if (!type_binfo)
6d6af792 838 return;
68377e53
JH
839 tree inner_binfo = get_binfo_at_offset (type_binfo,
840 offset, otr_type);
ec77d61f
JH
841 if (!inner_binfo)
842 {
843 gcc_assert (odr_violation_reported);
844 return;
845 }
3462aa02
JH
846 /* For types in anonymous namespace first check if the respective vtable
847 is alive. If not, we know the type can't be called. */
848 if (!flag_ltrans && anonymous)
849 {
68377e53 850 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 851 varpool_node *vnode;
3462aa02
JH
852
853 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
854 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
855 vnode = varpool_get_node (vtable);
67348ccc 856 if (!vnode || !vnode->definition)
3462aa02
JH
857 return;
858 }
68377e53 859 gcc_assert (inner_binfo);
2d1644bf
JH
860 if (bases_to_consider
861 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
862 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
68377e53 863 {
ec77d61f
JH
864 bool can_refer;
865 tree target = gimple_get_virt_method_for_binfo (otr_token,
866 inner_binfo,
867 &can_refer);
2d1644bf
JH
868 if (!bases_to_consider)
869 maybe_record_node (nodes, target, inserted, can_refer, completep);
870 /* Destructors are never called via construction vtables. */
871 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
872 bases_to_consider->safe_push (target);
68377e53 873 }
eefe9a99
JH
874 return;
875 }
876
877 /* Walk bases. */
878 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
879 /* Walking bases that have no virtual method is pointless excercise. */
880 if (polymorphic_type_binfo_p (base_binfo))
2d1644bf 881 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
a3788dde 882 type_binfos,
68377e53 883 otr_token, outer_type, offset, inserted,
ec77d61f 884 matched_vtables, anonymous, completep);
a3788dde
JH
885 if (BINFO_VTABLE (binfo))
886 type_binfos.pop ();
eefe9a99
JH
887}
888
889/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
890 of TYPE, insert them to NODES, recurse into derived nodes.
891 INSERTED is used to avoid duplicate insertions of methods into NODES.
ec77d61f 892 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2d1644bf
JH
893 Clear COMPLETEP if unreferable target is found.
894
895 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
896 all cases where BASE_SKIPPED is true (because the base is abstract
897 class). */
eefe9a99
JH
898
899static void
900possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
901 pointer_set_t *inserted,
902 pointer_set_t *matched_vtables,
903 tree otr_type,
904 odr_type type,
68377e53
JH
905 HOST_WIDE_INT otr_token,
906 tree outer_type,
ec77d61f 907 HOST_WIDE_INT offset,
2d1644bf
JH
908 bool *completep,
909 vec <tree> &bases_to_consider,
910 bool consider_construction)
eefe9a99
JH
911{
912 tree binfo = TYPE_BINFO (type->type);
913 unsigned int i;
a3788dde 914 vec <tree> type_binfos = vNULL;
2d1644bf
JH
915 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
916
917 /* We may need to consider types w/o instances because of possible derived
918 types using their methods either directly or via construction vtables.
919 We are safe to skip them when all derivations are known, since we will
920 handle them later.
921 This is done by recording them to BASES_TO_CONSIDER array. */
922 if (possibly_instantiated || consider_construction)
923 {
924 record_target_from_binfo (nodes,
925 (!possibly_instantiated
926 && type_all_derivations_known_p (type->type))
927 ? &bases_to_consider : NULL,
928 binfo, otr_type, type_binfos, otr_token,
929 outer_type, offset,
930 inserted, matched_vtables,
931 type->anonymous_namespace, completep);
932 }
a3788dde 933 type_binfos.release ();
c3284718 934 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
935 possible_polymorphic_call_targets_1 (nodes, inserted,
936 matched_vtables,
937 otr_type,
938 type->derived_types[i],
2d1644bf
JH
939 otr_token, outer_type, offset, completep,
940 bases_to_consider, consider_construction);
eefe9a99
JH
941}
942
943/* Cache of queries for polymorphic call targets.
944
945 Enumerating all call targets may get expensive when there are many
946 polymorphic calls in the program, so we memoize all the previous
947 queries and avoid duplicated work. */
948
949struct polymorphic_call_target_d
950{
eefe9a99 951 HOST_WIDE_INT otr_token;
68377e53
JH
952 ipa_polymorphic_call_context context;
953 odr_type type;
eefe9a99 954 vec <cgraph_node *> targets;
ec77d61f
JH
955 int nonconstruction_targets;
956 bool complete;
eefe9a99
JH
957};
958
959/* Polymorphic call target cache helpers. */
960
961struct polymorphic_call_target_hasher
962{
963 typedef polymorphic_call_target_d value_type;
964 typedef polymorphic_call_target_d compare_type;
965 static inline hashval_t hash (const value_type *);
966 static inline bool equal (const value_type *, const compare_type *);
967 static inline void remove (value_type *);
968};
969
970/* Return the computed hashcode for ODR_QUERY. */
971
972inline hashval_t
973polymorphic_call_target_hasher::hash (const value_type *odr_query)
974{
68377e53
JH
975 hashval_t hash;
976
977 hash = iterative_hash_host_wide_int
978 (odr_query->otr_token,
979 odr_query->type->id);
980 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
981 hash);
982 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
983 return iterative_hash_hashval_t
984 (((int)odr_query->context.maybe_in_construction << 1)
985 | (int)odr_query->context.maybe_derived_type, hash);
eefe9a99
JH
986}
987
988/* Compare cache entries T1 and T2. */
989
990inline bool
991polymorphic_call_target_hasher::equal (const value_type *t1,
992 const compare_type *t2)
993{
68377e53
JH
994 return (t1->type == t2->type && t1->otr_token == t2->otr_token
995 && t1->context.offset == t2->context.offset
996 && t1->context.outer_type == t2->context.outer_type
997 && t1->context.maybe_in_construction
998 == t2->context.maybe_in_construction
999 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
eefe9a99
JH
1000}
1001
1002/* Remove entry in polymorphic call target cache hash. */
1003
1004inline void
1005polymorphic_call_target_hasher::remove (value_type *v)
1006{
1007 v->targets.release ();
1008 free (v);
1009}
1010
1011/* Polymorphic call target query cache. */
1012
1013typedef hash_table <polymorphic_call_target_hasher>
1014 polymorphic_call_target_hash_type;
1015static polymorphic_call_target_hash_type polymorphic_call_target_hash;
eefe9a99
JH
1016
1017/* Destroy polymorphic call target query cache. */
1018
1019static void
1020free_polymorphic_call_targets_hash ()
1021{
0e1474e5
JH
1022 if (cached_polymorphic_call_targets)
1023 {
1024 polymorphic_call_target_hash.dispose ();
1025 pointer_set_destroy (cached_polymorphic_call_targets);
1026 cached_polymorphic_call_targets = NULL;
1027 }
eefe9a99
JH
1028}
1029
1030/* When virtual function is removed, we may need to flush the cache. */
1031
1032static void
1033devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1034{
0e1474e5
JH
1035 if (cached_polymorphic_call_targets
1036 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
1037 free_polymorphic_call_targets_hash ();
1038}
1039
68377e53
JH
1040/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1041 is contained at CONTEXT->OFFSET. Walk the memory representation of
1042 CONTEXT->OUTER_TYPE and find the outermost class type that match
1043 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1044 to represent it.
1045
1046 For example when CONTEXT represents type
1047 class A
1048 {
1049 int a;
1050 class B b;
1051 }
1052 and we look for type at offset sizeof(int), we end up with B and offset 0.
1053 If the same is produced by multiple inheritance, we end up with A and offset
1054 sizeof(int).
1055
1056 If we can not find corresponding class, give up by setting
1057 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1058 Return true when lookup was sucesful. */
1059
1060static bool
1061get_class_context (ipa_polymorphic_call_context *context,
1062 tree expected_type)
1063{
1064 tree type = context->outer_type;
1065 HOST_WIDE_INT offset = context->offset;
1066
1067 /* Find the sub-object the constant actually refers to and mark whether it is
1068 an artificial one (as opposed to a user-defined one). */
1069 while (true)
1070 {
1071 HOST_WIDE_INT pos, size;
1072 tree fld;
1073
1074 /* On a match, just return what we found. */
1075 if (TREE_CODE (type) == TREE_CODE (expected_type)
1076 && types_same_for_odr (type, expected_type))
1077 {
3b4e93c3
JH
1078 /* Type can not contain itself on an non-zero offset. In that case
1079 just give up. */
1080 if (offset != 0)
1081 goto give_up;
68377e53
JH
1082 gcc_assert (offset == 0);
1083 return true;
1084 }
1085
1086 /* Walk fields and find corresponding on at OFFSET. */
1087 if (TREE_CODE (type) == RECORD_TYPE)
1088 {
1089 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1090 {
1091 if (TREE_CODE (fld) != FIELD_DECL)
1092 continue;
1093
1094 pos = int_bit_position (fld);
1095 size = tree_to_uhwi (DECL_SIZE (fld));
1096 if (pos <= offset && (pos + size) > offset)
1097 break;
1098 }
1099
1100 if (!fld)
1101 goto give_up;
1102
1103 type = TREE_TYPE (fld);
1104 offset -= pos;
1105 /* DECL_ARTIFICIAL represents a basetype. */
1106 if (!DECL_ARTIFICIAL (fld))
1107 {
1108 context->outer_type = type;
1109 context->offset = offset;
1110 /* As soon as we se an field containing the type,
1111 we know we are not looking for derivations. */
1112 context->maybe_derived_type = false;
1113 }
1114 }
1115 else if (TREE_CODE (type) == ARRAY_TYPE)
1116 {
1117 tree subtype = TREE_TYPE (type);
1118
1119 /* Give up if we don't know array size. */
1120 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1121 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1122 goto give_up;
1123 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1124 type = subtype;
1125 context->outer_type = type;
1126 context->offset = offset;
1127 context->maybe_derived_type = false;
1128 }
1129 /* Give up on anything else. */
1130 else
1131 goto give_up;
1132 }
1133
1134 /* If we failed to find subtype we look for, give up and fall back to the
1135 most generic query. */
1136give_up:
1137 context->outer_type = expected_type;
1138 context->offset = 0;
1139 context->maybe_derived_type = true;
e400f081
JH
1140 context->maybe_in_construction = true;
1141 /* POD can be changed to an instance of a polymorphic type by
1142 placement new. Here we play safe and assume that any
1143 non-polymorphic type is POD. */
1144 if ((TREE_CODE (type) != RECORD_TYPE
1145 || !TYPE_BINFO (type)
1146 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1147 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1148 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1149 tree_to_uhwi (TYPE_SIZE (type)))))
1150 return true;
68377e53
JH
1151 return false;
1152}
1153
1154/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1155
1156static bool
1157contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1158 tree otr_type)
1159{
1160 ipa_polymorphic_call_context context = {offset, outer_type,
1161 false, true};
1162 return get_class_context (&context, otr_type);
1163}
1164
390675c8
JH
1165/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1166
1167static tree
85942f45
JH
1168subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1169 tree vtable)
390675c8
JH
1170{
1171 tree v = BINFO_VTABLE (binfo);
1172 int i;
1173 tree base_binfo;
85942f45 1174 unsigned HOST_WIDE_INT this_offset;
390675c8 1175
85942f45
JH
1176 if (v)
1177 {
1178 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1179 gcc_unreachable ();
1180
1181 if (offset == this_offset
1182 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1183 return binfo;
1184 }
390675c8 1185
390675c8
JH
1186 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1187 if (polymorphic_type_binfo_p (base_binfo))
1188 {
1189 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1190 if (base_binfo)
1191 return base_binfo;
1192 }
1193 return NULL;
1194}
1195
85942f45
JH
1196/* T is known constant value of virtual table pointer.
1197 Store virtual table to V and its offset to OFFSET.
1198 Return false if T does not look like virtual table reference. */
390675c8 1199
85942f45
JH
1200bool
1201vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
390675c8
JH
1202{
1203 /* We expect &MEM[(void *)&virtual_table + 16B].
1204 We obtain object's BINFO from the context of the virtual table.
1205 This one contains pointer to virtual table represented via
1206 POINTER_PLUS_EXPR. Verify that this pointer match to what
1207 we propagated through.
1208
1209 In the case of virtual inheritance, the virtual tables may
1210 be nested, i.e. the offset may be different from 16 and we may
1211 need to dive into the type representation. */
85942f45 1212 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
1213 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1214 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1215 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1216 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1217 == VAR_DECL)
1218 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1219 (TREE_OPERAND (t, 0), 0), 0)))
1220 {
85942f45
JH
1221 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1222 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1223 return true;
390675c8 1224 }
85942f45
JH
1225
1226 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1227 We need to handle it when T comes from static variable initializer or
1228 BINFO. */
1229 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1230 {
1231 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1232 t = TREE_OPERAND (t, 0);
1233 }
1234 else
1235 *offset = 0;
1236
1237 if (TREE_CODE (t) != ADDR_EXPR)
1238 return false;
1239 *v = TREE_OPERAND (t, 0);
1240 return true;
1241}
1242
1243/* T is known constant value of virtual table pointer. Return BINFO of the
1244 instance type. */
1245
1246tree
1247vtable_pointer_value_to_binfo (tree t)
1248{
1249 tree vtable;
1250 unsigned HOST_WIDE_INT offset;
1251
1252 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1253 return NULL_TREE;
1254
1255 /* FIXME: for stores of construction vtables we return NULL,
1256 because we do not have BINFO for those. Eventually we should fix
1257 our representation to allow this case to be handled, too.
1258 In the case we see store of BINFO we however may assume
1259 that standard folding will be ale to cope with it. */
1260 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1261 offset, vtable);
390675c8
JH
1262}
1263
5bccb77a
JH
1264/* Proudce polymorphic call context for call method of instance
1265 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1266
1267static void
1268get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1269 tree base, HOST_WIDE_INT offset)
1270{
1271 gcc_assert (DECL_P (base));
1272
1273 context->outer_type = TREE_TYPE (base);
1274 context->offset = offset;
1275 /* Make very conservative assumption that all objects
1276 may be in construction.
1277 TODO: ipa-prop already contains code to tell better.
1278 merge it later. */
1279 context->maybe_in_construction = true;
1280 context->maybe_derived_type = false;
1281}
1282
1283/* CST is an invariant (address of decl), try to get meaningful
1284 polymorphic call context for polymorphic call of method
1285 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1286 Return FALSE if nothing meaningful can be found. */
1287
1288bool
1289get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1290 tree cst,
1291 tree otr_type,
1292 HOST_WIDE_INT offset)
1293{
1294 HOST_WIDE_INT offset2, size, max_size;
1295 tree base;
1296
1297 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 1298 return false;
5bccb77a
JH
1299
1300 cst = TREE_OPERAND (cst, 0);
1301 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
1302 if (!DECL_P (base) || max_size == -1 || max_size != size)
1303 return false;
5bccb77a
JH
1304
1305 /* Only type inconsistent programs can have otr_type that is
1306 not part of outer type. */
79c7de84
EB
1307 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1308 return false;
5bccb77a 1309
79c7de84 1310 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
1311 return true;
1312}
1313
68377e53
JH
1314/* Given REF call in FNDECL, determine class of the polymorphic
1315 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1316 Return pointer to object described by the context */
1317
1318tree
1319get_polymorphic_call_info (tree fndecl,
1320 tree ref,
1321 tree *otr_type,
1322 HOST_WIDE_INT *otr_token,
1323 ipa_polymorphic_call_context *context)
1324{
1325 tree base_pointer;
1326 *otr_type = obj_type_ref_class (ref);
1327 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1328
1329 /* Set up basic info in case we find nothing interesting in the analysis. */
1330 context->outer_type = *otr_type;
1331 context->offset = 0;
1332 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1333 context->maybe_derived_type = true;
2d1644bf 1334 context->maybe_in_construction = true;
68377e53
JH
1335
1336 /* Walk SSA for outer object. */
1337 do
1338 {
1339 if (TREE_CODE (base_pointer) == SSA_NAME
1340 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1341 && SSA_NAME_DEF_STMT (base_pointer)
1342 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1343 {
1344 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1345 STRIP_NOPS (base_pointer);
1346 }
1347 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1348 {
1349 HOST_WIDE_INT size, max_size;
1350 HOST_WIDE_INT offset2;
1351 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1352 &offset2, &size, &max_size);
1353
1354 /* If this is a varying address, punt. */
1355 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1356 && max_size != -1
1357 && max_size == size)
1358 {
1359 /* We found dereference of a pointer. Type of the pointer
1360 and MEM_REF is meaningless, but we can look futher. */
1361 if (TREE_CODE (base) == MEM_REF)
1362 {
1363 base_pointer = TREE_OPERAND (base, 0);
1364 context->offset
807e902e 1365 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
1366 context->outer_type = NULL;
1367 }
1368 /* We found base object. In this case the outer_type
1369 is known. */
1370 else if (DECL_P (base))
1371 {
7656ee72 1372 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
1373
1374 /* Only type inconsistent programs can have otr_type that is
1375 not part of outer type. */
7656ee72
JH
1376 if (!contains_type_p (TREE_TYPE (base),
1377 context->offset + offset2, *otr_type))
3e86c6a8
JH
1378 {
1379 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1380 code sequences; we arrange the calls to be builtin_unreachable
1381 later. */
1382 *otr_token = INT_MAX;
1383 return base_pointer;
1384 }
5bccb77a
JH
1385 get_polymorphic_call_info_for_decl (context, base,
1386 context->offset + offset2);
7656ee72 1387 return NULL;
68377e53
JH
1388 }
1389 else
1390 break;
1391 }
1392 else
1393 break;
1394 }
1395 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1396 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1397 {
1398 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1399 * BITS_PER_UNIT;
1400 base_pointer = TREE_OPERAND (base_pointer, 0);
1401 }
1402 else
1403 break;
1404 }
1405 while (true);
1406
1407 /* Try to determine type of the outer object. */
1408 if (TREE_CODE (base_pointer) == SSA_NAME
1409 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1410 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1411 {
1412 /* See if parameter is THIS pointer of a method. */
1413 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1414 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1415 {
1416 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1417 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1418
1419 /* Dynamic casting has possibly upcasted the type
1420 in the hiearchy. In this case outer type is less
1421 informative than inner type and we should forget
1422 about it. */
1423 if (!contains_type_p (context->outer_type, context->offset,
1424 *otr_type))
1425 {
1426 context->outer_type = NULL;
1427 return base_pointer;
1428 }
1429
1430 /* If the function is constructor or destructor, then
d74db8ff 1431 the type is possibly in construction, but we know
68377e53
JH
1432 it is not derived type. */
1433 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1434 || DECL_CXX_DESTRUCTOR_P (fndecl))
1435 {
1436 context->maybe_in_construction = true;
1437 context->maybe_derived_type = false;
1438 }
1439 else
1440 {
1441 context->maybe_derived_type = true;
1442 context->maybe_in_construction = false;
1443 }
1444 return base_pointer;
1445 }
1446 /* Non-PODs passed by value are really passed by invisible
1447 reference. In this case we also know the type of the
1448 object. */
1449 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1450 {
1451 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1452 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1453 /* Only type inconsistent programs can have otr_type that is
1454 not part of outer type. */
1455 if (!contains_type_p (context->outer_type, context->offset,
1456 *otr_type))
1457 {
3e86c6a8
JH
1458 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1459 code sequences; we arrange the calls to be builtin_unreachable
1460 later. */
1461 *otr_token = INT_MAX;
68377e53
JH
1462 return base_pointer;
1463 }
1464 context->maybe_derived_type = false;
1465 context->maybe_in_construction = false;
1466 return base_pointer;
1467 }
1468 }
1469 /* TODO: There are multiple ways to derive a type. For instance
1470 if BASE_POINTER is passed to an constructor call prior our refernece.
1471 We do not make this type of flow sensitive analysis yet. */
1472 return base_pointer;
1473}
1474
1475/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1476 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1477 and insert them to NODES.
1478
1479 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1480
1481static void
1482record_targets_from_bases (tree otr_type,
1483 HOST_WIDE_INT otr_token,
1484 tree outer_type,
1485 HOST_WIDE_INT offset,
ec77d61f 1486 vec <cgraph_node *> &nodes,
68377e53
JH
1487 pointer_set_t *inserted,
1488 pointer_set_t *matched_vtables,
1489 bool *completep)
1490{
1491 while (true)
1492 {
1493 HOST_WIDE_INT pos, size;
1494 tree base_binfo;
1495 tree fld;
1496
1497 if (types_same_for_odr (outer_type, otr_type))
1498 return;
1499
1500 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1501 {
1502 if (TREE_CODE (fld) != FIELD_DECL)
1503 continue;
1504
1505 pos = int_bit_position (fld);
1506 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
1507 if (pos <= offset && (pos + size) > offset
1508 /* Do not get confused by zero sized bases. */
1509 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
1510 break;
1511 }
1512 /* Within a class type we should always find correcponding fields. */
1513 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1514
1515 /* Nonbasetypes should have been stripped by outer_class_type. */
1516 gcc_assert (DECL_ARTIFICIAL (fld));
1517
1518 outer_type = TREE_TYPE (fld);
1519 offset -= pos;
1520
1521 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1522 offset, otr_type);
ec77d61f
JH
1523 if (!base_binfo)
1524 {
1525 gcc_assert (odr_violation_reported);
1526 return;
1527 }
68377e53
JH
1528 gcc_assert (base_binfo);
1529 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1530 {
ec77d61f
JH
1531 bool can_refer;
1532 tree target = gimple_get_virt_method_for_binfo (otr_token,
1533 base_binfo,
1534 &can_refer);
2d1644bf
JH
1535 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1536 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
1537 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1538 }
1539 }
1540}
1541
3462aa02
JH
1542/* When virtual table is removed, we may need to flush the cache. */
1543
1544static void
2c8326a5 1545devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
1546 void *d ATTRIBUTE_UNUSED)
1547{
1548 if (cached_polymorphic_call_targets
67348ccc
DM
1549 && DECL_VIRTUAL_P (n->decl)
1550 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
1551 free_polymorphic_call_targets_hash ();
1552}
1553
eefe9a99 1554/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
1555 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1556 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1557 OTR_TYPE and include their virtual method. This is useful for types
1558 possibly in construction or destruction where the virtual table may
1559 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1560 us to walk the inheritance graph for all derivations.
1561
3e86c6a8
JH
1562 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1563 undefined and should be redirected to unreachable.
1564
add5c763 1565 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
1566 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1567 in the target cache. If user needs to visit every target list
1568 just once, it can memoize them.
1569
ec77d61f
JH
1570 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1571 the type is not in the construction. Those targets appear first in the
1572 vector returned.
1573
eefe9a99
JH
1574 Returned vector is placed into cache. It is NOT caller's responsibility
1575 to free it. The vector can be freed on cgraph_remove_node call if
1576 the particular node is a virtual function present in the cache. */
1577
1578vec <cgraph_node *>
1579possible_polymorphic_call_targets (tree otr_type,
1580 HOST_WIDE_INT otr_token,
68377e53
JH
1581 ipa_polymorphic_call_context context,
1582 bool *completep,
ec77d61f
JH
1583 void **cache_token,
1584 int *nonconstruction_targetsp)
eefe9a99
JH
1585{
1586 static struct cgraph_node_hook_list *node_removal_hook_holder;
1587 pointer_set_t *inserted;
1588 pointer_set_t *matched_vtables;
add5c763 1589 vec <cgraph_node *> nodes = vNULL;
2d1644bf 1590 vec <tree> bases_to_consider = vNULL;
68377e53 1591 odr_type type, outer_type;
eefe9a99
JH
1592 polymorphic_call_target_d key;
1593 polymorphic_call_target_d **slot;
1594 unsigned int i;
1595 tree binfo, target;
ec77d61f
JH
1596 bool complete;
1597 bool can_refer;
2d1644bf 1598 bool skipped = false;
eefe9a99 1599
3e86c6a8 1600 /* If ODR is not initialized, return empty incomplete list. */
79c7de84
EB
1601 if (!odr_hash.is_created ())
1602 {
1603 if (completep)
1604 *completep = false;
ec77d61f
JH
1605 if (nonconstruction_targetsp)
1606 *nonconstruction_targetsp = 0;
79c7de84
EB
1607 return nodes;
1608 }
add5c763 1609
3e86c6a8
JH
1610 /* If we hit type inconsistency, just return empty list of targets. */
1611 if (otr_token == INT_MAX)
1612 {
1613 if (completep)
1614 *completep = true;
1615 if (nonconstruction_targetsp)
1616 *nonconstruction_targetsp = 0;
1617 return nodes;
1618 }
1619
68377e53 1620 type = get_odr_type (otr_type, true);
eefe9a99 1621
68377e53 1622 /* Lookup the outer class type we want to walk. */
3e86c6a8
JH
1623 if (context.outer_type
1624 && !get_class_context (&context, otr_type))
1625 {
1626 if (completep)
1627 *completep = false;
1628 if (nonconstruction_targetsp)
1629 *nonconstruction_targetsp = 0;
1630 return nodes;
1631 }
eefe9a99 1632
79c7de84 1633 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
1634
1635 /* Without outer type, we have no use for offset. Just do the
1636 basic search from innter type */
1637 if (!context.outer_type)
1638 {
1639 context.outer_type = otr_type;
1640 context.offset = 0;
1641 }
1642 /* We need to update our hiearchy if the type does not exist. */
1643 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 1644 /* If the type is complete, there are no derivations. */
68377e53
JH
1645 if (TYPE_FINAL_P (outer_type->type))
1646 context.maybe_derived_type = false;
eefe9a99
JH
1647
1648 /* Initialize query cache. */
1649 if (!cached_polymorphic_call_targets)
1650 {
1651 cached_polymorphic_call_targets = pointer_set_create ();
1652 polymorphic_call_target_hash.create (23);
1653 if (!node_removal_hook_holder)
3462aa02
JH
1654 {
1655 node_removal_hook_holder =
1656 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1657 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1658 NULL);
1659 }
eefe9a99
JH
1660 }
1661
1662 /* Lookup cached answer. */
1663 key.type = type;
1664 key.otr_token = otr_token;
68377e53 1665 key.context = context;
eefe9a99
JH
1666 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1667 if (cache_token)
1668 *cache_token = (void *)*slot;
1669 if (*slot)
68377e53
JH
1670 {
1671 if (completep)
ec77d61f
JH
1672 *completep = (*slot)->complete;
1673 if (nonconstruction_targetsp)
1674 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
68377e53
JH
1675 return (*slot)->targets;
1676 }
1677
ec77d61f 1678 complete = true;
eefe9a99
JH
1679
1680 /* Do actual search. */
1681 timevar_push (TV_IPA_VIRTUAL_CALL);
1682 *slot = XCNEW (polymorphic_call_target_d);
1683 if (cache_token)
68377e53 1684 *cache_token = (void *)*slot;
eefe9a99
JH
1685 (*slot)->type = type;
1686 (*slot)->otr_token = otr_token;
68377e53 1687 (*slot)->context = context;
eefe9a99
JH
1688
1689 inserted = pointer_set_create ();
1690 matched_vtables = pointer_set_create ();
1691
1692 /* First see virtual method of type itself. */
68377e53
JH
1693 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1694 context.offset, otr_type);
ec77d61f
JH
1695 if (binfo)
1696 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1697 &can_refer);
1698 else
68377e53 1699 {
ec77d61f
JH
1700 gcc_assert (odr_violation_reported);
1701 target = NULL;
1702 }
68377e53 1703
2d1644bf
JH
1704 /* Destructors are never called through construction virtual tables,
1705 because the type is always known. */
1706 if (target && DECL_CXX_DESTRUCTOR_P (target))
1707 context.maybe_in_construction = false;
ec77d61f
JH
1708
1709 if (target)
1710 {
1711 /* In the case we get complete method, we don't need
68377e53
JH
1712 to walk derivations. */
1713 if (DECL_FINAL_P (target))
1714 context.maybe_derived_type = false;
1715 }
2d1644bf
JH
1716
1717 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
1718 if (type_possibly_instantiated_p (outer_type->type))
1719 maybe_record_node (nodes, target, inserted, can_refer, &complete);
ec77d61f 1720 else
2d1644bf
JH
1721 {
1722 skipped = true;
1723 gcc_assert (in_lto_p || context.maybe_derived_type);
1724 }
79c7de84 1725
eefe9a99
JH
1726 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1727
ec77d61f 1728 /* Next walk recursively all derived types. */
68377e53
JH
1729 if (context.maybe_derived_type)
1730 {
1731 /* For anonymous namespace types we can attempt to build full type.
1732 All derivations must be in this unit (unless we see partial unit). */
2d1644bf 1733 if (!type->all_derivations_known)
ec77d61f 1734 complete = false;
68377e53
JH
1735 for (i = 0; i < outer_type->derived_types.length(); i++)
1736 possible_polymorphic_call_targets_1 (nodes, inserted,
1737 matched_vtables,
79c7de84
EB
1738 otr_type,
1739 outer_type->derived_types[i],
68377e53 1740 otr_token, outer_type->type,
2d1644bf
JH
1741 context.offset, &complete,
1742 bases_to_consider,
1743 context.maybe_in_construction);
68377e53 1744 }
79c7de84 1745
ec77d61f
JH
1746 /* Finally walk bases, if asked to. */
1747 (*slot)->nonconstruction_targets = nodes.length();
2d1644bf
JH
1748
1749 /* Destructors are never called through construction virtual tables,
1750 because the type is always known. One of entries may be cxa_pure_virtual
1751 so look to at least two of them. */
1752 if (context.maybe_in_construction)
1753 for (i =0 ; i < MIN (nodes.length (), 2); i++)
1754 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1755 context.maybe_in_construction = false;
ec77d61f 1756 if (context.maybe_in_construction)
2d1644bf
JH
1757 {
1758 if (type != outer_type
1759 && (!skipped
1760 || (context.maybe_derived_type
1761 && !type_all_derivations_known_p (outer_type->type))))
1762 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1763 context.offset, nodes, inserted,
1764 matched_vtables, &complete);
1765 if (skipped)
1766 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1767 for (i = 0; i < bases_to_consider.length(); i++)
1768 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
1769 }
1770 bases_to_consider.release();
ec77d61f 1771
eefe9a99 1772 (*slot)->targets = nodes;
ec77d61f 1773 (*slot)->complete = complete;
68377e53 1774 if (completep)
ec77d61f
JH
1775 *completep = complete;
1776 if (nonconstruction_targetsp)
1777 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
eefe9a99
JH
1778
1779 pointer_set_destroy (inserted);
1780 pointer_set_destroy (matched_vtables);
1781 timevar_pop (TV_IPA_VIRTUAL_CALL);
1782 return nodes;
1783}
1784
1785/* Dump all possible targets of a polymorphic call. */
1786
1787void
1788dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
1789 tree otr_type,
1790 HOST_WIDE_INT otr_token,
1791 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
1792{
1793 vec <cgraph_node *> targets;
1794 bool final;
1795 odr_type type = get_odr_type (otr_type, false);
1796 unsigned int i;
ec77d61f 1797 int nonconstruction;
eefe9a99
JH
1798
1799 if (!type)
1800 return;
1801 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 1802 ctx,
ec77d61f 1803 &final, NULL, &nonconstruction);
68377e53 1804 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 1805 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
1806 fprintf (f, " token %i\n", (int)otr_token);
1807 if (ctx.outer_type || ctx.offset)
1808 {
1809 fprintf (f, " Contained in type:");
1810 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1811 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1812 ctx.offset);
1813 }
1814
1815 fprintf (f, " %s%s%s\n ",
1816 final ? "This is a complete list." :
68377e53
JH
1817 "This is partial list; extra targets may be defined in other units.",
1818 ctx.maybe_in_construction ? " (base types included)" : "",
1819 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 1820 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
1821 {
1822 char *name = NULL;
1823 if (i == (unsigned)nonconstruction)
1824 fprintf (f, "\n If the type is in construction,"
1825 " then additional tarets are:\n"
1826 " ");
1827 if (in_lto_p)
1828 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1829 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1830 if (in_lto_p)
1831 free (name);
1832 if (!targets[i]->definition)
1833 fprintf (f, " (no definition%s)",
1834 DECL_DECLARED_INLINE_P (targets[i]->decl)
1835 ? " inline" : "");
1836 }
68377e53 1837 fprintf (f, "\n\n");
eefe9a99
JH
1838}
1839
0e1474e5
JH
1840
1841/* Return true if N can be possibly target of a polymorphic call of
1842 OTR_TYPE/OTR_TOKEN. */
1843
1844bool
1845possible_polymorphic_call_target_p (tree otr_type,
1846 HOST_WIDE_INT otr_token,
68377e53 1847 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
1848 struct cgraph_node *n)
1849{
1850 vec <cgraph_node *> targets;
1851 unsigned int i;
68377e53 1852 enum built_in_function fcode;
450ad0cd 1853 bool final;
0e1474e5 1854
68377e53
JH
1855 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1856 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1857 == BUILT_IN_UNREACHABLE
1858 || fcode == BUILT_IN_TRAP))
1859 return true;
1860
0e1474e5
JH
1861 if (!odr_hash.is_created ())
1862 return true;
68377e53 1863 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 1864 for (i = 0; i < targets.length (); i++)
68377e53 1865 if (symtab_semantically_equivalent_p (n, targets[i]))
0e1474e5 1866 return true;
450ad0cd
JH
1867
1868 /* At a moment we allow middle end to dig out new external declarations
1869 as a targets of polymorphic calls. */
67348ccc 1870 if (!final && !n->definition)
450ad0cd 1871 return true;
0e1474e5
JH
1872 return false;
1873}
1874
1875
1876/* After callgraph construction new external nodes may appear.
1877 Add them into the graph. */
1878
1879void
1880update_type_inheritance_graph (void)
1881{
1882 struct cgraph_node *n;
1883
1884 if (!odr_hash.is_created ())
1885 return;
1886 free_polymorphic_call_targets_hash ();
1887 timevar_push (TV_IPA_INHERITANCE);
68377e53 1888 /* We reconstruct the graph starting from types of all methods seen in the
0e1474e5
JH
1889 the unit. */
1890 FOR_EACH_FUNCTION (n)
67348ccc
DM
1891 if (DECL_VIRTUAL_P (n->decl)
1892 && !n->definition
1893 && symtab_real_symbol_p (n))
1894 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
0e1474e5
JH
1895 timevar_pop (TV_IPA_INHERITANCE);
1896}
bbc9396b
JH
1897
1898
1899/* Return true if N looks like likely target of a polymorphic call.
1900 Rule out cxa_pure_virtual, noreturns, function declared cold and
1901 other obvious cases. */
1902
1903bool
1904likely_target_p (struct cgraph_node *n)
1905{
1906 int flags;
1907 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 1908 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 1909 return false;
67348ccc 1910 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
1911 if (flags & ECF_NORETURN)
1912 return false;
1913 if (lookup_attribute ("cold",
67348ccc 1914 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
1915 return false;
1916 if (n->frequency < NODE_FREQUENCY_NORMAL)
1917 return false;
ccb05ef2
JH
1918 /* If there are no virtual tables refering the target alive,
1919 the only way the target can be called is an instance comming from other
1920 compilation unit; speculative devirtualization is build around an
1921 assumption that won't happen. */
1922 if (!referenced_from_vtable_p (n))
1923 return false;
bbc9396b
JH
1924 return true;
1925}
1926
1927/* The ipa-devirt pass.
3462aa02
JH
1928 When polymorphic call has only one likely target in the unit,
1929 turn it into speculative call. */
bbc9396b
JH
1930
1931static unsigned int
1932ipa_devirt (void)
1933{
1934 struct cgraph_node *n;
1935 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1936 struct cgraph_edge *e;
1937
1938 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1939 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
570215f9 1940 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
bbc9396b
JH
1941
1942 FOR_EACH_DEFINED_FUNCTION (n)
1943 {
1944 bool update = false;
1945 if (dump_file && n->indirect_calls)
1946 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
fec39fa6 1947 n->name (), n->order);
bbc9396b
JH
1948 for (e = n->indirect_calls; e; e = e->next_callee)
1949 if (e->indirect_info->polymorphic)
1950 {
1951 struct cgraph_node *likely_target = NULL;
1952 void *cache_token;
1953 bool final;
ec77d61f 1954 int nonconstruction_targets;
bbc9396b
JH
1955 vec <cgraph_node *>targets
1956 = possible_polymorphic_call_targets
ec77d61f 1957 (e, &final, &cache_token, &nonconstruction_targets);
bbc9396b
JH
1958 unsigned int i;
1959
1960 if (dump_file)
1961 dump_possible_polymorphic_call_targets
1962 (dump_file, e);
3462aa02 1963
bbc9396b
JH
1964 npolymorphic++;
1965
bbc9396b
JH
1966 if (!cgraph_maybe_hot_edge_p (e))
1967 {
1968 if (dump_file)
ec77d61f 1969 fprintf (dump_file, "Call is cold\n\n");
bbc9396b
JH
1970 ncold++;
1971 continue;
1972 }
1973 if (e->speculative)
1974 {
1975 if (dump_file)
ec77d61f 1976 fprintf (dump_file, "Call is aready speculated\n\n");
bbc9396b
JH
1977 nspeculated++;
1978
1979 /* When dumping see if we agree with speculation. */
1980 if (!dump_file)
1981 continue;
1982 }
1983 if (pointer_set_contains (bad_call_targets,
1984 cache_token))
1985 {
1986 if (dump_file)
ec77d61f 1987 fprintf (dump_file, "Target list is known to be useless\n\n");
bbc9396b
JH
1988 nmultiple++;
1989 continue;
1990 }
c3284718 1991 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
1992 if (likely_target_p (targets[i]))
1993 {
1994 if (likely_target)
1995 {
ec77d61f
JH
1996 if (i < (unsigned) nonconstruction_targets)
1997 {
1998 likely_target = NULL;
1999 if (dump_file)
2000 fprintf (dump_file, "More than one likely target\n\n");
2001 nmultiple++;
2002 }
bbc9396b
JH
2003 break;
2004 }
2005 likely_target = targets[i];
2006 }
2007 if (!likely_target)
2008 {
2009 pointer_set_insert (bad_call_targets, cache_token);
2010 continue;
2011 }
2012 /* This is reached only when dumping; check if we agree or disagree
2013 with the speculation. */
2014 if (e->speculative)
2015 {
2016 struct cgraph_edge *e2;
2017 struct ipa_ref *ref;
2018 cgraph_speculative_call_info (e, e2, e, ref);
2019 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2020 == cgraph_function_or_thunk_node (likely_target, NULL))
2021 {
ec77d61f 2022 fprintf (dump_file, "We agree with speculation\n\n");
bbc9396b
JH
2023 nok++;
2024 }
2025 else
2026 {
ec77d61f 2027 fprintf (dump_file, "We disagree with speculation\n\n");
bbc9396b
JH
2028 nwrong++;
2029 }
2030 continue;
2031 }
67348ccc 2032 if (!likely_target->definition)
bbc9396b
JH
2033 {
2034 if (dump_file)
ec77d61f 2035 fprintf (dump_file, "Target is not an definition\n\n");
bbc9396b
JH
2036 nnotdefined++;
2037 continue;
2038 }
2039 /* Do not introduce new references to external symbols. While we
2040 can handle these just well, it is common for programs to
2041 incorrectly with headers defining methods they are linked
2042 with. */
67348ccc 2043 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
2044 {
2045 if (dump_file)
ec77d61f 2046 fprintf (dump_file, "Target is external\n\n");
bbc9396b
JH
2047 nexternal++;
2048 continue;
2049 }
570215f9
JM
2050 /* Don't use an implicitly-declared destructor (c++/58678). */
2051 struct cgraph_node *non_thunk_target
2052 = cgraph_function_node (likely_target);
2053 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2054 && DECL_COMDAT (non_thunk_target->decl))
2055 {
2056 if (dump_file)
2057 fprintf (dump_file, "Target is artificial\n\n");
2058 nartificial++;
2059 continue;
2060 }
bbc9396b
JH
2061 if (cgraph_function_body_availability (likely_target)
2062 <= AVAIL_OVERWRITABLE
67348ccc 2063 && symtab_can_be_discarded (likely_target))
bbc9396b
JH
2064 {
2065 if (dump_file)
ec77d61f 2066 fprintf (dump_file, "Target is overwritable\n\n");
bbc9396b
JH
2067 noverwritable++;
2068 continue;
2069 }
2070 else
2071 {
2072 if (dump_file)
2073 fprintf (dump_file,
ec77d61f 2074 "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
fec39fa6
TS
2075 n->name (), n->order,
2076 likely_target->name (),
67348ccc
DM
2077 likely_target->order);
2078 if (!symtab_can_be_discarded (likely_target))
5b79657a
JH
2079 {
2080 cgraph_node *alias;
2081 alias = cgraph (symtab_nonoverwritable_alias
67348ccc 2082 (likely_target));
5b79657a
JH
2083 if (alias)
2084 likely_target = alias;
2085 }
bbc9396b
JH
2086 nconverted++;
2087 update = true;
2088 cgraph_turn_edge_to_speculative
2089 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2090 }
2091 }
2092 if (update)
2093 inline_update_overall_summary (n);
2094 }
2095 pointer_set_destroy (bad_call_targets);
2096
2097 if (dump_file)
2098 fprintf (dump_file,
2099 "%i polymorphic calls, %i devirtualized,"
2100 " %i speculatively devirtualized, %i cold\n"
2101 "%i have multiple targets, %i overwritable,"
2102 " %i already speculated (%i agree, %i disagree),"
570215f9 2103 " %i external, %i not defined, %i artificial\n",
bbc9396b
JH
2104 npolymorphic, ndevirtualized, nconverted, ncold,
2105 nmultiple, noverwritable, nspeculated, nok, nwrong,
570215f9 2106 nexternal, nnotdefined, nartificial);
bbc9396b
JH
2107 return ndevirtualized ? TODO_remove_functions : 0;
2108}
2109
bbc9396b
JH
2110namespace {
2111
2112const pass_data pass_data_ipa_devirt =
2113{
2114 IPA_PASS, /* type */
2115 "devirt", /* name */
2116 OPTGROUP_NONE, /* optinfo_flags */
bbc9396b
JH
2117 true, /* has_execute */
2118 TV_IPA_DEVIRT, /* tv_id */
2119 0, /* properties_required */
2120 0, /* properties_provided */
2121 0, /* properties_destroyed */
2122 0, /* todo_flags_start */
2123 ( TODO_dump_symtab ), /* todo_flags_finish */
2124};
2125
2126class pass_ipa_devirt : public ipa_opt_pass_d
2127{
2128public:
c3284718
RS
2129 pass_ipa_devirt (gcc::context *ctxt)
2130 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2131 NULL, /* generate_summary */
2132 NULL, /* write_summary */
2133 NULL, /* read_summary */
2134 NULL, /* write_optimization_summary */
2135 NULL, /* read_optimization_summary */
2136 NULL, /* stmt_fixup */
2137 0, /* function_transform_todo_flags_start */
2138 NULL, /* function_transform */
2139 NULL) /* variable_transform */
bbc9396b
JH
2140 {}
2141
2142 /* opt_pass methods: */
1a3d085c
TS
2143 virtual bool gate (function *)
2144 {
2145 return (flag_devirtualize
2146 && flag_devirtualize_speculatively
2147 && optimize);
2148 }
2149
be55bfe6 2150 virtual unsigned int execute (function *) { return ipa_devirt (); }
bbc9396b
JH
2151
2152}; // class pass_ipa_devirt
2153
2154} // anon namespace
2155
2156ipa_opt_pass_d *
2157make_pass_ipa_devirt (gcc::context *ctxt)
2158{
2159 return new pass_ipa_devirt (ctxt);
2160}
2161
eefe9a99 2162#include "gt-ipa-devirt.h"