]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-devirt.c
fix typo in winnt.c
[thirdparty/gcc.git] / gcc / ipa-devirt.c
CommitLineData
eefe9a99
JH
1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
23a5b65a 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
eefe9a99
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocalburary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
0e1474e5 40 This is the Gimple representation of type information of a polymorphic call.
eefe9a99
JH
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
0e1474e5 43 otr_token is the index into virtual table where address is taken.
eefe9a99
JH
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frotend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
0e1474e5 58 virtual table of the base type. Also BINFO_OFFSET specifies
eefe9a99
JH
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
0e1474e5 75 or from DECL_VINDEX of a given virtual table.
eefe9a99
JH
76
77 polymorphic (indirect) call
78 This is callgraph represention of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
0e1474e5 89 inserted into the graph. Also types without virtual methods are not
eefe9a99
JH
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
0e1474e5 98 Edges are represented by odr_type->base and odr_type->derived_types.
eefe9a99
JH
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
bbc9396b
JH
104
105 pass_ipa_devirt performs simple speculative devirtualization.
eefe9a99
JH
106*/
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
4d648807 112#include "tree.h"
d8a2d370
DN
113#include "print-tree.h"
114#include "calls.h"
eefe9a99 115#include "cgraph.h"
d8a2d370 116#include "expr.h"
eefe9a99 117#include "tree-pass.h"
eefe9a99
JH
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
2fb9a547
AM
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
eefe9a99 127#include "gimple.h"
bbc9396b 128#include "ipa-inline.h"
61a74079 129#include "diagnostic.h"
68377e53 130#include "tree-dfa.h"
ec77d61f 131#include "demangle.h"
2b5f0895 132#include "dbgcnt.h"
ec77d61f
JH
133
134static bool odr_violation_reported = false;
68377e53
JH
135
136/* Dummy polymorphic call context. */
137
138const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
eefe9a99 140
0e1474e5
JH
141/* Pointer set of all call targets appearing in the cache. */
142static pointer_set_t *cached_polymorphic_call_targets;
143
eefe9a99
JH
144/* The node of type inheritance graph. For each type unique in
145 One Defintion Rule (ODR) sense, we produce one node linking all
146 main variants of types equivalent to it, bases and derived types. */
147
148struct GTY(()) odr_type_d
149{
eefe9a99
JH
150 /* leader type. */
151 tree type;
152 /* All bases. */
153 vec<odr_type> GTY((skip)) bases;
154 /* All derrived types with virtual methods seen in unit. */
155 vec<odr_type> GTY((skip)) derived_types;
0e1474e5 156
61a74079
JH
157 /* All equivalent types, if more than one. */
158 vec<tree, va_gc> *types;
159 /* Set of all equivalent types, if NON-NULL. */
160 pointer_set_t * GTY((skip)) types_set;
161
0e1474e5
JH
162 /* Unique ID indexing the type in odr_types array. */
163 int id;
eefe9a99
JH
164 /* Is it in anonymous namespace? */
165 bool anonymous_namespace;
2d1644bf
JH
166 /* Do we know about all derivations of given type? */
167 bool all_derivations_known;
eefe9a99
JH
168};
169
170
0e1474e5
JH
171/* Return true if BINFO corresponds to a type with virtual methods.
172
173 Every type has several BINFOs. One is the BINFO associated by the type
174 while other represents bases of derived types. The BINFOs representing
175 bases do not have BINFO_VTABLE pointer set when this is the single
176 inheritance (because vtables are shared). Look up the BINFO of type
177 and check presence of its vtable. */
eefe9a99
JH
178
179static inline bool
180polymorphic_type_binfo_p (tree binfo)
181{
182 /* See if BINFO's type has an virtual table associtated with it. */
183 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
184}
185
2d1644bf
JH
186/* Return TRUE if all derived types of T are known and thus
187 we may consider the walk of derived type complete.
188
189 This is typically true only for final anonymous namespace types and types
190 defined within functions (that may be COMDAT and thus shared across units,
191 but with the same set of derived types). */
192
193static bool
194type_all_derivations_known_p (tree t)
195{
196 if (TYPE_FINAL_P (t))
197 return true;
198 if (flag_ltrans)
199 return false;
200 if (type_in_anonymous_namespace_p (t))
201 return true;
202 return (decl_function_context (TYPE_NAME (t)) != NULL);
203}
204
205/* Return TURE if type's constructors are all visible. */
206
207static bool
208type_all_ctors_visible_p (tree t)
209{
210 return !flag_ltrans
211 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
212 /* We can not always use type_all_derivations_known_p.
213 For function local types we must assume case where
214 the function is COMDAT and shared in between units.
215
216 TODO: These cases are quite easy to get, but we need
217 to keep track of C++ privatizing via -Wno-weak
218 as well as the IPA privatizing. */
219 && type_in_anonymous_namespace_p (t);
220}
221
222/* Return TRUE if type may have instance. */
223
224static bool
225type_possibly_instantiated_p (tree t)
226{
227 tree vtable;
228 varpool_node *vnode;
229
230 /* TODO: Add abstract types here. */
231 if (!type_all_ctors_visible_p (t))
232 return true;
233
234 vtable = BINFO_VTABLE (TYPE_BINFO (t));
235 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
236 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
237 vnode = varpool_get_node (vtable);
238 return vnode && vnode->definition;
239}
240
eefe9a99
JH
241/* One Definition Rule hashtable helpers. */
242
243struct odr_hasher
244{
245 typedef odr_type_d value_type;
246 typedef union tree_node compare_type;
247 static inline hashval_t hash (const value_type *);
248 static inline bool equal (const value_type *, const compare_type *);
249 static inline void remove (value_type *);
250};
251
252/* Produce hash based on type name. */
253
254hashval_t
255hash_type_name (tree t)
256{
257 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
258
259 /* If not in LTO, all main variants are unique, so we can do
260 pointer hash. */
261 if (!in_lto_p)
262 return htab_hash_pointer (t);
263
264 /* Anonymous types are unique. */
265 if (type_in_anonymous_namespace_p (t))
266 return htab_hash_pointer (t);
267
61a74079
JH
268 /* For polymorphic types, we can simply hash the virtual table. */
269 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
270 {
271 tree v = BINFO_VTABLE (TYPE_BINFO (t));
272 hashval_t hash = 0;
273
274 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
275 {
276 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
277 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
278 }
279
280 v = DECL_ASSEMBLER_NAME (v);
61a74079
JH
281 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
282 return hash;
283 }
284
eefe9a99
JH
285 /* Rest is not implemented yet. */
286 gcc_unreachable ();
287}
288
289/* Return the computed hashcode for ODR_TYPE. */
290
291inline hashval_t
292odr_hasher::hash (const value_type *odr_type)
293{
294 return hash_type_name (odr_type->type);
295}
296
0e1474e5 297/* Compare types T1 and T2 and return true if they are
eefe9a99
JH
298 equivalent. */
299
300inline bool
301odr_hasher::equal (const value_type *t1, const compare_type *ct2)
302{
303 tree t2 = const_cast <tree> (ct2);
304
305 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
306 if (t1->type == t2)
307 return true;
308 if (!in_lto_p)
309 return false;
310 return types_same_for_odr (t1->type, t2);
311}
312
0e1474e5 313/* Free ODR type V. */
eefe9a99
JH
314
315inline void
316odr_hasher::remove (value_type *v)
317{
318 v->bases.release ();
319 v->derived_types.release ();
61a74079
JH
320 if (v->types_set)
321 pointer_set_destroy (v->types_set);
eefe9a99
JH
322 ggc_free (v);
323}
324
325/* ODR type hash used to lookup ODR type based on tree type node. */
326
c203e8a7
TS
327typedef hash_table<odr_hasher> odr_hash_type;
328static odr_hash_type *odr_hash;
eefe9a99
JH
329
330/* ODR types are also stored into ODR_TYPE vector to allow consistent
331 walking. Bases appear before derived types. Vector is garbage collected
332 so we won't end up visiting empty types. */
333
334static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
335#define odr_types (*odr_types_ptr)
336
61a74079
JH
337/* TYPE is equivalent to VAL by ODR, but its tree representation differs
338 from VAL->type. This may happen in LTO where tree merging did not merge
339 all variants of the same type. It may or may not mean the ODR violation.
340 Add it to the list of duplicates and warn on some violations. */
341
342static void
343add_type_duplicate (odr_type val, tree type)
344{
345 if (!val->types_set)
346 val->types_set = pointer_set_create ();
347
348 /* See if this duplicate is new. */
349 if (!pointer_set_insert (val->types_set, type))
350 {
351 bool merge = true;
352 bool base_mismatch = false;
353 gcc_assert (in_lto_p);
354 vec_safe_push (val->types, type);
355 unsigned int i,j;
356
357 /* First we compare memory layout. */
358 if (!types_compatible_p (val->type, type))
359 {
360 merge = false;
ec77d61f 361 odr_violation_reported = true;
61a74079
JH
362 if (BINFO_VTABLE (TYPE_BINFO (val->type))
363 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
364 "type %qD violates one definition rule ",
365 type))
366 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
367 "a type with the same name but different layout is "
368 "defined in another translation unit");
61a74079
JH
369 if (cgraph_dump_file)
370 {
371 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
372
373 print_node (cgraph_dump_file, "", val->type, 0);
374 putc ('\n',cgraph_dump_file);
375 print_node (cgraph_dump_file, "", type, 0);
376 putc ('\n',cgraph_dump_file);
377 }
378 }
379
380 /* Next sanity check that bases are the same. If not, we will end
381 up producing wrong answers. */
382 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
383 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
384 {
385 odr_type base = get_odr_type
386 (BINFO_TYPE
387 (BINFO_BASE_BINFO (TYPE_BINFO (type),
388 i)),
389 true);
390 if (val->bases.length () <= j || val->bases[j] != base)
391 base_mismatch = true;
392 j++;
393 }
394 if (base_mismatch)
395 {
396 merge = false;
ec77d61f 397 odr_violation_reported = true;
61a74079
JH
398
399 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
400 "type %qD violates one definition rule ",
401 type))
402 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
403 "a type with the same name but different bases is "
404 "defined in another translation unit");
405 if (cgraph_dump_file)
406 {
407 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
408
409 print_node (cgraph_dump_file, "", val->type, 0);
410 putc ('\n',cgraph_dump_file);
411 print_node (cgraph_dump_file, "", type, 0);
412 putc ('\n',cgraph_dump_file);
413 }
414 }
415
416 /* Regularize things a little. During LTO same types may come with
417 different BINFOs. Either because their virtual table was
418 not merged by tree merging and only later at decl merging or
419 because one type comes with external vtable, while other
420 with internal. We want to merge equivalent binfos to conserve
421 memory and streaming overhead.
422
423 The external vtables are more harmful: they contain references
424 to external declarations of methods that may be defined in the
425 merged LTO unit. For this reason we absolutely need to remove
426 them and replace by internal variants. Not doing so will lead
427 to incomplete answers from possible_polymorphic_call_targets. */
428 if (!flag_ltrans && merge)
429 {
430 tree master_binfo = TYPE_BINFO (val->type);
431 tree v1 = BINFO_VTABLE (master_binfo);
432 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
433
434 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
435 {
436 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
437 && operand_equal_p (TREE_OPERAND (v1, 1),
438 TREE_OPERAND (v2, 1), 0));
439 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
440 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
441 }
442 gcc_assert (DECL_ASSEMBLER_NAME (v1)
443 == DECL_ASSEMBLER_NAME (v2));
444
445 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
446 {
447 unsigned int i;
448
449 TYPE_BINFO (val->type) = TYPE_BINFO (type);
c3284718 450 for (i = 0; i < val->types->length (); i++)
61a74079
JH
451 {
452 if (TYPE_BINFO ((*val->types)[i])
453 == master_binfo)
454 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
455 }
456 }
457 else
458 TYPE_BINFO (type) = master_binfo;
459 }
460 }
461}
462
eefe9a99
JH
463/* Get ODR type hash entry for TYPE. If INSERT is true, create
464 possibly new entry. */
465
466odr_type
467get_odr_type (tree type, bool insert)
468{
469 odr_type_d **slot;
470 odr_type val;
471 hashval_t hash;
472
473 type = TYPE_MAIN_VARIANT (type);
474 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
475 hash = hash_type_name (type);
c203e8a7
TS
476 slot
477 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
eefe9a99
JH
478 if (!slot)
479 return NULL;
480
481 /* See if we already have entry for type. */
482 if (*slot)
483 {
484 val = *slot;
485
61a74079
JH
486 /* With LTO we need to support multiple tree representation of
487 the same ODR type. */
488 if (val->type != type)
489 add_type_duplicate (val, type);
eefe9a99
JH
490 }
491 else
492 {
493 tree binfo = TYPE_BINFO (type);
494 unsigned int i;
495
766090c2 496 val = ggc_cleared_alloc<odr_type_d> ();
eefe9a99
JH
497 val->type = type;
498 val->bases = vNULL;
499 val->derived_types = vNULL;
0e1474e5 500 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
2d1644bf 501 val->all_derivations_known = type_all_derivations_known_p (type);
eefe9a99
JH
502 *slot = val;
503 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
504 /* For now record only polymorphic types. other are
505 pointless for devirtualization and we can not precisely
506 determine ODR equivalency of these during LTO. */
507 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
508 {
509 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
510 i)),
511 true);
512 base->derived_types.safe_push (val);
513 val->bases.safe_push (base);
514 }
515 /* First record bases, then add into array so ids are increasing. */
516 if (odr_types_ptr)
c3284718 517 val->id = odr_types.length ();
eefe9a99
JH
518 vec_safe_push (odr_types_ptr, val);
519 }
520 return val;
521}
522
523/* Dump ODR type T and all its derrived type. INDENT specify indentation for
524 recusive printing. */
525
526static void
527dump_odr_type (FILE *f, odr_type t, int indent=0)
528{
529 unsigned int i;
530 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
531 print_generic_expr (f, t->type, TDF_SLIM);
2d1644bf
JH
532 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
533 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
eefe9a99
JH
534 if (TYPE_NAME (t->type))
535 {
536 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
537 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
538 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
539 }
c3284718 540 if (t->bases.length ())
eefe9a99
JH
541 {
542 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 543 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
544 fprintf (f, " %i", t->bases[i]->id);
545 fprintf (f, "\n");
546 }
c3284718 547 if (t->derived_types.length ())
eefe9a99
JH
548 {
549 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 550 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
551 dump_odr_type (f, t->derived_types[i], indent + 1);
552 }
553 fprintf (f, "\n");
554}
555
556/* Dump the type inheritance graph. */
557
558static void
559dump_type_inheritance_graph (FILE *f)
560{
561 unsigned int i;
0e1474e5
JH
562 if (!odr_types_ptr)
563 return;
eefe9a99 564 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 565 for (i = 0; i < odr_types.length (); i++)
eefe9a99 566 {
c3284718 567 if (odr_types[i]->bases.length () == 0)
eefe9a99
JH
568 dump_odr_type (f, odr_types[i]);
569 }
c3284718 570 for (i = 0; i < odr_types.length (); i++)
61a74079 571 {
c3284718 572 if (odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
573 {
574 unsigned int j;
575 fprintf (f, "Duplicate tree types for odr type %i\n", i);
576 print_node (f, "", odr_types[i]->type, 0);
c3284718 577 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
578 {
579 tree t;
580 fprintf (f, "duplicate #%i\n", j);
581 print_node (f, "", (*odr_types[i]->types)[j], 0);
582 t = (*odr_types[i]->types)[j];
583 while (TYPE_P (t) && TYPE_CONTEXT (t))
584 {
585 t = TYPE_CONTEXT (t);
586 print_node (f, "", t, 0);
587 }
588 putc ('\n',f);
589 }
590 }
591 }
eefe9a99
JH
592}
593
594/* Given method type T, return type of class it belongs to.
595 Lookup this pointer and get its type. */
596
64cbf23d 597tree
eefe9a99
JH
598method_class_type (tree t)
599{
600 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 601 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
602
603 return TREE_TYPE (first_parm_type);
604}
605
606/* Initialize IPA devirt and build inheritance tree graph. */
607
608void
609build_type_inheritance_graph (void)
610{
b270b096 611 struct symtab_node *n;
eefe9a99
JH
612 FILE *inheritance_dump_file;
613 int flags;
614
c203e8a7 615 if (odr_hash)
eefe9a99
JH
616 return;
617 timevar_push (TV_IPA_INHERITANCE);
618 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
c203e8a7 619 odr_hash = new odr_hash_type (23);
eefe9a99
JH
620
621 /* We reconstruct the graph starting of types of all methods seen in the
622 the unit. */
b270b096 623 FOR_EACH_SYMBOL (n)
7de90a6c 624 if (is_a <cgraph_node *> (n)
b270b096 625 && DECL_VIRTUAL_P (n->decl)
67348ccc
DM
626 && symtab_real_symbol_p (n))
627 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
b270b096
JH
628
629 /* Look also for virtual tables of types that do not define any methods.
630
631 We need it in a case where class B has virtual base of class A
632 re-defining its virtual method and there is class C with no virtual
633 methods with B as virtual base.
634
635 Here we output B's virtual method in two variant - for non-virtual
636 and virtual inheritance. B's virtual table has non-virtual version,
637 while C's has virtual.
638
639 For this reason we need to know about C in order to include both
640 variants of B. More correctly, record_target_from_binfo should
641 add both variants of the method when walking B, but we have no
642 link in between them.
643
644 We rely on fact that either the method is exported and thus we
645 assume it is called externally or C is in anonymous namespace and
646 thus we will see the vtable. */
647
7de90a6c 648 else if (is_a <varpool_node *> (n)
b270b096
JH
649 && DECL_VIRTUAL_P (n->decl)
650 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
651 && TYPE_BINFO (DECL_CONTEXT (n->decl))
652 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
653 get_odr_type (DECL_CONTEXT (n->decl), true);
eefe9a99
JH
654 if (inheritance_dump_file)
655 {
656 dump_type_inheritance_graph (inheritance_dump_file);
657 dump_end (TDI_inheritance, inheritance_dump_file);
658 }
659 timevar_pop (TV_IPA_INHERITANCE);
660}
661
ccb05ef2
JH
662/* Return true if N has reference from live virtual table
663 (and thus can be a destination of polymorphic call).
664 Be conservatively correct when callgraph is not built or
665 if the method may be referred externally. */
666
667static bool
668referenced_from_vtable_p (struct cgraph_node *node)
669{
670 int i;
671 struct ipa_ref *ref;
672 bool found = false;
673
674 if (node->externally_visible
675 || node->used_from_other_partition)
676 return true;
677
678 /* Keep this test constant time.
679 It is unlikely this can happen except for the case where speculative
680 devirtualization introduced many speculative edges to this node.
681 In this case the target is very likely alive anyway. */
682 if (node->ref_list.referring.length () > 100)
683 return true;
684
685 /* We need references built. */
686 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
687 return true;
688
689 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
690 i, ref); i++)
691
692 if ((ref->use == IPA_REF_ALIAS
693 && referenced_from_vtable_p (cgraph (ref->referring)))
694 || (ref->use == IPA_REF_ADDR
695 && TREE_CODE (ref->referring->decl) == VAR_DECL
696 && DECL_VIRTUAL_P (ref->referring->decl)))
697 {
698 found = true;
699 break;
700 }
701 return found;
702}
703
68377e53 704/* If TARGET has associated node, record it in the NODES array.
ec77d61f
JH
705 CAN_REFER specify if program can refer to the target directly.
706 if TARGET is unknown (NULL) or it can not be inserted (for example because
707 its body was already removed and there is no way to refer to it), clear
708 COMPLETEP. */
eefe9a99
JH
709
710static void
711maybe_record_node (vec <cgraph_node *> &nodes,
68377e53 712 tree target, pointer_set_t *inserted,
ec77d61f 713 bool can_refer,
68377e53 714 bool *completep)
eefe9a99
JH
715{
716 struct cgraph_node *target_node;
88f592e3
JH
717
718 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
719 list of targets; the runtime effect of calling them is undefined.
720 Only "real" virtual methods should be accounted. */
721 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
722 return;
eefe9a99 723
ec77d61f
JH
724 if (!can_refer)
725 {
726 /* The only case when method of anonymous namespace becomes unreferable
727 is when we completely optimized it out. */
728 if (flag_ltrans
729 || !target
88f592e3 730 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
ec77d61f
JH
731 *completep = false;
732 return;
733 }
734
88f592e3 735 if (!target)
68377e53
JH
736 return;
737
738 target_node = cgraph_get_node (target);
739
ccb05ef2
JH
740 /* Method can only be called by polymorphic call if any
741 of vtables refering to it are alive.
742
743 While this holds for non-anonymous functions, too, there are
744 cases where we want to keep them in the list; for example
745 inline functions with -fno-weak are static, but we still
746 may devirtualize them when instance comes from other unit.
747 The same holds for LTO.
748
749 Currently we ignore these functions in speculative devirtualization.
750 ??? Maybe it would make sense to be more aggressive for LTO even
751 eslewhere. */
752 if (!flag_ltrans
753 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
754 && (!target_node
755 || !referenced_from_vtable_p (target_node)))
756 ;
757 /* See if TARGET is useful function we can deal with. */
758 else if (target_node != NULL
759 && (TREE_PUBLIC (target)
760 || DECL_EXTERNAL (target)
761 || target_node->definition)
762 && symtab_real_symbol_p (target_node))
0e1474e5 763 {
68377e53
JH
764 gcc_assert (!target_node->global.inlined_to);
765 gcc_assert (symtab_real_symbol_p (target_node));
766 if (!pointer_set_insert (inserted, target))
767 {
768 pointer_set_insert (cached_polymorphic_call_targets,
769 target_node);
770 nodes.safe_push (target_node);
771 }
0e1474e5 772 }
68377e53 773 else if (completep
2d1644bf
JH
774 && (!type_in_anonymous_namespace_p
775 (DECL_CONTEXT (target))
776 || flag_ltrans))
0439a947 777 *completep = false;
eefe9a99
JH
778}
779
68377e53
JH
780/* See if BINFO's type match OUTER_TYPE. If so, lookup
781 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2d1644bf
JH
782 method in vtable and insert method to NODES array
783 or BASES_TO_CONSIDER if this array is non-NULL.
eefe9a99
JH
784 Otherwise recurse to base BINFOs.
785 This match what get_binfo_at_offset does, but with offset
786 being unknown.
787
a3788dde
JH
788 TYPE_BINFOS is a stack of BINFOS of types with defined
789 virtual table seen on way from class type to BINFO.
eefe9a99
JH
790
791 MATCHED_VTABLES tracks virtual tables we already did lookup
68377e53
JH
792 for virtual function in. INSERTED tracks nodes we already
793 inserted.
3462aa02
JH
794
795 ANONYMOUS is true if BINFO is part of anonymous namespace.
ec77d61f
JH
796
797 Clear COMPLETEP when we hit unreferable target.
eefe9a99
JH
798 */
799
800static void
68377e53 801record_target_from_binfo (vec <cgraph_node *> &nodes,
2d1644bf 802 vec <tree> *bases_to_consider,
68377e53
JH
803 tree binfo,
804 tree otr_type,
a3788dde 805 vec <tree> &type_binfos,
68377e53
JH
806 HOST_WIDE_INT otr_token,
807 tree outer_type,
808 HOST_WIDE_INT offset,
809 pointer_set_t *inserted,
810 pointer_set_t *matched_vtables,
ec77d61f
JH
811 bool anonymous,
812 bool *completep)
eefe9a99
JH
813{
814 tree type = BINFO_TYPE (binfo);
815 int i;
816 tree base_binfo;
817
eefe9a99 818
a3788dde
JH
819 if (BINFO_VTABLE (binfo))
820 type_binfos.safe_push (binfo);
68377e53 821 if (types_same_for_odr (type, outer_type))
eefe9a99 822 {
a3788dde
JH
823 int i;
824 tree type_binfo = NULL;
825
826 /* Lookup BINFO with virtual table. For normal types it is always last
827 binfo on stack. */
828 for (i = type_binfos.length () - 1; i >= 0; i--)
829 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
830 {
831 type_binfo = type_binfos[i];
832 break;
833 }
834 if (BINFO_VTABLE (binfo))
835 type_binfos.pop ();
836 /* If this is duplicated BINFO for base shared by virtual inheritance,
837 we may not have its associated vtable. This is not a problem, since
838 we will walk it on the other path. */
839 if (!type_binfo)
6d6af792 840 return;
68377e53
JH
841 tree inner_binfo = get_binfo_at_offset (type_binfo,
842 offset, otr_type);
ec77d61f
JH
843 if (!inner_binfo)
844 {
845 gcc_assert (odr_violation_reported);
846 return;
847 }
3462aa02
JH
848 /* For types in anonymous namespace first check if the respective vtable
849 is alive. If not, we know the type can't be called. */
850 if (!flag_ltrans && anonymous)
851 {
68377e53 852 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 853 varpool_node *vnode;
3462aa02
JH
854
855 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
856 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
857 vnode = varpool_get_node (vtable);
67348ccc 858 if (!vnode || !vnode->definition)
3462aa02
JH
859 return;
860 }
68377e53 861 gcc_assert (inner_binfo);
2d1644bf
JH
862 if (bases_to_consider
863 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
864 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
68377e53 865 {
ec77d61f
JH
866 bool can_refer;
867 tree target = gimple_get_virt_method_for_binfo (otr_token,
868 inner_binfo,
869 &can_refer);
2d1644bf
JH
870 if (!bases_to_consider)
871 maybe_record_node (nodes, target, inserted, can_refer, completep);
872 /* Destructors are never called via construction vtables. */
873 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
874 bases_to_consider->safe_push (target);
68377e53 875 }
eefe9a99
JH
876 return;
877 }
878
879 /* Walk bases. */
880 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
881 /* Walking bases that have no virtual method is pointless excercise. */
882 if (polymorphic_type_binfo_p (base_binfo))
2d1644bf 883 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
a3788dde 884 type_binfos,
68377e53 885 otr_token, outer_type, offset, inserted,
ec77d61f 886 matched_vtables, anonymous, completep);
a3788dde
JH
887 if (BINFO_VTABLE (binfo))
888 type_binfos.pop ();
eefe9a99
JH
889}
890
891/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
892 of TYPE, insert them to NODES, recurse into derived nodes.
893 INSERTED is used to avoid duplicate insertions of methods into NODES.
ec77d61f 894 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2d1644bf
JH
895 Clear COMPLETEP if unreferable target is found.
896
897 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
898 all cases where BASE_SKIPPED is true (because the base is abstract
899 class). */
eefe9a99
JH
900
901static void
902possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
903 pointer_set_t *inserted,
904 pointer_set_t *matched_vtables,
905 tree otr_type,
906 odr_type type,
68377e53
JH
907 HOST_WIDE_INT otr_token,
908 tree outer_type,
ec77d61f 909 HOST_WIDE_INT offset,
2d1644bf
JH
910 bool *completep,
911 vec <tree> &bases_to_consider,
912 bool consider_construction)
eefe9a99
JH
913{
914 tree binfo = TYPE_BINFO (type->type);
915 unsigned int i;
a3788dde 916 vec <tree> type_binfos = vNULL;
2d1644bf
JH
917 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
918
919 /* We may need to consider types w/o instances because of possible derived
920 types using their methods either directly or via construction vtables.
921 We are safe to skip them when all derivations are known, since we will
922 handle them later.
923 This is done by recording them to BASES_TO_CONSIDER array. */
924 if (possibly_instantiated || consider_construction)
925 {
926 record_target_from_binfo (nodes,
927 (!possibly_instantiated
928 && type_all_derivations_known_p (type->type))
929 ? &bases_to_consider : NULL,
930 binfo, otr_type, type_binfos, otr_token,
931 outer_type, offset,
932 inserted, matched_vtables,
933 type->anonymous_namespace, completep);
934 }
a3788dde 935 type_binfos.release ();
c3284718 936 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
937 possible_polymorphic_call_targets_1 (nodes, inserted,
938 matched_vtables,
939 otr_type,
940 type->derived_types[i],
2d1644bf
JH
941 otr_token, outer_type, offset, completep,
942 bases_to_consider, consider_construction);
eefe9a99
JH
943}
944
945/* Cache of queries for polymorphic call targets.
946
947 Enumerating all call targets may get expensive when there are many
948 polymorphic calls in the program, so we memoize all the previous
949 queries and avoid duplicated work. */
950
951struct polymorphic_call_target_d
952{
eefe9a99 953 HOST_WIDE_INT otr_token;
68377e53
JH
954 ipa_polymorphic_call_context context;
955 odr_type type;
eefe9a99 956 vec <cgraph_node *> targets;
ec77d61f
JH
957 int nonconstruction_targets;
958 bool complete;
eefe9a99
JH
959};
960
961/* Polymorphic call target cache helpers. */
962
963struct polymorphic_call_target_hasher
964{
965 typedef polymorphic_call_target_d value_type;
966 typedef polymorphic_call_target_d compare_type;
967 static inline hashval_t hash (const value_type *);
968 static inline bool equal (const value_type *, const compare_type *);
969 static inline void remove (value_type *);
970};
971
972/* Return the computed hashcode for ODR_QUERY. */
973
974inline hashval_t
975polymorphic_call_target_hasher::hash (const value_type *odr_query)
976{
68377e53
JH
977 hashval_t hash;
978
979 hash = iterative_hash_host_wide_int
980 (odr_query->otr_token,
981 odr_query->type->id);
982 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
983 hash);
984 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
985 return iterative_hash_hashval_t
986 (((int)odr_query->context.maybe_in_construction << 1)
987 | (int)odr_query->context.maybe_derived_type, hash);
eefe9a99
JH
988}
989
990/* Compare cache entries T1 and T2. */
991
992inline bool
993polymorphic_call_target_hasher::equal (const value_type *t1,
994 const compare_type *t2)
995{
68377e53
JH
996 return (t1->type == t2->type && t1->otr_token == t2->otr_token
997 && t1->context.offset == t2->context.offset
998 && t1->context.outer_type == t2->context.outer_type
999 && t1->context.maybe_in_construction
1000 == t2->context.maybe_in_construction
1001 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
eefe9a99
JH
1002}
1003
1004/* Remove entry in polymorphic call target cache hash. */
1005
1006inline void
1007polymorphic_call_target_hasher::remove (value_type *v)
1008{
1009 v->targets.release ();
1010 free (v);
1011}
1012
1013/* Polymorphic call target query cache. */
1014
c203e8a7 1015typedef hash_table<polymorphic_call_target_hasher>
eefe9a99 1016 polymorphic_call_target_hash_type;
c203e8a7 1017static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
eefe9a99
JH
1018
1019/* Destroy polymorphic call target query cache. */
1020
1021static void
1022free_polymorphic_call_targets_hash ()
1023{
0e1474e5
JH
1024 if (cached_polymorphic_call_targets)
1025 {
c203e8a7
TS
1026 delete polymorphic_call_target_hash;
1027 polymorphic_call_target_hash = NULL;
0e1474e5
JH
1028 pointer_set_destroy (cached_polymorphic_call_targets);
1029 cached_polymorphic_call_targets = NULL;
1030 }
eefe9a99
JH
1031}
1032
1033/* When virtual function is removed, we may need to flush the cache. */
1034
1035static void
1036devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1037{
0e1474e5
JH
1038 if (cached_polymorphic_call_targets
1039 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
1040 free_polymorphic_call_targets_hash ();
1041}
1042
68377e53
JH
1043/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1044 is contained at CONTEXT->OFFSET. Walk the memory representation of
1045 CONTEXT->OUTER_TYPE and find the outermost class type that match
1046 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1047 to represent it.
1048
1049 For example when CONTEXT represents type
1050 class A
1051 {
1052 int a;
1053 class B b;
1054 }
1055 and we look for type at offset sizeof(int), we end up with B and offset 0.
1056 If the same is produced by multiple inheritance, we end up with A and offset
1057 sizeof(int).
1058
1059 If we can not find corresponding class, give up by setting
1060 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1061 Return true when lookup was sucesful. */
1062
1063static bool
1064get_class_context (ipa_polymorphic_call_context *context,
1065 tree expected_type)
1066{
1067 tree type = context->outer_type;
1068 HOST_WIDE_INT offset = context->offset;
1069
1070 /* Find the sub-object the constant actually refers to and mark whether it is
1071 an artificial one (as opposed to a user-defined one). */
1072 while (true)
1073 {
1074 HOST_WIDE_INT pos, size;
1075 tree fld;
1076
1077 /* On a match, just return what we found. */
1078 if (TREE_CODE (type) == TREE_CODE (expected_type)
1079 && types_same_for_odr (type, expected_type))
1080 {
3b4e93c3
JH
1081 /* Type can not contain itself on an non-zero offset. In that case
1082 just give up. */
1083 if (offset != 0)
1084 goto give_up;
68377e53
JH
1085 gcc_assert (offset == 0);
1086 return true;
1087 }
1088
1089 /* Walk fields and find corresponding on at OFFSET. */
1090 if (TREE_CODE (type) == RECORD_TYPE)
1091 {
1092 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1093 {
1094 if (TREE_CODE (fld) != FIELD_DECL)
1095 continue;
1096
1097 pos = int_bit_position (fld);
1098 size = tree_to_uhwi (DECL_SIZE (fld));
1099 if (pos <= offset && (pos + size) > offset)
1100 break;
1101 }
1102
1103 if (!fld)
1104 goto give_up;
1105
1106 type = TREE_TYPE (fld);
1107 offset -= pos;
1108 /* DECL_ARTIFICIAL represents a basetype. */
1109 if (!DECL_ARTIFICIAL (fld))
1110 {
1111 context->outer_type = type;
1112 context->offset = offset;
1113 /* As soon as we se an field containing the type,
1114 we know we are not looking for derivations. */
1115 context->maybe_derived_type = false;
1116 }
1117 }
1118 else if (TREE_CODE (type) == ARRAY_TYPE)
1119 {
1120 tree subtype = TREE_TYPE (type);
1121
1122 /* Give up if we don't know array size. */
1123 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1124 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1125 goto give_up;
1126 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1127 type = subtype;
1128 context->outer_type = type;
1129 context->offset = offset;
1130 context->maybe_derived_type = false;
1131 }
1132 /* Give up on anything else. */
1133 else
1134 goto give_up;
1135 }
1136
1137 /* If we failed to find subtype we look for, give up and fall back to the
1138 most generic query. */
1139give_up:
1140 context->outer_type = expected_type;
1141 context->offset = 0;
1142 context->maybe_derived_type = true;
e400f081
JH
1143 context->maybe_in_construction = true;
1144 /* POD can be changed to an instance of a polymorphic type by
1145 placement new. Here we play safe and assume that any
1146 non-polymorphic type is POD. */
1147 if ((TREE_CODE (type) != RECORD_TYPE
1148 || !TYPE_BINFO (type)
1149 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1150 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1151 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1152 tree_to_uhwi (TYPE_SIZE (type)))))
1153 return true;
68377e53
JH
1154 return false;
1155}
1156
1157/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1158
1159static bool
1160contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1161 tree otr_type)
1162{
1163 ipa_polymorphic_call_context context = {offset, outer_type,
1164 false, true};
1165 return get_class_context (&context, otr_type);
1166}
1167
390675c8
JH
1168/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1169
1170static tree
85942f45
JH
1171subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1172 tree vtable)
390675c8
JH
1173{
1174 tree v = BINFO_VTABLE (binfo);
1175 int i;
1176 tree base_binfo;
85942f45 1177 unsigned HOST_WIDE_INT this_offset;
390675c8 1178
85942f45
JH
1179 if (v)
1180 {
1181 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1182 gcc_unreachable ();
1183
1184 if (offset == this_offset
1185 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1186 return binfo;
1187 }
390675c8 1188
390675c8
JH
1189 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1190 if (polymorphic_type_binfo_p (base_binfo))
1191 {
1192 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1193 if (base_binfo)
1194 return base_binfo;
1195 }
1196 return NULL;
1197}
1198
85942f45
JH
1199/* T is known constant value of virtual table pointer.
1200 Store virtual table to V and its offset to OFFSET.
1201 Return false if T does not look like virtual table reference. */
390675c8 1202
85942f45
JH
1203bool
1204vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
390675c8
JH
1205{
1206 /* We expect &MEM[(void *)&virtual_table + 16B].
1207 We obtain object's BINFO from the context of the virtual table.
1208 This one contains pointer to virtual table represented via
1209 POINTER_PLUS_EXPR. Verify that this pointer match to what
1210 we propagated through.
1211
1212 In the case of virtual inheritance, the virtual tables may
1213 be nested, i.e. the offset may be different from 16 and we may
1214 need to dive into the type representation. */
85942f45 1215 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
1216 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1217 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1218 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1219 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1220 == VAR_DECL)
1221 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1222 (TREE_OPERAND (t, 0), 0), 0)))
1223 {
85942f45
JH
1224 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1225 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1226 return true;
390675c8 1227 }
85942f45
JH
1228
1229 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1230 We need to handle it when T comes from static variable initializer or
1231 BINFO. */
1232 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1233 {
1234 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1235 t = TREE_OPERAND (t, 0);
1236 }
1237 else
1238 *offset = 0;
1239
1240 if (TREE_CODE (t) != ADDR_EXPR)
1241 return false;
1242 *v = TREE_OPERAND (t, 0);
1243 return true;
1244}
1245
1246/* T is known constant value of virtual table pointer. Return BINFO of the
1247 instance type. */
1248
1249tree
1250vtable_pointer_value_to_binfo (tree t)
1251{
1252 tree vtable;
1253 unsigned HOST_WIDE_INT offset;
1254
1255 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1256 return NULL_TREE;
1257
1258 /* FIXME: for stores of construction vtables we return NULL,
1259 because we do not have BINFO for those. Eventually we should fix
1260 our representation to allow this case to be handled, too.
1261 In the case we see store of BINFO we however may assume
1262 that standard folding will be ale to cope with it. */
1263 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1264 offset, vtable);
390675c8
JH
1265}
1266
5bccb77a
JH
1267/* Proudce polymorphic call context for call method of instance
1268 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1269
1270static void
1271get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1272 tree base, HOST_WIDE_INT offset)
1273{
1274 gcc_assert (DECL_P (base));
1275
1276 context->outer_type = TREE_TYPE (base);
1277 context->offset = offset;
1278 /* Make very conservative assumption that all objects
1279 may be in construction.
1280 TODO: ipa-prop already contains code to tell better.
1281 merge it later. */
1282 context->maybe_in_construction = true;
1283 context->maybe_derived_type = false;
1284}
1285
1286/* CST is an invariant (address of decl), try to get meaningful
1287 polymorphic call context for polymorphic call of method
1288 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1289 Return FALSE if nothing meaningful can be found. */
1290
1291bool
1292get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1293 tree cst,
1294 tree otr_type,
1295 HOST_WIDE_INT offset)
1296{
1297 HOST_WIDE_INT offset2, size, max_size;
1298 tree base;
1299
1300 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 1301 return false;
5bccb77a
JH
1302
1303 cst = TREE_OPERAND (cst, 0);
1304 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
1305 if (!DECL_P (base) || max_size == -1 || max_size != size)
1306 return false;
5bccb77a
JH
1307
1308 /* Only type inconsistent programs can have otr_type that is
1309 not part of outer type. */
79c7de84
EB
1310 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1311 return false;
5bccb77a 1312
79c7de84 1313 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
1314 return true;
1315}
1316
68377e53
JH
1317/* Given REF call in FNDECL, determine class of the polymorphic
1318 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1319 Return pointer to object described by the context */
1320
1321tree
1322get_polymorphic_call_info (tree fndecl,
1323 tree ref,
1324 tree *otr_type,
1325 HOST_WIDE_INT *otr_token,
1326 ipa_polymorphic_call_context *context)
1327{
1328 tree base_pointer;
1329 *otr_type = obj_type_ref_class (ref);
1330 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1331
1332 /* Set up basic info in case we find nothing interesting in the analysis. */
1333 context->outer_type = *otr_type;
1334 context->offset = 0;
1335 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1336 context->maybe_derived_type = true;
2d1644bf 1337 context->maybe_in_construction = true;
68377e53
JH
1338
1339 /* Walk SSA for outer object. */
1340 do
1341 {
1342 if (TREE_CODE (base_pointer) == SSA_NAME
1343 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1344 && SSA_NAME_DEF_STMT (base_pointer)
1345 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1346 {
1347 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1348 STRIP_NOPS (base_pointer);
1349 }
1350 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1351 {
1352 HOST_WIDE_INT size, max_size;
1353 HOST_WIDE_INT offset2;
1354 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1355 &offset2, &size, &max_size);
1356
1357 /* If this is a varying address, punt. */
1358 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1359 && max_size != -1
1360 && max_size == size)
1361 {
1362 /* We found dereference of a pointer. Type of the pointer
1363 and MEM_REF is meaningless, but we can look futher. */
1364 if (TREE_CODE (base) == MEM_REF)
1365 {
1366 base_pointer = TREE_OPERAND (base, 0);
1367 context->offset
807e902e 1368 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
1369 context->outer_type = NULL;
1370 }
1371 /* We found base object. In this case the outer_type
1372 is known. */
1373 else if (DECL_P (base))
1374 {
7656ee72 1375 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
1376
1377 /* Only type inconsistent programs can have otr_type that is
1378 not part of outer type. */
7656ee72
JH
1379 if (!contains_type_p (TREE_TYPE (base),
1380 context->offset + offset2, *otr_type))
3e86c6a8
JH
1381 {
1382 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1383 code sequences; we arrange the calls to be builtin_unreachable
1384 later. */
1385 *otr_token = INT_MAX;
1386 return base_pointer;
1387 }
5bccb77a
JH
1388 get_polymorphic_call_info_for_decl (context, base,
1389 context->offset + offset2);
7656ee72 1390 return NULL;
68377e53
JH
1391 }
1392 else
1393 break;
1394 }
1395 else
1396 break;
1397 }
1398 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1399 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1400 {
1401 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1402 * BITS_PER_UNIT;
1403 base_pointer = TREE_OPERAND (base_pointer, 0);
1404 }
1405 else
1406 break;
1407 }
1408 while (true);
1409
1410 /* Try to determine type of the outer object. */
1411 if (TREE_CODE (base_pointer) == SSA_NAME
1412 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1413 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1414 {
1415 /* See if parameter is THIS pointer of a method. */
1416 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1417 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1418 {
1419 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1420 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1421
1422 /* Dynamic casting has possibly upcasted the type
1423 in the hiearchy. In this case outer type is less
1424 informative than inner type and we should forget
1425 about it. */
1426 if (!contains_type_p (context->outer_type, context->offset,
1427 *otr_type))
1428 {
1429 context->outer_type = NULL;
1430 return base_pointer;
1431 }
1432
1433 /* If the function is constructor or destructor, then
d74db8ff 1434 the type is possibly in construction, but we know
68377e53
JH
1435 it is not derived type. */
1436 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1437 || DECL_CXX_DESTRUCTOR_P (fndecl))
1438 {
1439 context->maybe_in_construction = true;
1440 context->maybe_derived_type = false;
1441 }
1442 else
1443 {
1444 context->maybe_derived_type = true;
1445 context->maybe_in_construction = false;
1446 }
1447 return base_pointer;
1448 }
1449 /* Non-PODs passed by value are really passed by invisible
1450 reference. In this case we also know the type of the
1451 object. */
1452 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1453 {
1454 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1455 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1456 /* Only type inconsistent programs can have otr_type that is
1457 not part of outer type. */
1458 if (!contains_type_p (context->outer_type, context->offset,
1459 *otr_type))
1460 {
3e86c6a8
JH
1461 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1462 code sequences; we arrange the calls to be builtin_unreachable
1463 later. */
1464 *otr_token = INT_MAX;
68377e53
JH
1465 return base_pointer;
1466 }
1467 context->maybe_derived_type = false;
1468 context->maybe_in_construction = false;
1469 return base_pointer;
1470 }
1471 }
1472 /* TODO: There are multiple ways to derive a type. For instance
1473 if BASE_POINTER is passed to an constructor call prior our refernece.
1474 We do not make this type of flow sensitive analysis yet. */
1475 return base_pointer;
1476}
1477
1478/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1479 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1480 and insert them to NODES.
1481
1482 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1483
1484static void
1485record_targets_from_bases (tree otr_type,
1486 HOST_WIDE_INT otr_token,
1487 tree outer_type,
1488 HOST_WIDE_INT offset,
ec77d61f 1489 vec <cgraph_node *> &nodes,
68377e53
JH
1490 pointer_set_t *inserted,
1491 pointer_set_t *matched_vtables,
1492 bool *completep)
1493{
1494 while (true)
1495 {
1496 HOST_WIDE_INT pos, size;
1497 tree base_binfo;
1498 tree fld;
1499
1500 if (types_same_for_odr (outer_type, otr_type))
1501 return;
1502
1503 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1504 {
1505 if (TREE_CODE (fld) != FIELD_DECL)
1506 continue;
1507
1508 pos = int_bit_position (fld);
1509 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
1510 if (pos <= offset && (pos + size) > offset
1511 /* Do not get confused by zero sized bases. */
1512 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
1513 break;
1514 }
1515 /* Within a class type we should always find correcponding fields. */
1516 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1517
1518 /* Nonbasetypes should have been stripped by outer_class_type. */
1519 gcc_assert (DECL_ARTIFICIAL (fld));
1520
1521 outer_type = TREE_TYPE (fld);
1522 offset -= pos;
1523
1524 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1525 offset, otr_type);
ec77d61f
JH
1526 if (!base_binfo)
1527 {
1528 gcc_assert (odr_violation_reported);
1529 return;
1530 }
68377e53
JH
1531 gcc_assert (base_binfo);
1532 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1533 {
ec77d61f
JH
1534 bool can_refer;
1535 tree target = gimple_get_virt_method_for_binfo (otr_token,
1536 base_binfo,
1537 &can_refer);
2d1644bf
JH
1538 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1539 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
1540 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1541 }
1542 }
1543}
1544
3462aa02
JH
1545/* When virtual table is removed, we may need to flush the cache. */
1546
1547static void
2c8326a5 1548devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
1549 void *d ATTRIBUTE_UNUSED)
1550{
1551 if (cached_polymorphic_call_targets
67348ccc
DM
1552 && DECL_VIRTUAL_P (n->decl)
1553 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
1554 free_polymorphic_call_targets_hash ();
1555}
1556
eefe9a99 1557/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
1558 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1559 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1560 OTR_TYPE and include their virtual method. This is useful for types
1561 possibly in construction or destruction where the virtual table may
1562 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1563 us to walk the inheritance graph for all derivations.
1564
3e86c6a8
JH
1565 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1566 undefined and should be redirected to unreachable.
1567
add5c763 1568 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
1569 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1570 in the target cache. If user needs to visit every target list
1571 just once, it can memoize them.
1572
ec77d61f
JH
1573 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1574 the type is not in the construction. Those targets appear first in the
1575 vector returned.
1576
eefe9a99
JH
1577 Returned vector is placed into cache. It is NOT caller's responsibility
1578 to free it. The vector can be freed on cgraph_remove_node call if
1579 the particular node is a virtual function present in the cache. */
1580
1581vec <cgraph_node *>
1582possible_polymorphic_call_targets (tree otr_type,
1583 HOST_WIDE_INT otr_token,
68377e53
JH
1584 ipa_polymorphic_call_context context,
1585 bool *completep,
ec77d61f
JH
1586 void **cache_token,
1587 int *nonconstruction_targetsp)
eefe9a99
JH
1588{
1589 static struct cgraph_node_hook_list *node_removal_hook_holder;
1590 pointer_set_t *inserted;
1591 pointer_set_t *matched_vtables;
add5c763 1592 vec <cgraph_node *> nodes = vNULL;
2d1644bf 1593 vec <tree> bases_to_consider = vNULL;
68377e53 1594 odr_type type, outer_type;
eefe9a99
JH
1595 polymorphic_call_target_d key;
1596 polymorphic_call_target_d **slot;
1597 unsigned int i;
1598 tree binfo, target;
ec77d61f
JH
1599 bool complete;
1600 bool can_refer;
2d1644bf 1601 bool skipped = false;
eefe9a99 1602
3e86c6a8 1603 /* If ODR is not initialized, return empty incomplete list. */
c203e8a7 1604 if (!odr_hash)
79c7de84
EB
1605 {
1606 if (completep)
1607 *completep = false;
beb683ab
MJ
1608 if (cache_token)
1609 *cache_token = NULL;
ec77d61f
JH
1610 if (nonconstruction_targetsp)
1611 *nonconstruction_targetsp = 0;
79c7de84
EB
1612 return nodes;
1613 }
add5c763 1614
3e86c6a8
JH
1615 /* If we hit type inconsistency, just return empty list of targets. */
1616 if (otr_token == INT_MAX)
1617 {
1618 if (completep)
1619 *completep = true;
beb683ab
MJ
1620 if (cache_token)
1621 *cache_token = NULL;
3e86c6a8
JH
1622 if (nonconstruction_targetsp)
1623 *nonconstruction_targetsp = 0;
1624 return nodes;
1625 }
1626
68377e53 1627 type = get_odr_type (otr_type, true);
eefe9a99 1628
68377e53 1629 /* Lookup the outer class type we want to walk. */
3e86c6a8
JH
1630 if (context.outer_type
1631 && !get_class_context (&context, otr_type))
1632 {
1633 if (completep)
1634 *completep = false;
beb683ab
MJ
1635 if (cache_token)
1636 *cache_token = NULL;
3e86c6a8
JH
1637 if (nonconstruction_targetsp)
1638 *nonconstruction_targetsp = 0;
1639 return nodes;
1640 }
eefe9a99 1641
79c7de84 1642 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
1643
1644 /* Without an outer type, we have no use for the offset. Just do the
1645 basic search from the inner type. */
1646 if (!context.outer_type)
1647 {
1648 context.outer_type = otr_type;
1649 context.offset = 0;
1650 }
1651 /* We need to update our hierarchy if the type does not exist. */
1652 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 1653 /* If the type is final, there are no derivations. */
68377e53
JH
1654 if (TYPE_FINAL_P (outer_type->type))
1655 context.maybe_derived_type = false;
eefe9a99
JH
1656
1657 /* Initialize query cache. */
1658 if (!cached_polymorphic_call_targets)
1659 {
1660 cached_polymorphic_call_targets = pointer_set_create ();
c203e8a7
TS
1661 polymorphic_call_target_hash
1662 = new polymorphic_call_target_hash_type (23);
eefe9a99 1663 if (!node_removal_hook_holder)
3462aa02
JH
1664 {
1665 node_removal_hook_holder =
1666 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1667 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1668 NULL);
1669 }
eefe9a99
JH
1670 }
1671
1672 /* Lookup cached answer. */
1673 key.type = type;
1674 key.otr_token = otr_token;
68377e53 1675 key.context = context;
c203e8a7 1676 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
eefe9a99
JH
1677 if (cache_token)
1678 *cache_token = (void *)*slot;
1679 if (*slot)
68377e53
JH
1680 {
1681 if (completep)
ec77d61f
JH
1682 *completep = (*slot)->complete;
1683 if (nonconstruction_targetsp)
1684 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
68377e53
JH
1685 return (*slot)->targets;
1686 }
1687
ec77d61f 1688 complete = true;
eefe9a99
JH
1689
1690 /* Do actual search. */
1691 timevar_push (TV_IPA_VIRTUAL_CALL);
1692 *slot = XCNEW (polymorphic_call_target_d);
1693 if (cache_token)
68377e53 1694 *cache_token = (void *)*slot;
eefe9a99
JH
1695 (*slot)->type = type;
1696 (*slot)->otr_token = otr_token;
68377e53 1697 (*slot)->context = context;
eefe9a99
JH
1698
1699 inserted = pointer_set_create ();
1700 matched_vtables = pointer_set_create ();
1701
1702 /* First see the virtual method of the type itself. */
68377e53
JH
1703 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1704 context.offset, otr_type);
ec77d61f
JH
1705 if (binfo)
1706 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1707 &can_refer);
1708 else
68377e53 1709 {
ec77d61f
JH
1710 gcc_assert (odr_violation_reported);
1711 target = NULL;
1712 }
68377e53 1713
2d1644bf
JH
1714 /* Destructors are never called through construction virtual tables,
1715 because the type is always known. */
1716 if (target && DECL_CXX_DESTRUCTOR_P (target))
1717 context.maybe_in_construction = false;
ec77d61f
JH
1718
1719 if (target)
1720 {
1721 /* In the case we get a final method, we don't need
68377e53
JH
1722 to walk derivations. */
1723 if (DECL_FINAL_P (target))
1724 context.maybe_derived_type = false;
1725 }
2d1644bf
JH
1726
1727 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
1728 if (type_possibly_instantiated_p (outer_type->type))
1729 maybe_record_node (nodes, target, inserted, can_refer, &complete);
ec77d61f 1730 else
2d1644bf
JH
1731 {
1732 skipped = true;
1733 gcc_assert (in_lto_p || context.maybe_derived_type);
1734 }
79c7de84 1735
eefe9a99
JH
1736 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1737
ec77d61f 1738 /* Next, recursively walk all derived types. */
68377e53
JH
1739 if (context.maybe_derived_type)
1740 {
1741 /* For anonymous namespace types we can attempt to build the full type.
1742 All derivations must be in this unit (unless we see a partial unit). */
2d1644bf 1743 if (!type->all_derivations_known)
ec77d61f 1744 complete = false;
68377e53
JH
1745 for (i = 0; i < outer_type->derived_types.length(); i++)
1746 possible_polymorphic_call_targets_1 (nodes, inserted,
1747 matched_vtables,
79c7de84
EB
1748 otr_type,
1749 outer_type->derived_types[i],
68377e53 1750 otr_token, outer_type->type,
2d1644bf
JH
1751 context.offset, &complete,
1752 bases_to_consider,
1753 context.maybe_in_construction);
68377e53 1754 }
79c7de84 1755
ec77d61f
JH
1756 /* Finally walk bases, if asked to. */
1757 (*slot)->nonconstruction_targets = nodes.length();
2d1644bf
JH
1758
1759 /* Destructors are never called through construction virtual tables,
1760 because the type is always known. One of the entries may be cxa_pure_virtual,
1761 so look at at least two of them. */
1762 if (context.maybe_in_construction)
1763 for (i = 0; i < MIN (nodes.length (), 2); i++)
1764 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1765 context.maybe_in_construction = false;
ec77d61f 1766 if (context.maybe_in_construction)
2d1644bf
JH
1767 {
1768 if (type != outer_type
1769 && (!skipped
1770 || (context.maybe_derived_type
1771 && !type_all_derivations_known_p (outer_type->type))))
1772 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1773 context.offset, nodes, inserted,
1774 matched_vtables, &complete);
1775 if (skipped)
1776 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1777 for (i = 0; i < bases_to_consider.length(); i++)
1778 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
1779 }
1780 bases_to_consider.release();
ec77d61f 1781
eefe9a99 1782 (*slot)->targets = nodes;
ec77d61f 1783 (*slot)->complete = complete;
68377e53 1784 if (completep)
ec77d61f
JH
1785 *completep = complete;
1786 if (nonconstruction_targetsp)
1787 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
eefe9a99
JH
1788
1789 pointer_set_destroy (inserted);
1790 pointer_set_destroy (matched_vtables);
1791 timevar_pop (TV_IPA_VIRTUAL_CALL);
1792 return nodes;
1793}
1794
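/* Editorial sketch (not part of the original sources): a minimal C++
   example of what the function above computes.  For the call in use_a
   below, the OBJ_TYPE_REF has otr_type == A and otr_token == 0 (f is the
   first virtual method of A), and the returned vector contains A::f and
   B::f.  Because A lives in an anonymous namespace, all derivations are
   known to this unit, so the list is reported as complete; when the
   context says the object may still be in construction, targets coming
   from base types are appended after the first NONCONSTRUCTION_TARGETS
   entries.  */
#if 0
namespace
{
  struct A { virtual int f () { return 1; } };
  struct B : A { virtual int f () { return 2; } };
}

int
use_a (A *a)
{
  return a->f ();	/* OBJ_TYPE_REF (A, token 0); targets: A::f, B::f.  */
}
#endif
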
1795/* Dump all possible targets of a polymorphic call. */
1796
1797void
1798dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
1799 tree otr_type,
1800 HOST_WIDE_INT otr_token,
1801 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
1802{
1803 vec <cgraph_node *> targets;
1804 bool final;
1805 odr_type type = get_odr_type (otr_type, false);
1806 unsigned int i;
ec77d61f 1807 int nonconstruction;
eefe9a99
JH
1808
1809 if (!type)
1810 return;
1811 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 1812 ctx,
ec77d61f 1813 &final, NULL, &nonconstruction);
68377e53 1814 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 1815 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
1816 fprintf (f, " token %i\n", (int)otr_token);
1817 if (ctx.outer_type || ctx.offset)
1818 {
1819 fprintf (f, " Contained in type:");
1820 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1821 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1822 ctx.offset);
1823 }
1824
1825 fprintf (f, " %s%s%s\n ",
1826 final ? "This is a complete list." :
68377e53
JH
1827 "This is partial list; extra targets may be defined in other units.",
1828 ctx.maybe_in_construction ? " (base types included)" : "",
1829 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 1830 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
1831 {
1832 char *name = NULL;
1833 if (i == (unsigned)nonconstruction)
1834 fprintf (f, "\n If the type is in construction,"
1835 " then additional tarets are:\n"
1836 " ");
1837 if (in_lto_p)
1838 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1839 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1840 if (in_lto_p)
1841 free (name);
1842 if (!targets[i]->definition)
1843 fprintf (f, " (no definition%s)",
1844 DECL_DECLARED_INLINE_P (targets[i]->decl)
1845 ? " inline" : "");
1846 }
68377e53 1847 fprintf (f, "\n\n");
eefe9a99
JH
1848}
1849
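/* Editorial illustration (hypothetical output, not from a real run): for
   the example sketched after possible_polymorphic_call_targets, the dump
   produced above would look roughly like

     Targets of polymorphic call of type 1:struct A token 0
       This is a complete list. (derived types included)
        A::f/5 B::f/6

   with "(no definition)" appended to targets that lack a body, and an
   extra "If the type is in construction" section listing targets recorded
   only for objects under construction.  Exact type ids, printable names
   and symbol orders depend on the unit.  */
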
0e1474e5
JH
1850
1851/* Return true if N can possibly be a target of a polymorphic call of
1852 OTR_TYPE/OTR_TOKEN. */
1853
1854bool
1855possible_polymorphic_call_target_p (tree otr_type,
1856 HOST_WIDE_INT otr_token,
68377e53 1857 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
1858 struct cgraph_node *n)
1859{
1860 vec <cgraph_node *> targets;
1861 unsigned int i;
68377e53 1862 enum built_in_function fcode;
450ad0cd 1863 bool final;
0e1474e5 1864
68377e53
JH
1865 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1866 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1867 == BUILT_IN_UNREACHABLE
1868 || fcode == BUILT_IN_TRAP))
1869 return true;
1870
c203e8a7 1871 if (!odr_hash)
0e1474e5 1872 return true;
68377e53 1873 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 1874 for (i = 0; i < targets.length (); i++)
68377e53 1875 if (symtab_semantically_equivalent_p (n, targets[i]))
0e1474e5 1876 return true;
450ad0cd
JH
1877
1878 /* At the moment we allow the middle end to dig out new external declarations
1879 as targets of polymorphic calls. */
67348ccc 1880 if (!final && !n->definition)
450ad0cd 1881 return true;
0e1474e5
JH
1882 return false;
1883}
1884
1885
1886/* After callgraph construction, new external nodes may appear.
1887 Add them to the graph. */
1888
1889void
1890update_type_inheritance_graph (void)
1891{
1892 struct cgraph_node *n;
1893
c203e8a7 1894 if (!odr_hash)
0e1474e5
JH
1895 return;
1896 free_polymorphic_call_targets_hash ();
1897 timevar_push (TV_IPA_INHERITANCE);
68377e53 1898 /* We reconstruct the graph starting from types of all methods seen in the
0e1474e5
JH
1899 unit. */
1900 FOR_EACH_FUNCTION (n)
67348ccc
DM
1901 if (DECL_VIRTUAL_P (n->decl)
1902 && !n->definition
1903 && symtab_real_symbol_p (n))
1904 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
0e1474e5
JH
1905 timevar_pop (TV_IPA_INHERITANCE);
1906}
bbc9396b
JH
1907
1908
1909/* Return true if N looks like a likely target of a polymorphic call.
1910 Rule out cxa_pure_virtual, noreturns, functions declared cold, and
1911 other obvious cases. */
1912
1913bool
1914likely_target_p (struct cgraph_node *n)
1915{
1916 int flags;
1917 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 1918 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 1919 return false;
67348ccc 1920 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
1921 if (flags & ECF_NORETURN)
1922 return false;
1923 if (lookup_attribute ("cold",
67348ccc 1924 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
1925 return false;
1926 if (n->frequency < NODE_FREQUENCY_NORMAL)
1927 return false;
ccb05ef2
JH
1928 /* If there are no live virtual tables referring to the target,
1929 the only way the target can be called is through an instance coming from
1930 another compilation unit; speculative devirtualization is built around
1931 the assumption that this won't happen. */
1932 if (!referenced_from_vtable_p (n))
1933 return false;
bbc9396b
JH
1934 return true;
1935}
1936
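/* Editorial sketch: under the rules above, C::f below can be a likely
   target (provided some live virtual table refers to it), while C::fail
   is ruled out by ECF_NORETURN and C::rare by its "cold" attribute.  The
   class C is hypothetical and only illustrates the filter.  */
#if 0
struct C
{
  virtual int f () { return 3; }
  virtual void fail () __attribute__ ((noreturn));
  virtual void rare () __attribute__ ((cold));
};
#endif
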
1937/* The ipa-devirt pass.
3462aa02
JH
1938 When a polymorphic call has only one likely target in the unit,
1939 turn it into a speculative call. */
bbc9396b
JH
1940
1941static unsigned int
1942ipa_devirt (void)
1943{
1944 struct cgraph_node *n;
1945 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1946 struct cgraph_edge *e;
1947
1948 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1949 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
570215f9 1950 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
bbc9396b
JH
1951
1952 FOR_EACH_DEFINED_FUNCTION (n)
1953 {
1954 bool update = false;
1955 if (dump_file && n->indirect_calls)
1956 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
fec39fa6 1957 n->name (), n->order);
bbc9396b
JH
1958 for (e = n->indirect_calls; e; e = e->next_callee)
1959 if (e->indirect_info->polymorphic)
1960 {
1961 struct cgraph_node *likely_target = NULL;
1962 void *cache_token;
1963 bool final;
ec77d61f 1964 int nonconstruction_targets;
bbc9396b
JH
1965 vec <cgraph_node *>targets
1966 = possible_polymorphic_call_targets
ec77d61f 1967 (e, &final, &cache_token, &nonconstruction_targets);
bbc9396b
JH
1968 unsigned int i;
1969
1970 if (dump_file)
1971 dump_possible_polymorphic_call_targets
1972 (dump_file, e);
3462aa02 1973
bbc9396b
JH
1974 npolymorphic++;
1975
bbc9396b
JH
1976 if (!cgraph_maybe_hot_edge_p (e))
1977 {
1978 if (dump_file)
ec77d61f 1979 fprintf (dump_file, "Call is cold\n\n");
bbc9396b
JH
1980 ncold++;
1981 continue;
1982 }
1983 if (e->speculative)
1984 {
1985 if (dump_file)
ec77d61f 1986 fprintf (dump_file, "Call is already speculated\n\n");
bbc9396b
JH
1987 nspeculated++;
1988
1989 /* When dumping, see if we agree with the speculation. */
1990 if (!dump_file)
1991 continue;
1992 }
1993 if (pointer_set_contains (bad_call_targets,
1994 cache_token))
1995 {
1996 if (dump_file)
ec77d61f 1997 fprintf (dump_file, "Target list is known to be useless\n\n");
bbc9396b
JH
1998 nmultiple++;
1999 continue;
2000 }
c3284718 2001 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
2002 if (likely_target_p (targets[i]))
2003 {
2004 if (likely_target)
2005 {
ec77d61f
JH
2006 if (i < (unsigned) nonconstruction_targets)
2007 {
2008 likely_target = NULL;
2009 if (dump_file)
2010 fprintf (dump_file, "More than one likely target\n\n");
2011 nmultiple++;
2012 }
bbc9396b
JH
2013 break;
2014 }
2015 likely_target = targets[i];
2016 }
2017 if (!likely_target)
2018 {
2019 pointer_set_insert (bad_call_targets, cache_token);
2020 continue;
2021 }
2022 /* This is reached only when dumping; check if we agree or disagree
2023 with the speculation. */
2024 if (e->speculative)
2025 {
2026 struct cgraph_edge *e2;
2027 struct ipa_ref *ref;
2028 cgraph_speculative_call_info (e, e2, e, ref);
2029 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2030 == cgraph_function_or_thunk_node (likely_target, NULL))
2031 {
ec77d61f 2032 fprintf (dump_file, "We agree with speculation\n\n");
bbc9396b
JH
2033 nok++;
2034 }
2035 else
2036 {
ec77d61f 2037 fprintf (dump_file, "We disagree with speculation\n\n");
bbc9396b
JH
2038 nwrong++;
2039 }
2040 continue;
2041 }
67348ccc 2042 if (!likely_target->definition)
bbc9396b
JH
2043 {
2044 if (dump_file)
ec77d61f 2045 fprintf (dump_file, "Target is not a definition\n\n");
bbc9396b
JH
2046 nnotdefined++;
2047 continue;
2048 }
2049 /* Do not introduce new references to external symbols. While we
2050 can handle these just fine, it is common for programs to be
2051 built incorrectly, with headers defining methods that are not
2052 actually linked in. */
67348ccc 2053 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
2054 {
2055 if (dump_file)
ec77d61f 2056 fprintf (dump_file, "Target is external\n\n");
bbc9396b
JH
2057 nexternal++;
2058 continue;
2059 }
570215f9
JM
2060 /* Don't use an implicitly-declared destructor (c++/58678). */
2061 struct cgraph_node *non_thunk_target
2062 = cgraph_function_node (likely_target);
2063 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2064 && DECL_COMDAT (non_thunk_target->decl))
2065 {
2066 if (dump_file)
2067 fprintf (dump_file, "Target is artificial\n\n");
2068 nartificial++;
2069 continue;
2070 }
bbc9396b
JH
2071 if (cgraph_function_body_availability (likely_target)
2072 <= AVAIL_OVERWRITABLE
67348ccc 2073 && symtab_can_be_discarded (likely_target))
bbc9396b
JH
2074 {
2075 if (dump_file)
ec77d61f 2076 fprintf (dump_file, "Target is overwritable\n\n");
bbc9396b
JH
2077 noverwritable++;
2078 continue;
2079 }
2b5f0895 2080 else if (dbg_cnt (devirt))
bbc9396b 2081 {
2b5f0895
XDL
2082 if (dump_enabled_p ())
2083 {
2084 location_t locus = gimple_location (e->call_stmt);
2085 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2086 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2087 n->name (), n->order,
2088 likely_target->name (),
2089 likely_target->order);
2090 }
67348ccc 2091 if (!symtab_can_be_discarded (likely_target))
5b79657a
JH
2092 {
2093 cgraph_node *alias;
2094 alias = cgraph (symtab_nonoverwritable_alias
67348ccc 2095 (likely_target));
5b79657a
JH
2096 if (alias)
2097 likely_target = alias;
2098 }
bbc9396b
JH
2099 nconverted++;
2100 update = true;
2101 cgraph_turn_edge_to_speculative
2102 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2103 }
2104 }
2105 if (update)
2106 inline_update_overall_summary (n);
2107 }
2108 pointer_set_destroy (bad_call_targets);
2109
2110 if (dump_file)
2111 fprintf (dump_file,
2112 "%i polymorphic calls, %i devirtualized,"
2113 " %i speculatively devirtualized, %i cold\n"
2114 "%i have multiple targets, %i overwritable,"
2115 " %i already speculated (%i agree, %i disagree),"
570215f9 2116 " %i external, %i not defined, %i artificial\n",
bbc9396b
JH
2117 npolymorphic, ndevirtualized, nconverted, ncold,
2118 nmultiple, noverwritable, nspeculated, nok, nwrong,
570215f9 2119 nexternal, nnotdefined, nartificial);
bbc9396b
JH
2120 return ndevirtualized ? TODO_remove_functions : 0;
2121}
2122
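/* Editorial sketch (conceptual view only, not part of the sources): when
   the pass above finds a single likely target, say B::f, for a
   polymorphic call

     a->f ();

   cgraph_turn_edge_to_speculative marks the edge so that it can later be
   expanded into the equivalent of

     if (<loaded virtual table entry> == &B::f)
       B::f (a);		// direct call on the hot path (the 8/10 probability above)
     else
       a->f ();			// original indirect call kept as a fallback

   which is what makes the direct call visible to inlining.  */
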
bbc9396b
JH
2123namespace {
2124
2125const pass_data pass_data_ipa_devirt =
2126{
2127 IPA_PASS, /* type */
2128 "devirt", /* name */
2129 OPTGROUP_NONE, /* optinfo_flags */
bbc9396b
JH
2130 true, /* has_execute */
2131 TV_IPA_DEVIRT, /* tv_id */
2132 0, /* properties_required */
2133 0, /* properties_provided */
2134 0, /* properties_destroyed */
2135 0, /* todo_flags_start */
2136 ( TODO_dump_symtab ), /* todo_flags_finish */
2137};
2138
2139class pass_ipa_devirt : public ipa_opt_pass_d
2140{
2141public:
c3284718
RS
2142 pass_ipa_devirt (gcc::context *ctxt)
2143 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2144 NULL, /* generate_summary */
2145 NULL, /* write_summary */
2146 NULL, /* read_summary */
2147 NULL, /* write_optimization_summary */
2148 NULL, /* read_optimization_summary */
2149 NULL, /* stmt_fixup */
2150 0, /* function_transform_todo_flags_start */
2151 NULL, /* function_transform */
2152 NULL) /* variable_transform */
bbc9396b
JH
2153 {}
2154
2155 /* opt_pass methods: */
1a3d085c
TS
2156 virtual bool gate (function *)
2157 {
2158 return (flag_devirtualize
2159 && flag_devirtualize_speculatively
2160 && optimize);
2161 }
2162
be55bfe6 2163 virtual unsigned int execute (function *) { return ipa_devirt (); }
bbc9396b
JH
2164
2165}; // class pass_ipa_devirt
2166
2167} // anon namespace
2168
2169ipa_opt_pass_d *
2170make_pass_ipa_devirt (gcc::context *ctxt)
2171{
2172 return new pass_ipa_devirt (ctxt);
2173}
2174
eefe9a99 2175#include "gt-ipa-devirt.h"