/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
	In short, the ODR states that:
	1 In any translation unit, a template, type, function, or object can
	  have no more than one definition.  Some of these can have any number
	  of declarations.  A definition provides an instance.
	2 In the entire program, an object or non-inline function cannot have
	  more than one definition; if an object or function is used, it must
	  have exactly one definition.  You can declare an object or function
	  that is never used, in which case you don't have to provide
	  a definition.  In no event can there be more than one definition.
	3 Some things, like types, templates, and extern inline functions, can
	  be defined in more than one translation unit.  For a given entity,
	  each definition must be the same.  Non-extern objects and functions
	  in different translation units are different entities, even if their
	  names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of type information of a polymorphic call.
       It contains two parameters:
	 otr_type is a type of class whose method is called.
	 otr_token is the index into virtual table where address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       virtual table of the base type.  Also BINFO_OFFSET specifies
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (that differs from BINFO_VTABLE of
       the binfo associated to the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of virtual method in virtual table associated
       to the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual method.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->bases and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found in an
  indirect polymorphic edge, all possible polymorphic call targets of the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/

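/* As an illustration only (not part of the pass itself), consider the C++
   code

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };
     int call (A *a) { return a->f (); }

   The call "a->f ()" is represented as an OBJ_TYPE_REF whose otr_type is A
   and whose otr_token is the index of f in A's virtual table (0 here,
   assuming the usual Itanium C++ ABI layout).  TYPE_BINFO (A) and
   TYPE_BINFO (B) describe the inheritance and point to the respective
   virtual tables, so walking the type inheritance graph from A yields both
   A::f and B::f as possible targets of the call.  */
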
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "target.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache.  */
static pointer_set_t *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
};


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   while others represent bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

180/* One Definition Rule hashtable helpers. */
181
182struct odr_hasher
183{
184 typedef odr_type_d value_type;
185 typedef union tree_node compare_type;
186 static inline hashval_t hash (const value_type *);
187 static inline bool equal (const value_type *, const compare_type *);
188 static inline void remove (value_type *);
189};
190
191/* Produce hash based on type name. */
192
193hashval_t
194hash_type_name (tree t)
195{
196 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
197
198 /* If not in LTO, all main variants are unique, so we can do
199 pointer hash. */
200 if (!in_lto_p)
201 return htab_hash_pointer (t);
202
203 /* Anonymous types are unique. */
204 if (type_in_anonymous_namespace_p (t))
205 return htab_hash_pointer (t);
206
207 /* For polymorphic types, we can simply hash the virtual table. */
208 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
209 {
210 tree v = BINFO_VTABLE (TYPE_BINFO (t));
211 hashval_t hash = 0;
212
213 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
214 {
215 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
216 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
217 }
218
219 v = DECL_ASSEMBLER_NAME (v);
220 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
221 return hash;
222 }
223
224 /* Rest is not implemented yet. */
225 gcc_unreachable ();
226}
227
228/* Return the computed hashcode for ODR_TYPE. */
229
230inline hashval_t
231odr_hasher::hash (const value_type *odr_type)
232{
233 return hash_type_name (odr_type->type);
234}
235
/* Compare types T1 and T2 and return true if they are
   equivalent.  */
238
239inline bool
240odr_hasher::equal (const value_type *t1, const compare_type *ct2)
241{
242 tree t2 = const_cast <tree> (ct2);
243
244 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
245 if (t1->type == t2)
246 return true;
247 if (!in_lto_p)
248 return false;
249 return types_same_for_odr (t1->type, t2);
250}
251
/* Free ODR type V.  */
253
254inline void
255odr_hasher::remove (value_type *v)
256{
257 v->bases.release ();
258 v->derived_types.release ();
259 if (v->types_set)
260 pointer_set_destroy (v->types_set);
261 ggc_free (v);
262}
263
264/* ODR type hash used to lookup ODR type based on tree type node. */
265
266typedef hash_table <odr_hasher> odr_hash_type;
267static odr_hash_type odr_hash;
268
269/* ODR types are also stored into ODR_TYPE vector to allow consistent
270 walking. Bases appear before derived types. Vector is garbage collected
271 so we won't end up visiting empty types. */
272
273static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
274#define odr_types (*odr_types_ptr)
275
/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */
280
281static void
282add_type_duplicate (odr_type val, tree type)
283{
284 if (!val->types_set)
285 val->types_set = pointer_set_create ();
286
287 /* See if this duplicate is new. */
288 if (!pointer_set_insert (val->types_set, type))
289 {
290 bool merge = true;
291 bool base_mismatch = false;
292 gcc_assert (in_lto_p);
293 vec_safe_push (val->types, type);
294 unsigned int i,j;
295
296 /* First we compare memory layout. */
297 if (!types_compatible_p (val->type, type))
298 {
299 merge = false;
300 if (BINFO_VTABLE (TYPE_BINFO (val->type))
301 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
302 "type %qD violates one definition rule ",
303 type))
304 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
305 "a type with the same name but different layout is "
306 "defined in another translation unit");
307 if (cgraph_dump_file)
308 {
309 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
310
311 print_node (cgraph_dump_file, "", val->type, 0);
312 putc ('\n',cgraph_dump_file);
313 print_node (cgraph_dump_file, "", type, 0);
314 putc ('\n',cgraph_dump_file);
315 }
316 }
317
318 /* Next sanity check that bases are the same. If not, we will end
319 up producing wrong answers. */
320 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
321 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
322 {
323 odr_type base = get_odr_type
324 (BINFO_TYPE
325 (BINFO_BASE_BINFO (TYPE_BINFO (type),
326 i)),
327 true);
328 if (val->bases.length () <= j || val->bases[j] != base)
329 base_mismatch = true;
330 j++;
331 }
332 if (base_mismatch)
333 {
334 merge = false;
335
336 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
337 "type %qD violates one definition rule ",
338 type))
339 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
340 "a type with the same name but different bases is "
341 "defined in another translation unit");
342 if (cgraph_dump_file)
343 {
	      fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
345
346 print_node (cgraph_dump_file, "", val->type, 0);
347 putc ('\n',cgraph_dump_file);
348 print_node (cgraph_dump_file, "", type, 0);
349 putc ('\n',cgraph_dump_file);
350 }
351 }
352
      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with an external vtable, while the other
	 comes with an internal one.  We want to merge equivalent binfos to
	 conserve memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace them by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
365 if (!flag_ltrans && merge)
366 {
367 tree master_binfo = TYPE_BINFO (val->type);
368 tree v1 = BINFO_VTABLE (master_binfo);
369 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
370
371 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
372 {
373 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
374 && operand_equal_p (TREE_OPERAND (v1, 1),
375 TREE_OPERAND (v2, 1), 0));
376 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
377 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
378 }
379 gcc_assert (DECL_ASSEMBLER_NAME (v1)
380 == DECL_ASSEMBLER_NAME (v2));
381
382 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
383 {
384 unsigned int i;
385
386 TYPE_BINFO (val->type) = TYPE_BINFO (type);
	  for (i = 0; i < val->types->length (); i++)
388 {
389 if (TYPE_BINFO ((*val->types)[i])
390 == master_binfo)
391 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
392 }
393 }
394 else
395 TYPE_BINFO (type) = master_binfo;
396 }
397 }
398}
399
400/* Get ODR type hash entry for TYPE. If INSERT is true, create
401 possibly new entry. */
402
403odr_type
404get_odr_type (tree type, bool insert)
405{
406 odr_type_d **slot;
407 odr_type val;
408 hashval_t hash;
409
410 type = TYPE_MAIN_VARIANT (type);
411 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
412 hash = hash_type_name (type);
413 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
414 if (!slot)
415 return NULL;
416
417 /* See if we already have entry for type. */
418 if (*slot)
419 {
420 val = *slot;
421
422 /* With LTO we need to support multiple tree representation of
423 the same ODR type. */
424 if (val->type != type)
425 add_type_duplicate (val, type);
426 }
427 else
428 {
429 tree binfo = TYPE_BINFO (type);
430 unsigned int i;
431
432 val = ggc_alloc_cleared_odr_type_d ();
433 val->type = type;
434 val->bases = vNULL;
435 val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
437 *slot = val;
438 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Other types are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
442 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
443 {
444 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
445 i)),
446 true);
447 base->derived_types.safe_push (val);
448 val->bases.safe_push (base);
449 }
450 /* First record bases, then add into array so ids are increasing. */
451 if (odr_types_ptr)
	val->id = odr_types.length ();
453 vec_safe_push (odr_types_ptr, val);
454 }
455 return val;
456}
457
/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}
527
/* Given method type T, return type of class it belongs to.
   Look up the THIS pointer and get its type.  */

tree
method_class_type (tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
539
/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
595
/* If TARGET has associated node, record it in the NODES array.
   If TARGET can not be inserted (for example because its body was
   already removed and there is no way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
	 == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  if (target_node != NULL
      && (TREE_PUBLIC (target)
	  || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && !type_in_anonymous_namespace_p
		 (method_class_type (TREE_TYPE (target))))
    *completep = true;
}
636
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on the way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.
 */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous)
665{
666 tree type = BINFO_TYPE (binfo);
667 int i;
668 tree base_binfo;
669
671 if (BINFO_VTABLE (binfo))
672 type_binfos.safe_push (binfo);
68377e53 673 if (types_same_for_odr (type, outer_type))
eefe9a99 674 {
675 int i;
676 tree type_binfo = NULL;
677
678 /* Lookup BINFO with virtual table. For normal types it is always last
679 binfo on stack. */
680 for (i = type_binfos.length () - 1; i >= 0; i--)
681 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
682 {
683 type_binfo = type_binfos[i];
684 break;
685 }
686 if (BINFO_VTABLE (binfo))
687 type_binfos.pop ();
688 /* If this is duplicated BINFO for base shared by virtual inheritance,
689 we may not have its associated vtable. This is not a problem, since
690 we will walk it on the other path. */
691 if (!type_binfo)
692 {
693 gcc_assert (BINFO_VIRTUAL_P (binfo));
694 return;
695 }
696 tree inner_binfo = get_binfo_at_offset (type_binfo,
697 offset, otr_type);
698 /* For types in anonymous namespace first check if the respective vtable
699 is alive. If not, we know the type can't be called. */
700 if (!flag_ltrans && anonymous)
701 {
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;
704
705 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
706 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
707 vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
709 return;
710 }
711 gcc_assert (inner_binfo);
712 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
713 {
714 tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
715 if (target)
716 maybe_record_node (nodes, target, inserted, NULL);
717 }
718 return;
719 }
720
721 /* Walk bases. */
722 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
732
/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     pointer_set_t *inserted,
				     pointer_set_t *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;

  record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
			    outer_type, offset,
			    inserted, matched_vtables,
			    type->anonymous_namespace);
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset);
}
764
765/* Cache of queries for polymorphic call targets.
766
767 Enumerating all call targets may get expensive when there are many
768 polymorphic calls in the program, so we memoize all the previous
769 queries and avoid duplicated work. */
770
771struct polymorphic_call_target_d
772{
eefe9a99 773 HOST_WIDE_INT otr_token;
774 ipa_polymorphic_call_context context;
775 odr_type type;
eefe9a99 776 vec <cgraph_node *> targets;
68377e53 777 bool final;
778};
779
780/* Polymorphic call target cache helpers. */
781
782struct polymorphic_call_target_hasher
783{
784 typedef polymorphic_call_target_d value_type;
785 typedef polymorphic_call_target_d compare_type;
786 static inline hashval_t hash (const value_type *);
787 static inline bool equal (const value_type *, const compare_type *);
788 static inline void remove (value_type *);
789};
790
791/* Return the computed hashcode for ODR_QUERY. */
792
793inline hashval_t
794polymorphic_call_target_hasher::hash (const value_type *odr_query)
795{
796 hashval_t hash;
797
798 hash = iterative_hash_host_wide_int
799 (odr_query->otr_token,
800 odr_query->type->id);
801 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
802 hash);
803 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
804 return iterative_hash_hashval_t
805 (((int)odr_query->context.maybe_in_construction << 1)
806 | (int)odr_query->context.maybe_derived_type, hash);
807}
808
809/* Compare cache entries T1 and T2. */
810
811inline bool
812polymorphic_call_target_hasher::equal (const value_type *t1,
813 const compare_type *t2)
814{
815 return (t1->type == t2->type && t1->otr_token == t2->otr_token
816 && t1->context.offset == t2->context.offset
817 && t1->context.outer_type == t2->context.outer_type
818 && t1->context.maybe_in_construction
819 == t2->context.maybe_in_construction
820 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
821}
822
823/* Remove entry in polymorphic call target cache hash. */
824
825inline void
826polymorphic_call_target_hasher::remove (value_type *v)
827{
828 v->targets.release ();
829 free (v);
830}
831
832/* Polymorphic call target query cache. */
833
834typedef hash_table <polymorphic_call_target_hasher>
835 polymorphic_call_target_hash_type;
836static polymorphic_call_target_hash_type polymorphic_call_target_hash;
837
838/* Destroy polymorphic call target query cache. */
839
840static void
841free_polymorphic_call_targets_hash ()
842{
843 if (cached_polymorphic_call_targets)
844 {
845 polymorphic_call_target_hash.dispose ();
846 pointer_set_destroy (cached_polymorphic_call_targets);
847 cached_polymorphic_call_targets = NULL;
848 }
849}
850
851/* When virtual function is removed, we may need to flush the cache. */
852
853static void
854devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
855{
856 if (cached_polymorphic_call_targets
857 && pointer_set_contains (cached_polymorphic_call_targets, n))
858 free_polymorphic_call_targets_hash ();
859}
860
/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
   is contained at CONTEXT->OFFSET.  Walk the memory representation of
   CONTEXT->OUTER_TYPE and find the outermost class type that matches
   EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update CONTEXT
   to represent it.

   For example when CONTEXT represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when lookup was successful.  */
880
881static bool
882get_class_context (ipa_polymorphic_call_context *context,
883 tree expected_type)
884{
885 tree type = context->outer_type;
886 HOST_WIDE_INT offset = context->offset;
887
888 /* Find the sub-object the constant actually refers to and mark whether it is
889 an artificial one (as opposed to a user-defined one). */
890 while (true)
891 {
892 HOST_WIDE_INT pos, size;
893 tree fld;
894
895 /* On a match, just return what we found. */
896 if (TREE_CODE (type) == TREE_CODE (expected_type)
897 && types_same_for_odr (type, expected_type))
898 {
	  /* Type can not contain itself on a non-zero offset.  In that case
	     just give up.  */
901 if (offset != 0)
902 goto give_up;
68377e53
JH
903 gcc_assert (offset == 0);
904 return true;
905 }
906
      /* Walk fields and find the corresponding one at OFFSET.  */
908 if (TREE_CODE (type) == RECORD_TYPE)
909 {
910 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
911 {
912 if (TREE_CODE (fld) != FIELD_DECL)
913 continue;
914
915 pos = int_bit_position (fld);
916 size = tree_to_uhwi (DECL_SIZE (fld));
917 if (pos <= offset && (pos + size) > offset)
918 break;
919 }
920
921 if (!fld)
922 goto give_up;
923
924 type = TREE_TYPE (fld);
925 offset -= pos;
926 /* DECL_ARTIFICIAL represents a basetype. */
927 if (!DECL_ARTIFICIAL (fld))
928 {
929 context->outer_type = type;
930 context->offset = offset;
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
933 context->maybe_derived_type = false;
934 }
935 }
936 else if (TREE_CODE (type) == ARRAY_TYPE)
937 {
938 tree subtype = TREE_TYPE (type);
939
940 /* Give up if we don't know array size. */
	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
943 goto give_up;
944 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
945 type = subtype;
946 context->outer_type = type;
947 context->offset = offset;
948 context->maybe_derived_type = false;
949 }
950 /* Give up on anything else. */
951 else
952 goto give_up;
953 }
954
955 /* If we failed to find subtype we look for, give up and fall back to the
956 most generic query. */
957give_up:
958 context->outer_type = expected_type;
959 context->offset = 0;
960 context->maybe_derived_type = true;
961 return false;
962}
963
964/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
965
966static bool
967contains_type_p (tree outer_type, HOST_WIDE_INT offset,
968 tree otr_type)
969{
970 ipa_polymorphic_call_context context = {offset, outer_type,
971 false, true};
972 return get_class_context (&context, otr_type);
973}
974
975/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
976
977static tree
978subbinfo_with_vtable_at_offset (tree binfo, tree offset, tree vtable)
979{
980 tree v = BINFO_VTABLE (binfo);
981 int i;
982 tree base_binfo;
983
984 gcc_assert (!v || TREE_CODE (v) == POINTER_PLUS_EXPR);
985
986 if (v && tree_int_cst_equal (TREE_OPERAND (v, 1), offset)
987 && TREE_OPERAND (TREE_OPERAND (v, 0), 0) == vtable)
988 return binfo;
989 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
990 if (polymorphic_type_binfo_p (base_binfo))
991 {
992 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
993 if (base_binfo)
994 return base_binfo;
995 }
996 return NULL;
997}
998
999/* T is known constant value of virtual table pointer. Return BINFO of the
1000 instance type. */
1001
1002tree
1003vtable_pointer_value_to_binfo (tree t)
1004{
1005 /* We expect &MEM[(void *)&virtual_table + 16B].
1006 We obtain object's BINFO from the context of the virtual table.
1007 This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.
1010
1011 In the case of virtual inheritance, the virtual tables may
1012 be nested, i.e. the offset may be different from 16 and we may
1013 need to dive into the type representation. */
1014 if (t && TREE_CODE (t) == ADDR_EXPR
1015 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1016 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1017 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1018 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1019 == VAR_DECL)
1020 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1021 (TREE_OPERAND (t, 0), 0), 0)))
1022 {
1023 tree vtable = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1024 tree offset = TREE_OPERAND (TREE_OPERAND (t, 0), 1);
1025 tree binfo = TYPE_BINFO (DECL_CONTEXT (vtable));
1026
1027 binfo = subbinfo_with_vtable_at_offset (binfo, offset, vtable);
1028
1029 /* FIXME: for stores of construction vtables we return NULL,
1030 because we do not have BINFO for those. Eventually we should fix
1031 our representation to allow this case to be handled, too.
1032 In the case we see store of BINFO we however may assume
	 that standard folding will be able to cope with it.  */
1034 return binfo;
1035 }
1036 return NULL;
1037}
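
/* For illustration only (not authoritative): a vtable pointer store produced
   by a constructor typically looks like

     this->_vptr.A = &_ZTV1A + 16;

   i.e. the ADDR_EXPR/MEM_REF form checked above, where 16 skips the two
   initial vtable slots (offset-to-top and RTTI) on LP64 targets following
   the Itanium C++ ABI.  vtable_pointer_value_to_binfo recovers the BINFO of
   A from DECL_CONTEXT of _ZTV1A and the offset.  */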
1038
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return pointer to object described by the context.  */
1042
1043tree
1044get_polymorphic_call_info (tree fndecl,
1045 tree ref,
1046 tree *otr_type,
1047 HOST_WIDE_INT *otr_token,
1048 ipa_polymorphic_call_context *context)
1049{
1050 tree base_pointer;
1051 *otr_type = obj_type_ref_class (ref);
1052 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1053
1054 /* Set up basic info in case we find nothing interesting in the analysis. */
1055 context->outer_type = *otr_type;
1056 context->offset = 0;
1057 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1058 context->maybe_derived_type = true;
1059 context->maybe_in_construction = false;
1060
1061 /* Walk SSA for outer object. */
1062 do
1063 {
1064 if (TREE_CODE (base_pointer) == SSA_NAME
1065 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1066 && SSA_NAME_DEF_STMT (base_pointer)
1067 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1068 {
1069 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1070 STRIP_NOPS (base_pointer);
1071 }
1072 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1073 {
1074 HOST_WIDE_INT size, max_size;
1075 HOST_WIDE_INT offset2;
1076 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1077 &offset2, &size, &max_size);
1078
1079 /* If this is a varying address, punt. */
1080 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1081 && max_size != -1
1082 && max_size == size)
1083 {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
1086 if (TREE_CODE (base) == MEM_REF)
1087 {
1088 base_pointer = TREE_OPERAND (base, 0);
1089 context->offset
1090 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
1091 context->outer_type = NULL;
1092 }
1093 /* We found base object. In this case the outer_type
1094 is known. */
1095 else if (DECL_P (base))
1096 {
7656ee72 1097 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
1098
1099 /* Only type inconsistent programs can have otr_type that is
1100 not part of outer type. */
7656ee72
JH
1101 if (!contains_type_p (TREE_TYPE (base),
1102 context->offset + offset2, *otr_type))
68377e53 1103 return base_pointer;
7656ee72 1104 context->outer_type = TREE_TYPE (base);
68377e53 1105 context->offset += offset2;
68377e53
JH
1106 /* Make very conservative assumption that all objects
1107 may be in construction.
1108 TODO: ipa-prop already contains code to tell better.
1109 merge it later. */
1110 context->maybe_in_construction = true;
1111 context->maybe_derived_type = false;
7656ee72 1112 return NULL;
68377e53
JH
1113 }
1114 else
1115 break;
1116 }
1117 else
1118 break;
1119 }
1120 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1121 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1122 {
1123 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1124 * BITS_PER_UNIT;
1125 base_pointer = TREE_OPERAND (base_pointer, 0);
1126 }
1127 else
1128 break;
1129 }
1130 while (true);
1131
1132 /* Try to determine type of the outer object. */
1133 if (TREE_CODE (base_pointer) == SSA_NAME
1134 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1135 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1136 {
1137 /* See if parameter is THIS pointer of a method. */
1138 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1139 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1140 {
1141 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1142 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1143
1144 /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
1146 informative than inner type and we should forget
1147 about it. */
1148 if (!contains_type_p (context->outer_type, context->offset,
1149 *otr_type))
1150 {
1151 context->outer_type = NULL;
1152 return base_pointer;
1153 }
1154
1155 /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
1157 it is not derived type. */
1158 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1159 || DECL_CXX_DESTRUCTOR_P (fndecl))
1160 {
1161 context->maybe_in_construction = true;
1162 context->maybe_derived_type = false;
1163 }
1164 else
1165 {
1166 context->maybe_derived_type = true;
1167 context->maybe_in_construction = false;
1168 }
1169 return base_pointer;
1170 }
1171 /* Non-PODs passed by value are really passed by invisible
1172 reference. In this case we also know the type of the
1173 object. */
1174 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1175 {
1176 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1177 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1178 /* Only type inconsistent programs can have otr_type that is
1179 not part of outer type. */
1180 if (!contains_type_p (context->outer_type, context->offset,
1181 *otr_type))
1182 {
1183 context->outer_type = NULL;
1184 gcc_unreachable ();
1185 return base_pointer;
1186 }
1187 context->maybe_derived_type = false;
1188 context->maybe_in_construction = false;
1189 return base_pointer;
1190 }
1191 }
1192 /* TODO: There are multiple ways to derive a type. For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this kind of flow-sensitive analysis yet.  */
1195 return base_pointer;
1196}
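
/* Illustrative sketch only (the types below are hypothetical, not part of
   GCC): for a fragment like

     struct A { virtual void f (); };
     struct S { int pad; A inner; };
     void use () { S s; s.inner.f (); }

   the walk above starts from OBJ_TYPE_REF_OBJECT (&s.inner), looks through
   the ADDR_EXPR down to the declaration of "s", and ends up with
   context->outer_type == S and context->offset equal to the bit position of
   the "inner" field; maybe_in_construction is set conservatively and
   maybe_derived_type is cleared because the complete type of the object is
   known.  */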
1197
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them to NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
1203
1204static void
1205record_targets_from_bases (tree otr_type,
1206 HOST_WIDE_INT otr_token,
1207 tree outer_type,
1208 HOST_WIDE_INT offset,
1209 vec <cgraph_node *> nodes,
1210 pointer_set_t *inserted,
1211 pointer_set_t *matched_vtables,
1212 bool *completep)
1213{
1214 while (true)
1215 {
1216 HOST_WIDE_INT pos, size;
1217 tree base_binfo;
1218 tree fld;
1219
1220 if (types_same_for_odr (outer_type, otr_type))
1221 return;
1222
1223 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1224 {
1225 if (TREE_CODE (fld) != FIELD_DECL)
1226 continue;
1227
1228 pos = int_bit_position (fld);
1229 size = tree_to_shwi (DECL_SIZE (fld));
1230 if (pos <= offset && (pos + size) > offset)
1231 break;
1232 }
      /* Within a class type we should always find corresponding fields.  */
1234 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1235
1236 /* Nonbasetypes should have been stripped by outer_class_type. */
1237 gcc_assert (DECL_ARTIFICIAL (fld));
1238
1239 outer_type = TREE_TYPE (fld);
1240 offset -= pos;
1241
1242 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1243 offset, otr_type);
1244 gcc_assert (base_binfo);
1245 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1246 {
1247 tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
1248 if (target)
1249 maybe_record_node (nodes, target, inserted, completep);
1250 /* The only way method in anonymous namespace can become unreferable
1251 is that it has been fully optimized out. */
1252 else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
1253 *completep = false;
1254 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1255 }
1256 }
1257}
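
/* Illustrative sketch only: why bases matter for objects in construction.
   Given

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };

   while the A subobject of a B is being constructed, the object's vtable
   pointer refers to A's virtual table, so a virtual call made at that point
   dispatches to A::f even though the completed B would use B::f.  This is
   why callers that set maybe_in_construction also collect targets from
   bases via record_targets_from_bases above.  */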
1258
1259/* When virtual table is removed, we may need to flush the cache. */
1260
1261static void
2c8326a5 1262devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
1263 void *d ATTRIBUTE_UNUSED)
1264{
1265 if (cached_polymorphic_call_targets
67348ccc
DM
1266 && DECL_VIRTUAL_P (n->decl)
1267 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
1268 free_polymorphic_call_targets_hash ();
1269}
1270
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual methods.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */
1287
1288vec <cgraph_node *>
1289possible_polymorphic_call_targets (tree otr_type,
1290 HOST_WIDE_INT otr_token,
68377e53
JH
1291 ipa_polymorphic_call_context context,
1292 bool *completep,
eefe9a99
JH
1293 void **cache_token)
1294{
1295 static struct cgraph_node_hook_list *node_removal_hook_holder;
1296 pointer_set_t *inserted;
1297 pointer_set_t *matched_vtables;
1298 vec <cgraph_node *> nodes=vNULL;
68377e53 1299 odr_type type, outer_type;
eefe9a99
JH
1300 polymorphic_call_target_d key;
1301 polymorphic_call_target_d **slot;
1302 unsigned int i;
1303 tree binfo, target;
68377e53 1304 bool final;
eefe9a99 1305
68377e53 1306 type = get_odr_type (otr_type, true);
eefe9a99 1307
68377e53
JH
1308 /* Lookup the outer class type we want to walk. */
1309 if (context.outer_type)
1310 get_class_context (&context, otr_type);
eefe9a99 1311
68377e53
JH
1312 /* We now canonicalize our query, so we do not need extra hashtable entries. */
1313
  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
1316 if (!context.outer_type)
1317 {
1318 context.outer_type = otr_type;
1319 context.offset = 0;
1320 }
  /* We need to update our hierarchy if the type does not exist.  */
1322 outer_type = get_odr_type (context.outer_type, true);
1323 /* If outer and inner type match, there are no bases to see. */
1324 if (type == outer_type)
1325 context.maybe_in_construction = false;
1326 /* If the type is final, there are no derivations. */
1327 if (TYPE_FINAL_P (outer_type->type))
1328 context.maybe_derived_type = false;
eefe9a99
JH
1329
1330 /* Initialize query cache. */
1331 if (!cached_polymorphic_call_targets)
1332 {
1333 cached_polymorphic_call_targets = pointer_set_create ();
1334 polymorphic_call_target_hash.create (23);
1335 if (!node_removal_hook_holder)
3462aa02
JH
1336 {
1337 node_removal_hook_holder =
1338 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1339 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1340 NULL);
1341 }
eefe9a99
JH
1342 }
1343
1344 /* Lookup cached answer. */
1345 key.type = type;
1346 key.otr_token = otr_token;
68377e53 1347 key.context = context;
eefe9a99
JH
1348 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1349 if (cache_token)
1350 *cache_token = (void *)*slot;
1351 if (*slot)
68377e53
JH
1352 {
1353 if (completep)
1354 *completep = (*slot)->final;
1355 return (*slot)->targets;
1356 }
1357
1358 final = true;
eefe9a99
JH
1359
1360 /* Do actual search. */
1361 timevar_push (TV_IPA_VIRTUAL_CALL);
1362 *slot = XCNEW (polymorphic_call_target_d);
1363 if (cache_token)
68377e53 1364 *cache_token = (void *)*slot;
eefe9a99
JH
1365 (*slot)->type = type;
1366 (*slot)->otr_token = otr_token;
68377e53 1367 (*slot)->context = context;
eefe9a99
JH
1368
1369 inserted = pointer_set_create ();
1370 matched_vtables = pointer_set_create ();
1371
1372 /* First see virtual method of type itself. */
1373
68377e53
JH
1374 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1375 context.offset, otr_type);
eefe9a99
JH
1376 target = gimple_get_virt_method_for_binfo (otr_token, binfo);
1377 if (target)
68377e53
JH
1378 {
1379 maybe_record_node (nodes, target, inserted, &final);
1380
1381 /* In the case we get final method, we don't need
1382 to walk derivations. */
1383 if (DECL_FINAL_P (target))
1384 context.maybe_derived_type = false;
1385 }
1386 /* The only way method in anonymous namespace can become unreferable
1387 is that it has been fully optimized out. */
1388 else if (flag_ltrans || !type->anonymous_namespace)
1389 final = false;
eefe9a99
JH
1390 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1391
68377e53
JH
1392 /* Next walk bases, if asked to. */
1393 if (context.maybe_in_construction)
1394 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1395 context.offset, nodes, inserted,
1396 matched_vtables, &final);
eefe9a99 1397
68377e53
JH
1398 /* Finally walk recursively all derived types. */
1399 if (context.maybe_derived_type)
1400 {
1401 /* For anonymous namespace types we can attempt to build full type.
1402 All derivations must be in this unit (unless we see partial unit). */
1403 if (!type->anonymous_namespace || flag_ltrans)
1404 final = false;
1405 for (i = 0; i < outer_type->derived_types.length(); i++)
1406 possible_polymorphic_call_targets_1 (nodes, inserted,
1407 matched_vtables,
1408 otr_type, outer_type->derived_types[i],
1409 otr_token, outer_type->type,
1410 context.offset);
1411 }
eefe9a99 1412 (*slot)->targets = nodes;
68377e53
JH
1413 (*slot)->final = final;
1414 if (completep)
1415 *completep = final;
eefe9a99
JH
1416
1417 pointer_set_destroy (inserted);
1418 pointer_set_destroy (matched_vtables);
1419 timevar_pop (TV_IPA_VIRTUAL_CALL);
1420 return nodes;
1421}
1422
1423/* Dump all possible targets of a polymorphic call. */
1424
1425void
1426dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
1427 tree otr_type,
1428 HOST_WIDE_INT otr_token,
1429 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
1430{
1431 vec <cgraph_node *> targets;
1432 bool final;
1433 odr_type type = get_odr_type (otr_type, false);
1434 unsigned int i;
1435
1436 if (!type)
1437 return;
1438 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 1439 ctx,
eefe9a99 1440 &final);
68377e53 1441 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 1442 print_generic_expr (f, type->type, TDF_SLIM);
68377e53
JH
1443 fprintf (f, " token %i\n"
1444 " Contained in type:",
1445 (int)otr_token);
1446 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1447 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
1448 " %s%s%s\n ",
1449 ctx.offset,
1450 final ? "This is full list." :
1451 "This is partial list; extra targets may be defined in other units.",
1452 ctx.maybe_in_construction ? " (base types included)" : "",
1453 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 1454 for (i = 0; i < targets.length (); i++)
fec39fa6 1455 fprintf (f, " %s/%i", targets[i]->name (),
67348ccc 1456 targets[i]->order);
68377e53 1457 fprintf (f, "\n\n");
eefe9a99
JH
1458}
1459
0e1474e5
JH
1460
1461/* Return true if N can be possibly target of a polymorphic call of
1462 OTR_TYPE/OTR_TOKEN. */
1463
1464bool
1465possible_polymorphic_call_target_p (tree otr_type,
1466 HOST_WIDE_INT otr_token,
68377e53 1467 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
1468 struct cgraph_node *n)
1469{
1470 vec <cgraph_node *> targets;
1471 unsigned int i;
68377e53 1472 enum built_in_function fcode;
450ad0cd 1473 bool final;
0e1474e5 1474
68377e53
JH
1475 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1476 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1477 == BUILT_IN_UNREACHABLE
1478 || fcode == BUILT_IN_TRAP))
1479 return true;
1480
0e1474e5
JH
1481 if (!odr_hash.is_created ())
1482 return true;
68377e53 1483 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 1484 for (i = 0; i < targets.length (); i++)
68377e53 1485 if (symtab_semantically_equivalent_p (n, targets[i]))
0e1474e5 1486 return true;
450ad0cd
JH
1487
  /* At the moment we allow middle end to dig out new external declarations
     as targets of polymorphic calls.  */
67348ccc 1490 if (!final && !n->definition)
450ad0cd 1491 return true;
0e1474e5
JH
1492 return false;
1493}
1494
1495
1496/* After callgraph construction new external nodes may appear.
1497 Add them into the graph. */
1498
1499void
1500update_type_inheritance_graph (void)
1501{
1502 struct cgraph_node *n;
1503
1504 if (!odr_hash.is_created ())
1505 return;
1506 free_polymorphic_call_targets_hash ();
1507 timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
1510 FOR_EACH_FUNCTION (n)
67348ccc
DM
1511 if (DECL_VIRTUAL_P (n->decl)
1512 && !n->definition
1513 && symtab_real_symbol_p (n))
1514 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
0e1474e5
JH
1515 timevar_pop (TV_IPA_INHERITANCE);
1516}
1517
1518
1519/* Return true if N looks like likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
1521 other obvious cases. */
1522
1523bool
1524likely_target_p (struct cgraph_node *n)
1525{
1526 int flags;
1527 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 1528 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 1529 return false;
67348ccc 1530 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
1531 if (flags & ECF_NORETURN)
1532 return false;
1533 if (lookup_attribute ("cold",
67348ccc 1534 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
1535 return false;
1536 if (n->frequency < NODE_FREQUENCY_NORMAL)
1537 return false;
1538 return true;
1539}
1540
/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
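
/* A hedged example of the transformation (illustrative only; the exact IL
   and the profile counts depend on the target and on available profile
   data):

     struct A { virtual int f (); };
     struct B : A { virtual int f () { return 1; } };
     int call (A *a) { return a->f (); }

   If B::f is the only likely target seen in the unit, the indirect call
   "a->f ()" is turned into a speculative direct call roughly of the form

     if (loaded vtable slot == &B::f)   // compare against expected target
       res = B::f (a);                  // direct, inlinable call
     else
       res = a->f ();                   // fallback indirect call

   which stays correct even when another target appears at run time.  */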
1544
1545static unsigned int
1546ipa_devirt (void)
1547{
1548 struct cgraph_node *n;
1549 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1550 struct cgraph_edge *e;
1551
1552 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1553 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0;
1555
1556 FOR_EACH_DEFINED_FUNCTION (n)
1557 {
1558 bool update = false;
1559 if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcessing function %s/%i\n",
fec39fa6 1561 n->name (), n->order);
bbc9396b
JH
1562 for (e = n->indirect_calls; e; e = e->next_callee)
1563 if (e->indirect_info->polymorphic)
1564 {
1565 struct cgraph_node *likely_target = NULL;
1566 void *cache_token;
1567 bool final;
1568 vec <cgraph_node *>targets
1569 = possible_polymorphic_call_targets
1570 (e, &final, &cache_token);
1571 unsigned int i;
1572
1573 if (dump_file)
1574 dump_possible_polymorphic_call_targets
1575 (dump_file, e);
3462aa02 1576
bbc9396b
JH
1577 npolymorphic++;
1578
bbc9396b
JH
1579 if (!cgraph_maybe_hot_edge_p (e))
1580 {
1581 if (dump_file)
1582 fprintf (dump_file, "Call is cold\n");
1583 ncold++;
1584 continue;
1585 }
1586 if (e->speculative)
1587 {
1588 if (dump_file)
	      fprintf (dump_file, "Call is already speculated\n");
1590 nspeculated++;
1591
1592 /* When dumping see if we agree with speculation. */
1593 if (!dump_file)
1594 continue;
1595 }
1596 if (pointer_set_contains (bad_call_targets,
1597 cache_token))
1598 {
1599 if (dump_file)
1600 fprintf (dump_file, "Target list is known to be useless\n");
1601 nmultiple++;
1602 continue;
1603 }
c3284718 1604 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
1605 if (likely_target_p (targets[i]))
1606 {
1607 if (likely_target)
1608 {
1609 likely_target = NULL;
1610 if (dump_file)
1611 fprintf (dump_file, "More than one likely target\n");
1612 nmultiple++;
1613 break;
1614 }
1615 likely_target = targets[i];
1616 }
1617 if (!likely_target)
1618 {
1619 pointer_set_insert (bad_call_targets, cache_token);
1620 continue;
1621 }
1622 /* This is reached only when dumping; check if we agree or disagree
1623 with the speculation. */
1624 if (e->speculative)
1625 {
1626 struct cgraph_edge *e2;
1627 struct ipa_ref *ref;
1628 cgraph_speculative_call_info (e, e2, e, ref);
1629 if (cgraph_function_or_thunk_node (e2->callee, NULL)
1630 == cgraph_function_or_thunk_node (likely_target, NULL))
1631 {
1632 fprintf (dump_file, "We agree with speculation\n");
1633 nok++;
1634 }
1635 else
1636 {
1637 fprintf (dump_file, "We disagree with speculation\n");
1638 nwrong++;
1639 }
1640 continue;
1641 }
67348ccc 1642 if (!likely_target->definition)
bbc9396b
JH
1643 {
1644 if (dump_file)
	      fprintf (dump_file, "Target is not a definition\n");
1646 nnotdefined++;
1647 continue;
1648 }
	  /* Do not introduce new references to external symbols.  While we
	     can handle these just well, it is common for programs to be
	     built incorrectly with headers defining methods they are linked
	     with.  */
67348ccc 1653 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
1654 {
1655 if (dump_file)
1656 fprintf (dump_file, "Target is external\n");
1657 nexternal++;
1658 continue;
1659 }
1660 if (cgraph_function_body_availability (likely_target)
1661 <= AVAIL_OVERWRITABLE
67348ccc 1662 && symtab_can_be_discarded (likely_target))
bbc9396b
JH
1663 {
1664 if (dump_file)
1665 fprintf (dump_file, "Target is overwritable\n");
1666 noverwritable++;
1667 continue;
1668 }
1669 else
1670 {
1671 if (dump_file)
1672 fprintf (dump_file,
1673 "Speculatively devirtualizing call in %s/%i to %s/%i\n",
fec39fa6
TS
1674 n->name (), n->order,
1675 likely_target->name (),
67348ccc
DM
1676 likely_target->order);
1677 if (!symtab_can_be_discarded (likely_target))
5b79657a
JH
1678 {
1679 cgraph_node *alias;
1680 alias = cgraph (symtab_nonoverwritable_alias
67348ccc 1681 (likely_target));
5b79657a
JH
1682 if (alias)
1683 likely_target = alias;
1684 }
bbc9396b
JH
1685 nconverted++;
1686 update = true;
1687 cgraph_turn_edge_to_speculative
1688 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1689 }
1690 }
1691 if (update)
1692 inline_update_overall_summary (n);
1693 }
1694 pointer_set_destroy (bad_call_targets);
1695
1696 if (dump_file)
1697 fprintf (dump_file,
1698 "%i polymorphic calls, %i devirtualized,"
1699 " %i speculatively devirtualized, %i cold\n"
1700 "%i have multiple targets, %i overwritable,"
1701 " %i already speculated (%i agree, %i disagree),"
1702 " %i external, %i not defined\n",
1703 npolymorphic, ndevirtualized, nconverted, ncold,
1704 nmultiple, noverwritable, nspeculated, nok, nwrong,
1705 nexternal, nnotdefined);
1706 return ndevirtualized ? TODO_remove_functions : 0;
1707}
1708
a88bf705 1709/* Gate for speculative IPA devirtualization optimization. */
bbc9396b
JH
1710
1711static bool
1712gate_ipa_devirt (void)
1713{
a88bf705
JJ
1714 return (flag_devirtualize
1715 && flag_devirtualize_speculatively
1716 && optimize);
bbc9396b
JH
1717}
1718
1719namespace {
1720
1721const pass_data pass_data_ipa_devirt =
1722{
1723 IPA_PASS, /* type */
1724 "devirt", /* name */
1725 OPTGROUP_NONE, /* optinfo_flags */
1726 true, /* has_gate */
1727 true, /* has_execute */
1728 TV_IPA_DEVIRT, /* tv_id */
1729 0, /* properties_required */
1730 0, /* properties_provided */
1731 0, /* properties_destroyed */
1732 0, /* todo_flags_start */
1733 ( TODO_dump_symtab ), /* todo_flags_finish */
1734};
1735
1736class pass_ipa_devirt : public ipa_opt_pass_d
1737{
1738public:
c3284718
RS
1739 pass_ipa_devirt (gcc::context *ctxt)
1740 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
1741 NULL, /* generate_summary */
1742 NULL, /* write_summary */
1743 NULL, /* read_summary */
1744 NULL, /* write_optimization_summary */
1745 NULL, /* read_optimization_summary */
1746 NULL, /* stmt_fixup */
1747 0, /* function_transform_todo_flags_start */
1748 NULL, /* function_transform */
1749 NULL) /* variable_transform */
bbc9396b
JH
1750 {}
1751
1752 /* opt_pass methods: */
1753 bool gate () { return gate_ipa_devirt (); }
1754 unsigned int execute () { return ipa_devirt (); }
1755
1756}; // class pass_ipa_devirt
1757
1758} // anon namespace
1759
1760ipa_opt_pass_d *
1761make_pass_ipa_devirt (gcc::context *ctxt)
1762{
1763 return new pass_ipa_devirt (ctxt);
1764}
1765
eefe9a99 1766#include "gt-ipa-devirt.h"