1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106*/
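/* A minimal illustrative sketch, using hypothetical classes:

     struct A { virtual int f (); virtual int g (); };
     struct B : A { int g (); };

     int call (A *a) { return a->g (); }

   The call a->g () is represented as an OBJ_TYPE_REF whose otr_type is A
   and whose otr_token is the index of g in A's virtual table (1 here,
   assuming f occupies slot 0).  Walking the inheritance graph rooted at A
   finds the derived type B, whose virtual table supplies B::g as a
   possible target of the call.  */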
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
112#include "tree.h"
113#include "print-tree.h"
114#include "calls.h"
115#include "cgraph.h"
116#include "expr.h"
117#include "tree-pass.h"
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
127#include "gimple.h"
128#include "ipa-inline.h"
129#include "diagnostic.h"
130#include "tree-dfa.h"
131
132/* Dummy polymorphic call context. */
133
134const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
135 = {0, NULL, false, true};
136
137/* Pointer set of all call targets appearing in the cache. */
138static pointer_set_t *cached_polymorphic_call_targets;
139
140/* The node of type inheritance graph. For each type unique in
141 One Definition Rule (ODR) sense, we produce one node linking all
142 main variants of types equivalent to it, bases and derived types. */
143
144struct GTY(()) odr_type_d
145{
146 /* leader type. */
147 tree type;
148 /* All bases. */
149 vec<odr_type> GTY((skip)) bases;
150 /* All derived types with virtual methods seen in unit. */
151 vec<odr_type> GTY((skip)) derived_types;
152
153 /* All equivalent types, if more than one. */
154 vec<tree, va_gc> *types;
155 /* Set of all equivalent types, if NON-NULL. */
156 pointer_set_t * GTY((skip)) types_set;
157
158 /* Unique ID indexing the type in odr_types array. */
159 int id;
160 /* Is it in anonymous namespace? */
161 bool anonymous_namespace;
162};
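/* A minimal illustrative sketch, using a hypothetical hierarchy: for

     struct A { virtual void f (); };
     struct B : A { void f (); };
     struct C : A { void f (); };

   three odr_type nodes are created.  The node for A has B and C in
   derived_types and an empty bases vector, while the nodes for B and C
   each have A in bases.  Since bases are recorded before the type itself,
   A receives a smaller id than B and C in the odr_types array.  */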
163
164
165/* Return true if BINFO corresponds to a type with virtual methods.
166
167 Every type has several BINFOs. One is the BINFO associated with the type
168 itself, while the others represent its bases within derived types. The BINFOs
169 representing bases do not have the BINFO_VTABLE pointer set in the single
170 inheritance case (because vtables are shared). Look up the BINFO of the type
171 and check the presence of its vtable. */
172
173static inline bool
174polymorphic_type_binfo_p (tree binfo)
175{
176 /* See if BINFO's type has a virtual table associated with it. */
177 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
178}
179
180/* One Definition Rule hashtable helpers. */
181
182struct odr_hasher
183{
184 typedef odr_type_d value_type;
185 typedef union tree_node compare_type;
186 static inline hashval_t hash (const value_type *);
187 static inline bool equal (const value_type *, const compare_type *);
188 static inline void remove (value_type *);
189};
190
191/* Produce hash based on type name. */
192
193hashval_t
194hash_type_name (tree t)
195{
196 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
197
198 /* If not in LTO, all main variants are unique, so we can do
199 pointer hash. */
200 if (!in_lto_p)
201 return htab_hash_pointer (t);
202
203 /* Anonymous types are unique. */
204 if (type_in_anonymous_namespace_p (t))
205 return htab_hash_pointer (t);
206
207 /* For polymorphic types, we can simply hash the virtual table. */
208 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
209 {
210 tree v = BINFO_VTABLE (TYPE_BINFO (t));
211 hashval_t hash = 0;
212
213 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
214 {
215 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
216 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
217 }
218
219 v = DECL_ASSEMBLER_NAME (v);
220 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
221 return hash;
222 }
223
224 /* Rest is not implemented yet. */
225 gcc_unreachable ();
226}
227
228/* Return the computed hashcode for ODR_TYPE. */
229
230inline hashval_t
231odr_hasher::hash (const value_type *odr_type)
232{
233 return hash_type_name (odr_type->type);
234}
235
236/* Compare types T1 and T2 and return true if they are
237 equivalent. */
238
239inline bool
240odr_hasher::equal (const value_type *t1, const compare_type *ct2)
241{
242 tree t2 = const_cast <tree> (ct2);
243
244 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
245 if (t1->type == t2)
246 return true;
247 if (!in_lto_p)
248 return false;
249 return types_same_for_odr (t1->type, t2);
250}
251
252/* Free ODR type V. */
253
254inline void
255odr_hasher::remove (value_type *v)
256{
257 v->bases.release ();
258 v->derived_types.release ();
259 if (v->types_set)
260 pointer_set_destroy (v->types_set);
261 ggc_free (v);
262}
263
264/* ODR type hash used to lookup ODR type based on tree type node. */
265
266typedef hash_table <odr_hasher> odr_hash_type;
267static odr_hash_type odr_hash;
268
269/* ODR types are also stored into ODR_TYPE vector to allow consistent
270 walking. Bases appear before derived types. Vector is garbage collected
271 so we won't end up visiting empty types. */
272
273static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
274#define odr_types (*odr_types_ptr)
275
276/* TYPE is equivalent to VAL by ODR, but its tree representation differs
277 from VAL->type. This may happen in LTO where tree merging did not merge
278 all variants of the same type. It may or may not mean an ODR violation.
279 Add it to the list of duplicates and warn on some violations. */
280
281static void
282add_type_duplicate (odr_type val, tree type)
283{
284 if (!val->types_set)
285 val->types_set = pointer_set_create ();
286
287 /* See if this duplicate is new. */
288 if (!pointer_set_insert (val->types_set, type))
289 {
290 bool merge = true;
291 bool base_mismatch = false;
292 gcc_assert (in_lto_p);
293 vec_safe_push (val->types, type);
294 unsigned int i,j;
295
296 /* First we compare memory layout. */
297 if (!types_compatible_p (val->type, type))
298 {
299 merge = false;
300 if (BINFO_VTABLE (TYPE_BINFO (val->type))
301 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
302 "type %qD violates one definition rule ",
303 type))
304 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
305 "a type with the same name but different layout is "
306 "defined in another translation unit");
307 if (cgraph_dump_file)
308 {
309 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
310
311 print_node (cgraph_dump_file, "", val->type, 0);
312 putc ('\n',cgraph_dump_file);
313 print_node (cgraph_dump_file, "", type, 0);
314 putc ('\n',cgraph_dump_file);
315 }
316 }
317
318 /* Next sanity check that bases are the same. If not, we will end
319 up producing wrong answers. */
320 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
321 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
322 {
323 odr_type base = get_odr_type
324 (BINFO_TYPE
325 (BINFO_BASE_BINFO (TYPE_BINFO (type),
326 i)),
327 true);
328 if (val->bases.length () <= j || val->bases[j] != base)
329 base_mismatch = true;
330 j++;
331 }
332 if (base_mismatch)
333 {
334 merge = false;
335
336 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
337 "type %qD violates one definition rule ",
338 type))
339 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
340 "a type with the same name but different bases is "
341 "defined in another translation unit");
342 if (cgraph_dump_file)
343 {
344 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
345
346 print_node (cgraph_dump_file, "", val->type, 0);
347 putc ('\n',cgraph_dump_file);
348 print_node (cgraph_dump_file, "", type, 0);
349 putc ('\n',cgraph_dump_file);
350 }
351 }
352
353 /* Regularize things a little. During LTO the same types may come with
354 different BINFOs: either because their virtual table was
355 not merged by tree merging and only later at decl merging, or
356 because one type comes with an external vtable while the other
357 comes with an internal one. We want to merge equivalent binfos to
358 conserve memory and streaming overhead.
359
360 The external vtables are more harmful: they contain references
361 to external declarations of methods that may be defined in the
362 merged LTO unit. For this reason we absolutely need to remove
363 them and replace them by internal variants. Not doing so will lead
364 to incomplete answers from possible_polymorphic_call_targets. */
365 if (!flag_ltrans && merge)
366 {
367 tree master_binfo = TYPE_BINFO (val->type);
368 tree v1 = BINFO_VTABLE (master_binfo);
369 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
370
371 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
372 {
373 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
374 && operand_equal_p (TREE_OPERAND (v1, 1),
375 TREE_OPERAND (v2, 1), 0));
376 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
377 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
378 }
379 gcc_assert (DECL_ASSEMBLER_NAME (v1)
380 == DECL_ASSEMBLER_NAME (v2));
381
382 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
383 {
384 unsigned int i;
385
386 TYPE_BINFO (val->type) = TYPE_BINFO (type);
387 for (i = 0; i < val->types->length (); i++)
388 {
389 if (TYPE_BINFO ((*val->types)[i])
390 == master_binfo)
391 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
392 }
393 }
394 else
395 TYPE_BINFO (type) = master_binfo;
396 }
397 }
398}
399
400/* Get ODR type hash entry for TYPE. If INSERT is true, create
401 possibly new entry. */
402
403odr_type
404get_odr_type (tree type, bool insert)
405{
406 odr_type_d **slot;
407 odr_type val;
408 hashval_t hash;
409
410 type = TYPE_MAIN_VARIANT (type);
411 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
412 hash = hash_type_name (type);
413 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
414 if (!slot)
415 return NULL;
416
417 /* See if we already have entry for type. */
418 if (*slot)
419 {
420 val = *slot;
421
422 /* With LTO we need to support multiple tree representations of
423 the same ODR type. */
424 if (val->type != type)
425 add_type_duplicate (val, type);
426 }
427 else
428 {
429 tree binfo = TYPE_BINFO (type);
430 unsigned int i;
431
432 val = ggc_alloc_cleared_odr_type_d ();
433 val->type = type;
434 val->bases = vNULL;
435 val->derived_types = vNULL;
436 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
437 *slot = val;
438 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
439 /* For now record only polymorphic types. Other types are
440 pointless for devirtualization and we can not precisely
441 determine ODR equivalency of these during LTO. */
442 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
443 {
444 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
445 i)),
446 true);
447 base->derived_types.safe_push (val);
448 val->bases.safe_push (base);
449 }
450 /* First record bases, then add into array so ids are increasing. */
451 if (odr_types_ptr)
452 val->id = odr_types.length ();
453 vec_safe_push (odr_types_ptr, val);
454 }
455 return val;
456}
457
458/* Dump ODR type T and all its derived types. INDENT specifies indentation for
459 recursive printing. */
460
461static void
462dump_odr_type (FILE *f, odr_type t, int indent=0)
463{
464 unsigned int i;
465 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
466 print_generic_expr (f, t->type, TDF_SLIM);
467 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
468 if (TYPE_NAME (t->type))
469 {
470 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
471 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
472 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
473 }
474 if (t->bases.length ())
475 {
476 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
477 for (i = 0; i < t->bases.length (); i++)
478 fprintf (f, " %i", t->bases[i]->id);
479 fprintf (f, "\n");
480 }
481 if (t->derived_types.length ())
482 {
483 fprintf (f, "%*s derived types:\n", indent * 2, "");
484 for (i = 0; i < t->derived_types.length (); i++)
485 dump_odr_type (f, t->derived_types[i], indent + 1);
486 }
487 fprintf (f, "\n");
488}
489
490/* Dump the type inheritance graph. */
491
492static void
493dump_type_inheritance_graph (FILE *f)
494{
495 unsigned int i;
496 if (!odr_types_ptr)
497 return;
498 fprintf (f, "\n\nType inheritance graph:\n");
499 for (i = 0; i < odr_types.length (); i++)
500 {
501 if (odr_types[i]->bases.length () == 0)
502 dump_odr_type (f, odr_types[i]);
503 }
504 for (i = 0; i < odr_types.length (); i++)
505 {
506 if (odr_types[i]->types && odr_types[i]->types->length ())
507 {
508 unsigned int j;
509 fprintf (f, "Duplicate tree types for odr type %i\n", i);
510 print_node (f, "", odr_types[i]->type, 0);
511 for (j = 0; j < odr_types[i]->types->length (); j++)
512 {
513 tree t;
514 fprintf (f, "duplicate #%i\n", j);
515 print_node (f, "", (*odr_types[i]->types)[j], 0);
516 t = (*odr_types[i]->types)[j];
517 while (TYPE_P (t) && TYPE_CONTEXT (t))
518 {
519 t = TYPE_CONTEXT (t);
520 print_node (f, "", t, 0);
521 }
522 putc ('\n',f);
523 }
524 }
525 }
526}
527
528/* Given method type T, return the type of the class it belongs to.
529 Look up the THIS pointer and get its type. */
530
531tree
532method_class_type (tree t)
533{
534 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
535 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
536
537 return TREE_TYPE (first_parm_type);
538}
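/* A minimal illustrative sketch, for a hypothetical method:

     struct A { virtual int f (int); };

   the METHOD_TYPE of A::f lists `A *' as the type of its first (THIS)
   argument, so method_class_type returns the RECORD_TYPE of A.  */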
539
540/* Initialize IPA devirt and build inheritance tree graph. */
541
542void
543build_type_inheritance_graph (void)
544{
545 struct cgraph_node *n;
546 FILE *inheritance_dump_file;
547 int flags;
548
549 if (odr_hash.is_created ())
550 return;
551 timevar_push (TV_IPA_INHERITANCE);
552 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
553 odr_hash.create (23);
554
555 /* We reconstruct the graph starting from the types of all methods seen in
556 the unit. */
557 FOR_EACH_FUNCTION (n)
558 if (DECL_VIRTUAL_P (n->decl)
559 && symtab_real_symbol_p (n))
560 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
561 if (inheritance_dump_file)
562 {
563 dump_type_inheritance_graph (inheritance_dump_file);
564 dump_end (TDI_inheritance, inheritance_dump_file);
565 }
566 timevar_pop (TV_IPA_INHERITANCE);
567}
568
569/* If TARGET has associated node, record it in the NODES array.
570 if TARGET can not be inserted (for example because its body was
571 already removed and there is no way to refer to it), clear COMPLETEP. */
572
573static void
574maybe_record_node (vec <cgraph_node *> &nodes,
575 tree target, pointer_set_t *inserted,
576 bool *completep)
577{
578 struct cgraph_node *target_node;
579 enum built_in_function fcode;
580
581 if (!target
582 /* Those are used to mark impossible scenarios. */
583 || (fcode = DECL_FUNCTION_CODE (target))
584 == BUILT_IN_UNREACHABLE
585 || fcode == BUILT_IN_TRAP)
586 return;
587
588 target_node = cgraph_get_node (target);
589
590 if (target_node != NULL
591 && (TREE_PUBLIC (target)
592 || target_node->definition)
593 && symtab_real_symbol_p (target_node))
594 {
595 gcc_assert (!target_node->global.inlined_to);
596 gcc_assert (symtab_real_symbol_p (target_node));
597 if (!pointer_set_insert (inserted, target))
598 {
599 pointer_set_insert (cached_polymorphic_call_targets,
600 target_node);
601 nodes.safe_push (target_node);
602 }
603 }
604 else if (completep
605 && !type_in_anonymous_namespace_p
606 (method_class_type (TREE_TYPE (target))))
607 *completep = true;
608}
609
610/* See if BINFO's type match OUTER_TYPE. If so, lookup
611 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
612 method in vtable and insert method to NODES array.
613 Otherwise recurse to base BINFOs.
614 This matches what get_binfo_at_offset does, but with the offset
615 being unknown.
616
617 TYPE_BINFO is the binfo holding a virtual table matching
618 BINFO's type. In the case of single inheritance, this
619 is binfo of BINFO's type ancestor (vtable is shared),
620 otherwise it is binfo of BINFO's type.
621
622 MATCHED_VTABLES tracks virtual tables we already did lookup
623 for virtual function in. INSERTED tracks nodes we already
624 inserted.
625
626 ANONYMOUS is true if BINFO is part of anonymous namespace.
627 */
628
629static void
630record_target_from_binfo (vec <cgraph_node *> &nodes,
631 tree binfo,
632 tree otr_type,
633 tree type_binfo,
634 HOST_WIDE_INT otr_token,
635 tree outer_type,
636 HOST_WIDE_INT offset,
637 pointer_set_t *inserted,
638 pointer_set_t *matched_vtables,
639 bool anonymous)
640{
641 tree type = BINFO_TYPE (binfo);
642 int i;
643 tree base_binfo;
644
645 gcc_checking_assert (BINFO_VTABLE (type_binfo));
646
647 if (types_same_for_odr (type, outer_type))
648 {
649 tree inner_binfo = get_binfo_at_offset (type_binfo,
650 offset, otr_type);
651 /* For types in anonymous namespace first check if the respective vtable
652 is alive. If not, we know the type can't be called. */
653 if (!flag_ltrans && anonymous)
654 {
655 tree vtable = BINFO_VTABLE (inner_binfo);
656 struct varpool_node *vnode;
657
658 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
659 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
660 vnode = varpool_get_node (vtable);
661 if (!vnode || !vnode->definition)
662 return;
663 }
664 gcc_assert (inner_binfo);
665 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
666 {
667 tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
668 if (target)
669 maybe_record_node (nodes, target, inserted, NULL);
670 }
671 return;
672 }
673
674 /* Walk bases. */
675 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
676 /* Walking bases that have no virtual method is a pointless exercise. */
677 if (polymorphic_type_binfo_p (base_binfo))
678 record_target_from_binfo (nodes, base_binfo, otr_type,
679 /* In the case of single inheritance,
680 the virtual table is shared with
681 the outer type. */
682 BINFO_VTABLE (base_binfo) ? base_binfo : type_binfo,
683 otr_token, outer_type, offset, inserted,
684 matched_vtables, anonymous);
685}
686
687/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
688 of TYPE, insert them to NODES, recurse into derived nodes.
689 INSERTED is used to avoid duplicate insertions of methods into NODES.
690 MATCHED_VTABLES are used to avoid duplicate walking vtables. */
691
692static void
693possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
694 pointer_set_t *inserted,
695 pointer_set_t *matched_vtables,
696 tree otr_type,
697 odr_type type,
698 HOST_WIDE_INT otr_token,
699 tree outer_type,
700 HOST_WIDE_INT offset)
701{
702 tree binfo = TYPE_BINFO (type->type);
703 unsigned int i;
704
705 record_target_from_binfo (nodes, binfo, otr_type, binfo, otr_token,
706 outer_type, offset,
707 inserted, matched_vtables,
708 type->anonymous_namespace);
709 for (i = 0; i < type->derived_types.length (); i++)
710 possible_polymorphic_call_targets_1 (nodes, inserted,
711 matched_vtables,
712 otr_type,
713 type->derived_types[i],
714 otr_token, outer_type, offset);
715}
716
717/* Cache of queries for polymorphic call targets.
718
719 Enumerating all call targets may get expensive when there are many
720 polymorphic calls in the program, so we memoize all the previous
721 queries and avoid duplicated work. */
722
723struct polymorphic_call_target_d
724{
725 HOST_WIDE_INT otr_token;
726 ipa_polymorphic_call_context context;
727 odr_type type;
728 vec <cgraph_node *> targets;
729 bool final;
730};
731
732/* Polymorphic call target cache helpers. */
733
734struct polymorphic_call_target_hasher
735{
736 typedef polymorphic_call_target_d value_type;
737 typedef polymorphic_call_target_d compare_type;
738 static inline hashval_t hash (const value_type *);
739 static inline bool equal (const value_type *, const compare_type *);
740 static inline void remove (value_type *);
741};
742
743/* Return the computed hashcode for ODR_QUERY. */
744
745inline hashval_t
746polymorphic_call_target_hasher::hash (const value_type *odr_query)
747{
748 hashval_t hash;
749
750 hash = iterative_hash_host_wide_int
751 (odr_query->otr_token,
752 odr_query->type->id);
753 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
754 hash);
755 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
756 return iterative_hash_hashval_t
757 (((int)odr_query->context.maybe_in_construction << 1)
758 | (int)odr_query->context.maybe_derived_type, hash);
759}
760
761/* Compare cache entries T1 and T2. */
762
763inline bool
764polymorphic_call_target_hasher::equal (const value_type *t1,
765 const compare_type *t2)
766{
767 return (t1->type == t2->type && t1->otr_token == t2->otr_token
768 && t1->context.offset == t2->context.offset
769 && t1->context.outer_type == t2->context.outer_type
770 && t1->context.maybe_in_construction
771 == t2->context.maybe_in_construction
772 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
773}
774
775/* Remove entry in polymorphic call target cache hash. */
776
777inline void
778polymorphic_call_target_hasher::remove (value_type *v)
779{
780 v->targets.release ();
781 free (v);
782}
783
784/* Polymorphic call target query cache. */
785
786typedef hash_table <polymorphic_call_target_hasher>
787 polymorphic_call_target_hash_type;
788static polymorphic_call_target_hash_type polymorphic_call_target_hash;
789
790/* Destroy polymorphic call target query cache. */
791
792static void
793free_polymorphic_call_targets_hash ()
794{
795 if (cached_polymorphic_call_targets)
796 {
797 polymorphic_call_target_hash.dispose ();
798 pointer_set_destroy (cached_polymorphic_call_targets);
799 cached_polymorphic_call_targets = NULL;
800 }
801}
802
803/* When virtual function is removed, we may need to flush the cache. */
804
805static void
806devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
807{
808 if (cached_polymorphic_call_targets
809 && pointer_set_contains (cached_polymorphic_call_targets, n))
810 free_polymorphic_call_targets_hash ();
811}
812
813/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
814 is contained at CONTEXT->OFFSET. Walk the memory representation of
815 CONTEXT->OUTER_TYPE and find the outermost class type that match
816 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
817 to represent it.
818
819 For example when CONTEXT represents type
820 class A
821 {
822 int a;
823 class B b;
824 }
825 and we look for type at offset sizeof(int), we end up with B and offset 0.
826 If the same is produced by multiple inheritance, we end up with A and offset
827 sizeof(int).
828
829 If we can not find the corresponding class, give up by setting
830 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
831 Return true when the lookup was successful. */
832
833static bool
834get_class_context (ipa_polymorphic_call_context *context,
835 tree expected_type)
836{
837 tree type = context->outer_type;
838 HOST_WIDE_INT offset = context->offset;
839
840 /* Find the sub-object the constant actually refers to and mark whether it is
841 an artificial one (as opposed to a user-defined one). */
842 while (true)
843 {
844 HOST_WIDE_INT pos, size;
845 tree fld;
846
847 /* On a match, just return what we found. */
848 if (TREE_CODE (type) == TREE_CODE (expected_type)
849 && types_same_for_odr (type, expected_type))
850 {
851 gcc_assert (offset == 0);
852 return true;
853 }
854
855 /* Walk the fields and find the one corresponding to OFFSET. */
856 if (TREE_CODE (type) == RECORD_TYPE)
857 {
858 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
859 {
860 if (TREE_CODE (fld) != FIELD_DECL)
861 continue;
862
863 pos = int_bit_position (fld);
864 size = tree_to_uhwi (DECL_SIZE (fld));
865 if (pos <= offset && (pos + size) > offset)
866 break;
867 }
868
869 if (!fld)
870 goto give_up;
871
872 type = TREE_TYPE (fld);
873 offset -= pos;
874 /* DECL_ARTIFICIAL represents a basetype. */
875 if (!DECL_ARTIFICIAL (fld))
876 {
877 context->outer_type = type;
878 context->offset = offset;
879 /* As soon as we see a field containing the type,
880 we know we are not looking for derivations. */
881 context->maybe_derived_type = false;
882 }
883 }
884 else if (TREE_CODE (type) == ARRAY_TYPE)
885 {
886 tree subtype = TREE_TYPE (type);
887
888 /* Give up if we don't know array size. */
889 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
890 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
891 goto give_up;
892 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
893 type = subtype;
894 context->outer_type = type;
895 context->offset = offset;
896 context->maybe_derived_type = false;
897 }
898 /* Give up on anything else. */
899 else
900 goto give_up;
901 }
902
903 /* If we failed to find subtype we look for, give up and fall back to the
904 most generic query. */
905give_up:
906 context->outer_type = expected_type;
907 context->offset = 0;
908 context->maybe_derived_type = true;
909 return false;
910}
911
912/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
913
914static bool
915contains_type_p (tree outer_type, HOST_WIDE_INT offset,
916 tree otr_type)
917{
918 ipa_polymorphic_call_context context = {offset, outer_type,
919 false, true};
920 return get_class_context (&context, otr_type);
921}
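/* A minimal illustrative sketch: offsets here are measured in bits.
   Reusing the example from get_class_context above, and assuming the
   usual layout with no padding,

     class A { int a; class B b; };

   contains_type_p (A, sizeof (int) * BITS_PER_UNIT, B) is true because
   the field walk finds the member b of type B at that bit offset, while
   contains_type_p (A, 0, B) is false since offset 0 holds the int.  */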
922
923/* Given REF call in FNDECL, determine class of the polymorphic
924 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
925 Return pointer to object described by the context */
926
927tree
928get_polymorphic_call_info (tree fndecl,
929 tree ref,
930 tree *otr_type,
931 HOST_WIDE_INT *otr_token,
932 ipa_polymorphic_call_context *context)
933{
934 tree base_pointer;
935 *otr_type = obj_type_ref_class (ref);
936 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
937
938 /* Set up basic info in case we find nothing interesting in the analysis. */
939 context->outer_type = *otr_type;
940 context->offset = 0;
941 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
942 context->maybe_derived_type = true;
943 context->maybe_in_construction = false;
944
945 /* Walk SSA for outer object. */
946 do
947 {
948 if (TREE_CODE (base_pointer) == SSA_NAME
949 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
950 && SSA_NAME_DEF_STMT (base_pointer)
951 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
952 {
953 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
954 STRIP_NOPS (base_pointer);
955 }
956 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
957 {
958 HOST_WIDE_INT size, max_size;
959 HOST_WIDE_INT offset2;
960 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
961 &offset2, &size, &max_size);
962
963 /* If this is a varying address, punt. */
964 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
965 && max_size != -1
966 && max_size == size)
967 {
968 /* We found dereference of a pointer. Type of the pointer
969 and MEM_REF is meaningless, but we can look further. */
970 if (TREE_CODE (base) == MEM_REF)
971 {
972 base_pointer = TREE_OPERAND (base, 0);
973 context->offset
974 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
975 context->outer_type = NULL;
976 }
977 /* We found base object. In this case the outer_type
978 is known. */
979 else if (DECL_P (base))
980 {
981 context->outer_type = TREE_TYPE (base);
982 gcc_assert (!POINTER_TYPE_P (context->outer_type));
983
984 /* Only type inconsistent programs can have otr_type that is
985 not part of outer type. */
986 if (!contains_type_p (context->outer_type,
987 context->offset, *otr_type))
988 return base_pointer;
989 context->offset += offset2;
990 base_pointer = NULL;
991 /* Make very conservative assumption that all objects
992 may be in construction.
993 TODO: ipa-prop already contains code to tell better.
994 merge it later. */
995 context->maybe_in_construction = true;
996 context->maybe_derived_type = false;
997 return base_pointer;
998 }
999 else
1000 break;
1001 }
1002 else
1003 break;
1004 }
1005 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1006 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1007 {
1008 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1009 * BITS_PER_UNIT;
1010 base_pointer = TREE_OPERAND (base_pointer, 0);
1011 }
1012 else
1013 break;
1014 }
1015 while (true);
1016
1017 /* Try to determine type of the outer object. */
1018 if (TREE_CODE (base_pointer) == SSA_NAME
1019 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1020 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1021 {
1022 /* See if parameter is THIS pointer of a method. */
1023 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1024 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1025 {
1026 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1027 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1028
1029 /* Dynamic casting has possibly upcasted the type
1030 in the hierarchy. In this case outer type is less
1031 informative than inner type and we should forget
1032 about it. */
1033 if (!contains_type_p (context->outer_type, context->offset,
1034 *otr_type))
1035 {
1036 context->outer_type = NULL;
1037 return base_pointer;
1038 }
1039
1040 /* If the function is constructor or destructor, then
1041 the type is possibly in construction, but we know
1042 it is not derived type. */
1043 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1044 || DECL_CXX_DESTRUCTOR_P (fndecl))
1045 {
1046 context->maybe_in_construction = true;
1047 context->maybe_derived_type = false;
1048 }
1049 else
1050 {
1051 context->maybe_derived_type = true;
1052 context->maybe_in_construction = false;
1053 }
1054 return base_pointer;
1055 }
1056 /* Non-PODs passed by value are really passed by invisible
1057 reference. In this case we also know the type of the
1058 object. */
1059 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1060 {
1061 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1062 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1063 /* Only type inconsistent programs can have otr_type that is
1064 not part of outer type. */
1065 if (!contains_type_p (context->outer_type, context->offset,
1066 *otr_type))
1067 {
1068 context->outer_type = NULL;
1069 gcc_unreachable ();
1070 return base_pointer;
1071 }
1072 context->maybe_derived_type = false;
1073 context->maybe_in_construction = false;
1074 return base_pointer;
1075 }
1076 }
1077 /* TODO: There are multiple ways to derive a type. For instance
1078 if BASE_POINTER is passed to a constructor call prior to our reference.
1079 We do not make this type of flow sensitive analysis yet. */
1080 return base_pointer;
1081}
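/* A minimal illustrative sketch, for the hypothetical method:

     struct A { virtual void f (); void g () { f (); } };

   the call f () inside A::g is this->f (); the walk above ends at the
   default definition of the THIS parameter, so context->outer_type
   becomes A.  Because A::g is neither a constructor nor a destructor,
   maybe_derived_type is set (the object may really be of a class derived
   from A) while maybe_in_construction stays false.  */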
1082
1083/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1084 Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
1085 and insert them to NODES.
1086
1087 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
1088
1089static void
1090record_targets_from_bases (tree otr_type,
1091 HOST_WIDE_INT otr_token,
1092 tree outer_type,
1093 HOST_WIDE_INT offset,
1094 vec <cgraph_node *> nodes,
1095 pointer_set_t *inserted,
1096 pointer_set_t *matched_vtables,
1097 bool *completep)
1098{
1099 while (true)
1100 {
1101 HOST_WIDE_INT pos, size;
1102 tree base_binfo;
1103 tree fld;
1104
1105 if (types_same_for_odr (outer_type, otr_type))
1106 return;
1107
1108 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1109 {
1110 if (TREE_CODE (fld) != FIELD_DECL)
1111 continue;
1112
1113 pos = int_bit_position (fld);
1114 size = tree_to_shwi (DECL_SIZE (fld));
1115 if (pos <= offset && (pos + size) > offset)
1116 break;
1117 }
1118 /* Within a class type we should always find corresponding fields. */
1119 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1120
1121 /* Nonbasetypes should have been stripped by outer_class_type. */
1122 gcc_assert (DECL_ARTIFICIAL (fld));
1123
1124 outer_type = TREE_TYPE (fld);
1125 offset -= pos;
1126
1127 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1128 offset, otr_type);
1129 gcc_assert (base_binfo);
1130 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1131 {
1132 tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
1133 if (target)
1134 maybe_record_node (nodes, target, inserted, completep);
1135 /* The only way method in anonymous namespace can become unreferable
1136 is that it has been fully optimized out. */
1137 else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
1138 *completep = false;
1139 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1140 }
1141 }
1142}
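/* A minimal illustrative sketch of why bases must be walked for objects
   possibly in construction: the vtable pointer changes while constructors
   and destructors run.  For the hypothetical hierarchy

     struct A { virtual void f (); A () { f (); } };
     struct B : A { void f (); };

   constructing a B first runs A's constructor, during which the object's
   vtable pointer refers to A's vtable, so the call f () inside A::A
   dispatches to A::f rather than B::f.  Hence, when maybe_in_construction
   is set, virtual methods of the base types are possible targets too.  */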
1143
1144/* When virtual table is removed, we may need to flush the cache. */
1145
1146static void
1147devirt_variable_node_removal_hook (struct varpool_node *n,
1148 void *d ATTRIBUTE_UNUSED)
1149{
1150 if (cached_polymorphic_call_targets
1151 && DECL_VIRTUAL_P (n->decl)
1152 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1153 free_polymorphic_call_targets_hash ();
1154}
1155
1156/* Return vector containing possible targets of polymorphic call of type
1157 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1158 If INCLUDE_BASES is true, walk also base types of OUTER_TYPE containing
1159 OTR_TYPE and include their virtual methods. This is useful for types
1160 possibly in construction or destruction where the virtual table may
1161 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
1162 us walk the inheritance graph for all derivations.
1163
1164 If COMPLETEP is non-NULL, store true if the list is complete.
1165 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1166 in the target cache. If user needs to visit every target list
1167 just once, it can memoize them.
1168
1169 Returned vector is placed into cache. It is NOT caller's responsibility
1170 to free it. The vector can be freed on cgraph_remove_node call if
1171 the particular node is a virtual function present in the cache. */
1172
1173vec <cgraph_node *>
1174possible_polymorphic_call_targets (tree otr_type,
1175 HOST_WIDE_INT otr_token,
1176 ipa_polymorphic_call_context context,
1177 bool *completep,
1178 void **cache_token)
1179{
1180 static struct cgraph_node_hook_list *node_removal_hook_holder;
1181 pointer_set_t *inserted;
1182 pointer_set_t *matched_vtables;
1183 vec <cgraph_node *> nodes=vNULL;
1184 odr_type type, outer_type;
1185 polymorphic_call_target_d key;
1186 polymorphic_call_target_d **slot;
1187 unsigned int i;
1188 tree binfo, target;
1189 bool final;
1190
1191 type = get_odr_type (otr_type, true);
1192
1193 /* Lookup the outer class type we want to walk. */
1194 if (context.outer_type)
1195 get_class_context (&context, otr_type);
1196
1197 /* We now canonicalize our query, so we do not need extra hashtable entries. */
1198
1199 /* Without outer type, we have no use for offset. Just do the
1200 basic search from the inner type. */
1201 if (!context.outer_type)
1202 {
1203 context.outer_type = otr_type;
1204 context.offset = 0;
1205 }
1206 /* We need to update our hierarchy if the type does not exist. */
1207 outer_type = get_odr_type (context.outer_type, true);
1208 /* If outer and inner type match, there are no bases to see. */
1209 if (type == outer_type)
1210 context.maybe_in_construction = false;
1211 /* If the type is final, there are no derivations. */
1212 if (TYPE_FINAL_P (outer_type->type))
1213 context.maybe_derived_type = false;
1214
1215 /* Initialize query cache. */
1216 if (!cached_polymorphic_call_targets)
1217 {
1218 cached_polymorphic_call_targets = pointer_set_create ();
1219 polymorphic_call_target_hash.create (23);
1220 if (!node_removal_hook_holder)
1221 {
1222 node_removal_hook_holder =
1223 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1224 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1225 NULL);
1226 }
1227 }
1228
1229 /* Lookup cached answer. */
1230 key.type = type;
1231 key.otr_token = otr_token;
1232 key.context = context;
1233 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1234 if (cache_token)
1235 *cache_token = (void *)*slot;
1236 if (*slot)
1237 {
1238 if (completep)
1239 *completep = (*slot)->final;
1240 return (*slot)->targets;
1241 }
1242
1243 final = true;
1244
1245 /* Do actual search. */
1246 timevar_push (TV_IPA_VIRTUAL_CALL);
1247 *slot = XCNEW (polymorphic_call_target_d);
1248 if (cache_token)
1249 *cache_token = (void *)*slot;
1250 (*slot)->type = type;
1251 (*slot)->otr_token = otr_token;
1252 (*slot)->context = context;
1253
1254 inserted = pointer_set_create ();
1255 matched_vtables = pointer_set_create ();
1256
1257 /* First see virtual method of type itself. */
1258
1259 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1260 context.offset, otr_type);
1261 target = gimple_get_virt_method_for_binfo (otr_token, binfo);
1262 if (target)
1263 {
1264 maybe_record_node (nodes, target, inserted, &final);
1265
1266 /* In the case we get final method, we don't need
1267 to walk derivations. */
1268 if (DECL_FINAL_P (target))
1269 context.maybe_derived_type = false;
1270 }
1271 /* The only way method in anonymous namespace can become unreferable
1272 is that it has been fully optimized out. */
1273 else if (flag_ltrans || !type->anonymous_namespace)
1274 final = false;
1275 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1276
1277 /* Next walk bases, if asked to. */
1278 if (context.maybe_in_construction)
1279 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1280 context.offset, nodes, inserted,
1281 matched_vtables, &final);
1282
1283 /* Finally walk recursively all derived types. */
1284 if (context.maybe_derived_type)
1285 {
1286 /* For anonymous namespace types we can attempt to build full type.
1287 All derivations must be in this unit (unless we see partial unit). */
1288 if (!type->anonymous_namespace || flag_ltrans)
1289 final = false;
1290 for (i = 0; i < outer_type->derived_types.length(); i++)
1291 possible_polymorphic_call_targets_1 (nodes, inserted,
1292 matched_vtables,
1293 otr_type, outer_type->derived_types[i],
1294 otr_token, outer_type->type,
1295 context.offset);
1296 }
1297 (*slot)->targets = nodes;
1298 (*slot)->final = final;
1299 if (completep)
1300 *completep = final;
1301
1302 pointer_set_destroy (inserted);
1303 pointer_set_destroy (matched_vtables);
1304 timevar_pop (TV_IPA_VIRTUAL_CALL);
1305 return nodes;
1306}
1307
1308/* Dump all possible targets of a polymorphic call. */
1309
1310void
1311dump_possible_polymorphic_call_targets (FILE *f,
1312 tree otr_type,
1313 HOST_WIDE_INT otr_token,
1314 const ipa_polymorphic_call_context &ctx)
1315{
1316 vec <cgraph_node *> targets;
1317 bool final;
1318 odr_type type = get_odr_type (otr_type, false);
1319 unsigned int i;
1320
1321 if (!type)
1322 return;
1323 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1324 ctx,
1325 &final);
1326 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1327 print_generic_expr (f, type->type, TDF_SLIM);
1328 fprintf (f, " token %i\n"
1329 " Contained in type:",
1330 (int)otr_token);
1331 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1332 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
1333 " %s%s%s\n ",
1334 ctx.offset,
1335 final ? "This is full list." :
1336 "This is partial list; extra targets may be defined in other units.",
1337 ctx.maybe_in_construction ? " (base types included)" : "",
1338 ctx.maybe_derived_type ? " (derived types included)" : "");
1339 for (i = 0; i < targets.length (); i++)
1340 fprintf (f, " %s/%i", targets[i]->name (),
1341 targets[i]->order);
1342 fprintf (f, "\n\n");
1343}
1344
1345
1346/* Return true if N can be possibly target of a polymorphic call of
1347 OTR_TYPE/OTR_TOKEN. */
1348
1349bool
1350possible_polymorphic_call_target_p (tree otr_type,
1351 HOST_WIDE_INT otr_token,
1352 const ipa_polymorphic_call_context &ctx,
1353 struct cgraph_node *n)
1354{
1355 vec <cgraph_node *> targets;
1356 unsigned int i;
1357 enum built_in_function fcode;
1358 bool final;
1359
1360 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1361 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1362 == BUILT_IN_UNREACHABLE
1363 || fcode == BUILT_IN_TRAP))
1364 return true;
1365
1366 if (!odr_hash.is_created ())
1367 return true;
1368 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1369 for (i = 0; i < targets.length (); i++)
1370 if (symtab_semantically_equivalent_p (n, targets[i]))
1371 return true;
1372
1373 /* At the moment we allow the middle end to dig out new external declarations
1374 as targets of polymorphic calls. */
1375 if (!final && !n->definition)
1376 return true;
1377 return false;
1378}
1379
1380
1381/* After callgraph construction new external nodes may appear.
1382 Add them into the graph. */
1383
1384void
1385update_type_inheritance_graph (void)
1386{
1387 struct cgraph_node *n;
1388
1389 if (!odr_hash.is_created ())
1390 return;
1391 free_polymorphic_call_targets_hash ();
1392 timevar_push (TV_IPA_INHERITANCE);
1393 /* We reconstruct the graph starting from the types of all methods seen in
1394 the unit. */
1395 FOR_EACH_FUNCTION (n)
1396 if (DECL_VIRTUAL_P (n->decl)
1397 && !n->definition
1398 && symtab_real_symbol_p (n))
1399 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1400 timevar_pop (TV_IPA_INHERITANCE);
1401}
1402
1403
1404/* Return true if N looks like a likely target of a polymorphic call.
1405 Rule out cxa_pure_virtual, noreturns, functions declared cold and
1406 other obvious cases. */
1407
1408bool
1409likely_target_p (struct cgraph_node *n)
1410{
1411 int flags;
1412 /* cxa_pure_virtual and similar things are not likely. */
1413 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1414 return false;
1415 flags = flags_from_decl_or_type (n->decl);
1416 if (flags & ECF_NORETURN)
1417 return false;
1418 if (lookup_attribute ("cold",
1419 DECL_ATTRIBUTES (n->decl)))
1420 return false;
1421 if (n->frequency < NODE_FREQUENCY_NORMAL)
1422 return false;
1423 return true;
1424}
1425
1426/* The ipa-devirt pass.
1427 When a polymorphic call has only one likely target in the unit,
1428 turn it into a speculative call. */
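/* A minimal illustrative sketch: speculative devirtualization
   conceptually rewrites an indirect call

     obj->f ();

   that has a single likely target T::f into

     if (obj's vtable entry for f == &T::f)
       T::f (obj);	// direct call, may later be inlined
     else
       obj->f ();	// fallback indirect call

   cgraph_turn_edge_to_speculative below records this as a speculative
   call edge carrying roughly 8/10 of the original count and frequency;
   later passes either confirm the speculation or drop it again.  */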
1429
1430static unsigned int
1431ipa_devirt (void)
1432{
1433 struct cgraph_node *n;
1434 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1435 struct cgraph_edge *e;
1436
1437 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1438 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1439 int nwrong = 0, nok = 0, nexternal = 0;
1440
1441 FOR_EACH_DEFINED_FUNCTION (n)
1442 {
1443 bool update = false;
1444 if (dump_file && n->indirect_calls)
1445 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1446 n->name (), n->order);
1447 for (e = n->indirect_calls; e; e = e->next_callee)
1448 if (e->indirect_info->polymorphic)
1449 {
1450 struct cgraph_node *likely_target = NULL;
1451 void *cache_token;
1452 bool final;
1453 vec <cgraph_node *>targets
1454 = possible_polymorphic_call_targets
1455 (e, &final, &cache_token);
1456 unsigned int i;
1457
1458 if (dump_file)
1459 dump_possible_polymorphic_call_targets
1460 (dump_file, e);
1461
1462 npolymorphic++;
1463
1464 if (!cgraph_maybe_hot_edge_p (e))
1465 {
1466 if (dump_file)
1467 fprintf (dump_file, "Call is cold\n");
1468 ncold++;
1469 continue;
1470 }
1471 if (e->speculative)
1472 {
1473 if (dump_file)
1474 fprintf (dump_file, "Call is already speculated\n");
1475 nspeculated++;
1476
1477 /* When dumping see if we agree with speculation. */
1478 if (!dump_file)
1479 continue;
1480 }
1481 if (pointer_set_contains (bad_call_targets,
1482 cache_token))
1483 {
1484 if (dump_file)
1485 fprintf (dump_file, "Target list is known to be useless\n");
1486 nmultiple++;
1487 continue;
1488 }
1489 for (i = 0; i < targets.length (); i++)
1490 if (likely_target_p (targets[i]))
1491 {
1492 if (likely_target)
1493 {
1494 likely_target = NULL;
1495 if (dump_file)
1496 fprintf (dump_file, "More than one likely target\n");
1497 nmultiple++;
1498 break;
1499 }
1500 likely_target = targets[i];
1501 }
1502 if (!likely_target)
1503 {
1504 pointer_set_insert (bad_call_targets, cache_token);
1505 continue;
1506 }
1507 /* This is reached only when dumping; check if we agree or disagree
1508 with the speculation. */
1509 if (e->speculative)
1510 {
1511 struct cgraph_edge *e2;
1512 struct ipa_ref *ref;
1513 cgraph_speculative_call_info (e, e2, e, ref);
1514 if (cgraph_function_or_thunk_node (e2->callee, NULL)
1515 == cgraph_function_or_thunk_node (likely_target, NULL))
1516 {
1517 fprintf (dump_file, "We agree with speculation\n");
1518 nok++;
1519 }
1520 else
1521 {
1522 fprintf (dump_file, "We disagree with speculation\n");
1523 nwrong++;
1524 }
1525 continue;
1526 }
1527 if (!likely_target->definition)
1528 {
1529 if (dump_file)
1530 fprintf (dump_file, "Target is not a definition\n");
1531 nnotdefined++;
1532 continue;
1533 }
1534 /* Do not introduce new references to external symbols. While we
1535 can handle these just well, it is common for programs to be built
1536 incorrectly with headers defining methods they are not actually
1537 linked with. */
1538 if (DECL_EXTERNAL (likely_target->decl))
1539 {
1540 if (dump_file)
1541 fprintf (dump_file, "Target is external\n");
1542 nexternal++;
1543 continue;
1544 }
1545 if (cgraph_function_body_availability (likely_target)
1546 <= AVAIL_OVERWRITABLE
1547 && symtab_can_be_discarded (likely_target))
1548 {
1549 if (dump_file)
1550 fprintf (dump_file, "Target is overwritable\n");
1551 noverwritable++;
1552 continue;
1553 }
1554 else
1555 {
1556 if (dump_file)
1557 fprintf (dump_file,
1558 "Speculatively devirtualizing call in %s/%i to %s/%i\n",
1559 n->name (), n->order,
1560 likely_target->name (),
1561 likely_target->order);
1562 if (!symtab_can_be_discarded (likely_target))
1563 {
1564 cgraph_node *alias;
1565 alias = cgraph (symtab_nonoverwritable_alias
1566 (likely_target));
1567 if (alias)
1568 likely_target = alias;
1569 }
1570 nconverted++;
1571 update = true;
1572 cgraph_turn_edge_to_speculative
1573 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1574 }
1575 }
1576 if (update)
1577 inline_update_overall_summary (n);
1578 }
1579 pointer_set_destroy (bad_call_targets);
1580
1581 if (dump_file)
1582 fprintf (dump_file,
1583 "%i polymorphic calls, %i devirtualized,"
1584 " %i speculatively devirtualized, %i cold\n"
1585 "%i have multiple targets, %i overwritable,"
1586 " %i already speculated (%i agree, %i disagree),"
1587 " %i external, %i not defined\n",
1588 npolymorphic, ndevirtualized, nconverted, ncold,
1589 nmultiple, noverwritable, nspeculated, nok, nwrong,
1590 nexternal, nnotdefined);
1591 return ndevirtualized ? TODO_remove_functions : 0;
1592}
1593
1594/* Gate for speculative IPA devirtualization optimization. */
1595
1596static bool
1597gate_ipa_devirt (void)
1598{
1599 return (flag_devirtualize
1600 && flag_devirtualize_speculatively
1601 && optimize);
1602}
1603
1604namespace {
1605
1606const pass_data pass_data_ipa_devirt =
1607{
1608 IPA_PASS, /* type */
1609 "devirt", /* name */
1610 OPTGROUP_NONE, /* optinfo_flags */
1611 true, /* has_gate */
1612 true, /* has_execute */
1613 TV_IPA_DEVIRT, /* tv_id */
1614 0, /* properties_required */
1615 0, /* properties_provided */
1616 0, /* properties_destroyed */
1617 0, /* todo_flags_start */
1618 ( TODO_dump_symtab ), /* todo_flags_finish */
1619};
1620
1621class pass_ipa_devirt : public ipa_opt_pass_d
1622{
1623public:
1624 pass_ipa_devirt (gcc::context *ctxt)
1625 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
1626 NULL, /* generate_summary */
1627 NULL, /* write_summary */
1628 NULL, /* read_summary */
1629 NULL, /* write_optimization_summary */
1630 NULL, /* read_optimization_summary */
1631 NULL, /* stmt_fixup */
1632 0, /* function_transform_todo_flags_start */
1633 NULL, /* function_transform */
1634 NULL) /* variable_transform */
1635 {}
1636
1637 /* opt_pass methods: */
1638 bool gate () { return gate_ipa_devirt (); }
1639 unsigned int execute () { return ipa_devirt (); }
1640
1641}; // class pass_ipa_devirt
1642
1643} // anon namespace
1644
1645ipa_opt_pass_d *
1646make_pass_ipa_devirt (gcc::context *ctxt)
1647{
1648 return new pass_ipa_devirt (ctxt);
1649}
1650
1651#include "gt-ipa-devirt.h"