]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-devirt.c
* lto-streamer-out.c (hash_scc): Avoid quadratic hashing loop.
[thirdparty/gcc.git] / gcc / ipa-devirt.c
CommitLineData
eefe9a99
JH
1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
23a5b65a 3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
eefe9a99
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
0e1474e5 40 This is the Gimple representation of type information of a polymorphic call.
eefe9a99
JH
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
0e1474e5 43 otr_token is the index into virtual table where address is taken.
eefe9a99
JH
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
0e1474e5 58 virtual table of the base type. Also BINFO_OFFSET specifies
eefe9a99
JH
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
0e1474e5 75 or from DECL_VINDEX of a given virtual table.
eefe9a99
JH
76
77 polymorphic (indirect) call
78 This is callgraph represention of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
0e1474e5 89 inserted into the graph. Also types without virtual methods are not
eefe9a99
JH
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
0e1474e5 98 Edges are represented by odr_type->base and odr_type->derived_types.
eefe9a99
JH
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
bbc9396b
JH
104
105 pass_ipa_devirt performs simple speculative devirtualization.
eefe9a99
JH
106*/
107
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
4d648807 112#include "tree.h"
d8a2d370
DN
113#include "print-tree.h"
114#include "calls.h"
eefe9a99 115#include "cgraph.h"
d8a2d370 116#include "expr.h"
eefe9a99 117#include "tree-pass.h"
eefe9a99
JH
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
2fb9a547
AM
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
eefe9a99 127#include "gimple.h"
bbc9396b 128#include "ipa-inline.h"
61a74079 129#include "diagnostic.h"
68377e53 130#include "tree-dfa.h"
ec77d61f 131#include "demangle.h"
2b5f0895 132#include "dbgcnt.h"
ec77d61f
JH
133
134static bool odr_violation_reported = false;
68377e53
JH
135
136/* Dummy polymorphic call context. */
137
138const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
eefe9a99 140
0e1474e5
JH
141/* Pointer set of all call targets appearing in the cache. */
142static pointer_set_t *cached_polymorphic_call_targets;
143
eefe9a99
JH
144/* The node of type inheritance graph. For each type unique in
145 One Defintion Rule (ODR) sense, we produce one node linking all
146 main variants of types equivalent to it, bases and derived types. */
147
148struct GTY(()) odr_type_d
149{
eefe9a99
JH
150 /* leader type. */
151 tree type;
549bcbd1 152 /* All bases; built only for main variants of types */
eefe9a99 153 vec<odr_type> GTY((skip)) bases;
549bcbd1
JH
154 /* All derrived types with virtual methods seen in unit;
155 built only for main variants oftypes */
eefe9a99 156 vec<odr_type> GTY((skip)) derived_types;
0e1474e5 157
61a74079
JH
158 /* All equivalent types, if more than one. */
159 vec<tree, va_gc> *types;
160 /* Set of all equivalent types, if NON-NULL. */
161 pointer_set_t * GTY((skip)) types_set;
162
0e1474e5
JH
163 /* Unique ID indexing the type in odr_types array. */
164 int id;
eefe9a99
JH
165 /* Is it in anonymous namespace? */
166 bool anonymous_namespace;
2d1644bf
JH
167 /* Do we know about all derivations of given type? */
168 bool all_derivations_known;
549bcbd1
JH
169 /* Did we report ODR violation here? */
170 bool odr_violated;
eefe9a99
JH
171};
172
173
0e1474e5
JH
174/* Return true if BINFO corresponds to a type with virtual methods.
175
176 Every type has several BINFOs. One is the BINFO associated by the type
177 while other represents bases of derived types. The BINFOs representing
178 bases do not have BINFO_VTABLE pointer set when this is the single
179 inheritance (because vtables are shared). Look up the BINFO of type
180 and check presence of its vtable. */
eefe9a99
JH
181
182static inline bool
183polymorphic_type_binfo_p (tree binfo)
184{
185 /* See if BINFO's type has an virtual table associtated with it. */
186 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
187}
188
2d1644bf
JH
189/* Return TRUE if all derived types of T are known and thus
190 we may consider the walk of derived type complete.
191
192 This is typically true only for final anonymous namespace types and types
193 defined within functions (that may be COMDAT and thus shared across units,
194 but with the same set of derived types). */
195
196static bool
197type_all_derivations_known_p (tree t)
198{
199 if (TYPE_FINAL_P (t))
200 return true;
201 if (flag_ltrans)
202 return false;
203 if (type_in_anonymous_namespace_p (t))
204 return true;
205 return (decl_function_context (TYPE_NAME (t)) != NULL);
206}
207
208/* Return TURE if type's constructors are all visible. */
209
210static bool
211type_all_ctors_visible_p (tree t)
212{
213 return !flag_ltrans
214 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
215 /* We can not always use type_all_derivations_known_p.
216 For function local types we must assume case where
217 the function is COMDAT and shared in between units.
218
219 TODO: These cases are quite easy to get, but we need
220 to keep track of C++ privatizing via -Wno-weak
221 as well as the IPA privatizing. */
222 && type_in_anonymous_namespace_p (t);
223}
224
225/* Return TRUE if type may have instance. */
226
227static bool
228type_possibly_instantiated_p (tree t)
229{
230 tree vtable;
231 varpool_node *vnode;
232
233 /* TODO: Add abstract types here. */
234 if (!type_all_ctors_visible_p (t))
235 return true;
236
237 vtable = BINFO_VTABLE (TYPE_BINFO (t));
238 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
239 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
240 vnode = varpool_get_node (vtable);
241 return vnode && vnode->definition;
242}
243
eefe9a99
JH
244/* One Definition Rule hashtable helpers. */
245
246struct odr_hasher
247{
248 typedef odr_type_d value_type;
249 typedef union tree_node compare_type;
250 static inline hashval_t hash (const value_type *);
251 static inline bool equal (const value_type *, const compare_type *);
252 static inline void remove (value_type *);
253};
254
549bcbd1
JH
255/* Return type that was declared with T's name so that T is an
256 qualified variant of it. */
257
258static inline tree
259main_odr_variant (const_tree t)
260{
261 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
262 return TREE_TYPE (TYPE_NAME (t));
263 /* Unnamed types and non-C++ produced types can be compared by variants. */
264 else
265 return TYPE_MAIN_VARIANT (t);
266}
267
eefe9a99
JH
268/* Produce hash based on type name. */
269
549bcbd1 270static hashval_t
eefe9a99
JH
271hash_type_name (tree t)
272{
549bcbd1 273 gcc_checking_assert (main_odr_variant (t) == t);
eefe9a99
JH
274
275 /* If not in LTO, all main variants are unique, so we can do
276 pointer hash. */
277 if (!in_lto_p)
278 return htab_hash_pointer (t);
279
280 /* Anonymous types are unique. */
281 if (type_in_anonymous_namespace_p (t))
282 return htab_hash_pointer (t);
283
61a74079 284 /* For polymorphic types, we can simply hash the virtual table. */
549bcbd1
JH
285 if (TREE_CODE (t) == RECORD_TYPE
286 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
61a74079
JH
287 {
288 tree v = BINFO_VTABLE (TYPE_BINFO (t));
289 hashval_t hash = 0;
290
291 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
292 {
293 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
294 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
295 }
296
297 v = DECL_ASSEMBLER_NAME (v);
61a74079
JH
298 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
299 return hash;
300 }
301
eefe9a99
JH
302 /* Rest is not implemented yet. */
303 gcc_unreachable ();
304}
305
306/* Return the computed hashcode for ODR_TYPE. */
307
308inline hashval_t
309odr_hasher::hash (const value_type *odr_type)
310{
311 return hash_type_name (odr_type->type);
312}
313
549bcbd1
JH
314/* For languages with One Definition Rule, work out if
315 types are the same based on their name.
316
317 This is non-trivial for LTO where minnor differences in
318 the type representation may have prevented type merging
319 to merge two copies of otherwise equivalent type.
320
321 Until we start streaming mangled type names, this function works
322 only for polymorphic types. */
323
324bool
325types_same_for_odr (const_tree type1, const_tree type2)
326{
327 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
328
329 type1 = main_odr_variant (type1);
330 type2 = main_odr_variant (type2);
331
332 if (type1 == type2)
333 return true;
334
335 if (!in_lto_p)
336 return false;
337
338 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
339 on the corresponding TYPE_STUB_DECL. */
340 if (type_in_anonymous_namespace_p (type1)
341 || type_in_anonymous_namespace_p (type2))
342 return false;
343
344 /* At the moment we have no way to establish ODR equivlaence at LTO
345 other than comparing virtual table pointrs of polymorphic types.
346 Eventually we should start saving mangled names in TYPE_NAME.
347 Then this condition will become non-trivial. */
348
349 if (TREE_CODE (type1) == RECORD_TYPE
350 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
351 && BINFO_VTABLE (TYPE_BINFO (type1))
352 && BINFO_VTABLE (TYPE_BINFO (type2)))
353 {
354 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
355 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
356 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
357 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
358 return (operand_equal_p (TREE_OPERAND (v1, 1),
359 TREE_OPERAND (v2, 1), 0)
360 && DECL_ASSEMBLER_NAME
361 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
362 == DECL_ASSEMBLER_NAME
363 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
364 }
365 gcc_unreachable ();
366}
367
368
0e1474e5 369/* Compare types T1 and T2 and return true if they are
eefe9a99
JH
370 equivalent. */
371
372inline bool
373odr_hasher::equal (const value_type *t1, const compare_type *ct2)
374{
375 tree t2 = const_cast <tree> (ct2);
376
549bcbd1 377 gcc_checking_assert (main_odr_variant (t2) == t2);
eefe9a99
JH
378 if (t1->type == t2)
379 return true;
380 if (!in_lto_p)
381 return false;
382 return types_same_for_odr (t1->type, t2);
383}
384
0e1474e5 385/* Free ODR type V. */
eefe9a99
JH
386
387inline void
388odr_hasher::remove (value_type *v)
389{
390 v->bases.release ();
391 v->derived_types.release ();
61a74079
JH
392 if (v->types_set)
393 pointer_set_destroy (v->types_set);
eefe9a99
JH
394 ggc_free (v);
395}
396
397/* ODR type hash used to lookup ODR type based on tree type node. */
398
c203e8a7
TS
399typedef hash_table<odr_hasher> odr_hash_type;
400static odr_hash_type *odr_hash;
eefe9a99
JH
401
402/* ODR types are also stored into ODR_TYPE vector to allow consistent
403 walking. Bases appear before derived types. Vector is garbage collected
404 so we won't end up visiting empty types. */
405
406static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
407#define odr_types (*odr_types_ptr)
408
c7e1befa
JH
409/* Set TYPE_BINFO of TYPE and its variants to BINFO. */
410void
411set_type_binfo (tree type, tree binfo)
412{
413 for (; type; type = TYPE_NEXT_VARIANT (type))
414 if (COMPLETE_TYPE_P (type))
415 TYPE_BINFO (type) = binfo;
416 else
417 gcc_assert (!TYPE_BINFO (type));
418}
419
61a74079
JH
420/* TYPE is equivalent to VAL by ODR, but its tree representation differs
421 from VAL->type. This may happen in LTO where tree merging did not merge
422 all variants of the same type. It may or may not mean the ODR violation.
423 Add it to the list of duplicates and warn on some violations. */
424
549bcbd1 425static bool
61a74079
JH
426add_type_duplicate (odr_type val, tree type)
427{
549bcbd1 428 bool build_bases = false;
61a74079
JH
429 if (!val->types_set)
430 val->types_set = pointer_set_create ();
431
549bcbd1
JH
432 /* Always prefer complete type to be the leader. */
433 if (!COMPLETE_TYPE_P (val->type)
434 && COMPLETE_TYPE_P (type))
435 {
436 tree tmp = type;
437
438 build_bases = true;
439 type = val->type;
440 val->type = tmp;
441 }
442
61a74079
JH
443 /* See if this duplicate is new. */
444 if (!pointer_set_insert (val->types_set, type))
445 {
446 bool merge = true;
447 bool base_mismatch = false;
549bcbd1
JH
448 bool warned = 0;
449 unsigned int i,j;
450
61a74079
JH
451 gcc_assert (in_lto_p);
452 vec_safe_push (val->types, type);
61a74079
JH
453
454 /* First we compare memory layout. */
455 if (!types_compatible_p (val->type, type))
456 {
457 merge = false;
ec77d61f 458 odr_violation_reported = true;
61a74079
JH
459 if (BINFO_VTABLE (TYPE_BINFO (val->type))
460 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
461 "type %qD violates one definition rule ",
462 type))
549bcbd1
JH
463 {
464 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
465 "a type with the same name but different layout is "
466 "defined in another translation unit");
467 warned = true;
468 }
469 val->odr_violated = true;
61a74079
JH
470 if (cgraph_dump_file)
471 {
472 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
473
474 print_node (cgraph_dump_file, "", val->type, 0);
475 putc ('\n',cgraph_dump_file);
476 print_node (cgraph_dump_file, "", type, 0);
477 putc ('\n',cgraph_dump_file);
478 }
479 }
480
481 /* Next sanity check that bases are the same. If not, we will end
482 up producing wrong answers. */
549bcbd1
JH
483 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
484 && TREE_CODE (val->type) == RECORD_TYPE
485 && TREE_CODE (type) == RECORD_TYPE
486 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
61a74079 487 {
549bcbd1
JH
488 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
489 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
490 {
491 odr_type base = get_odr_type
492 (BINFO_TYPE
493 (BINFO_BASE_BINFO (TYPE_BINFO (type),
494 i)),
495 true);
496 if (val->bases.length () <= j || val->bases[j] != base)
497 base_mismatch = true;
498 j++;
499 }
500 if (base_mismatch)
61a74079 501 {
549bcbd1
JH
502 merge = false;
503 odr_violation_reported = true;
504
505 if (!warned
506 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
507 "type %qD violates one definition rule ",
508 type))
509 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
510 "a type with the same name but different bases is "
511 "defined in another translation unit");
512 val->odr_violated = true;
513 if (cgraph_dump_file)
514 {
515 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
516
517 print_node (cgraph_dump_file, "", val->type, 0);
518 putc ('\n',cgraph_dump_file);
519 print_node (cgraph_dump_file, "", type, 0);
520 putc ('\n',cgraph_dump_file);
521 }
61a74079
JH
522 }
523 }
524
525 /* Regularize things a little. During LTO same types may come with
526 different BINFOs. Either because their virtual table was
527 not merged by tree merging and only later at decl merging or
528 because one type comes with external vtable, while other
529 with internal. We want to merge equivalent binfos to conserve
530 memory and streaming overhead.
531
532 The external vtables are more harmful: they contain references
533 to external declarations of methods that may be defined in the
534 merged LTO unit. For this reason we absolutely need to remove
535 them and replace by internal variants. Not doing so will lead
536 to incomplete answers from possible_polymorphic_call_targets. */
549bcbd1
JH
537 if (!flag_ltrans && merge
538 && TREE_CODE (val->type) == RECORD_TYPE
539 && TREE_CODE (type) == RECORD_TYPE
540 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
541 && TYPE_MAIN_VARIANT (type) == type
542 && TYPE_MAIN_VARIANT (val->type) == val->type
543 && BINFO_VTABLE (TYPE_BINFO (val->type))
544 && BINFO_VTABLE (TYPE_BINFO (type)))
61a74079
JH
545 {
546 tree master_binfo = TYPE_BINFO (val->type);
547 tree v1 = BINFO_VTABLE (master_binfo);
548 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
549
550 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
551 {
552 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
553 && operand_equal_p (TREE_OPERAND (v1, 1),
554 TREE_OPERAND (v2, 1), 0));
555 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
556 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
557 }
558 gcc_assert (DECL_ASSEMBLER_NAME (v1)
559 == DECL_ASSEMBLER_NAME (v2));
560
561 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
562 {
563 unsigned int i;
564
c7e1befa 565 set_type_binfo (val->type, TYPE_BINFO (type));
c3284718 566 for (i = 0; i < val->types->length (); i++)
61a74079
JH
567 {
568 if (TYPE_BINFO ((*val->types)[i])
569 == master_binfo)
c7e1befa 570 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
61a74079 571 }
c7e1befa 572 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
61a74079
JH
573 }
574 else
c7e1befa 575 set_type_binfo (type, master_binfo);
61a74079
JH
576 }
577 }
549bcbd1 578 return build_bases;
61a74079
JH
579}
580
eefe9a99
JH
581/* Get ODR type hash entry for TYPE. If INSERT is true, create
582 possibly new entry. */
583
584odr_type
585get_odr_type (tree type, bool insert)
586{
587 odr_type_d **slot;
588 odr_type val;
589 hashval_t hash;
549bcbd1
JH
590 bool build_bases = false;
591 bool insert_to_odr_array = false;
592 int base_id = -1;
593
594 type = main_odr_variant (type);
eefe9a99 595
eefe9a99 596 hash = hash_type_name (type);
c203e8a7
TS
597 slot
598 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
eefe9a99
JH
599 if (!slot)
600 return NULL;
601
602 /* See if we already have entry for type. */
603 if (*slot)
604 {
605 val = *slot;
606
61a74079
JH
607 /* With LTO we need to support multiple tree representation of
608 the same ODR type. */
609 if (val->type != type)
549bcbd1 610 build_bases = add_type_duplicate (val, type);
eefe9a99
JH
611 }
612 else
613 {
eefe9a99 614
766090c2 615 val = ggc_cleared_alloc<odr_type_d> ();
eefe9a99
JH
616 val->type = type;
617 val->bases = vNULL;
618 val->derived_types = vNULL;
0e1474e5 619 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
549bcbd1
JH
620 build_bases = COMPLETE_TYPE_P (val->type);
621 insert_to_odr_array = true;
622 }
623
624 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
625 && type == TYPE_MAIN_VARIANT (type))
626 {
627 tree binfo = TYPE_BINFO (type);
628 unsigned int i;
629
630 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
631
2d1644bf 632 val->all_derivations_known = type_all_derivations_known_p (type);
eefe9a99
JH
633 *slot = val;
634 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
635 /* For now record only polymorphic types. other are
636 pointless for devirtualization and we can not precisely
637 determine ODR equivalency of these during LTO. */
638 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
639 {
640 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
641 i)),
642 true);
549bcbd1 643 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
eefe9a99
JH
644 base->derived_types.safe_push (val);
645 val->bases.safe_push (base);
549bcbd1
JH
646 if (base->id > base_id)
647 base_id = base->id;
eefe9a99 648 }
549bcbd1
JH
649 }
650 /* Ensure that type always appears after bases. */
651 if (insert_to_odr_array)
652 {
eefe9a99 653 if (odr_types_ptr)
c3284718 654 val->id = odr_types.length ();
eefe9a99
JH
655 vec_safe_push (odr_types_ptr, val);
656 }
549bcbd1
JH
657 else if (base_id > val->id)
658 {
659 odr_types[val->id] = 0;
660 /* Be sure we did not recorded any derived types; these may need
661 renumbering too. */
662 gcc_assert (val->derived_types.length() == 0);
663 if (odr_types_ptr)
664 val->id = odr_types.length ();
665 vec_safe_push (odr_types_ptr, val);
666 }
eefe9a99
JH
667 return val;
668}
669
670/* Dump ODR type T and all its derrived type. INDENT specify indentation for
671 recusive printing. */
672
673static void
674dump_odr_type (FILE *f, odr_type t, int indent=0)
675{
676 unsigned int i;
677 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
678 print_generic_expr (f, t->type, TDF_SLIM);
2d1644bf
JH
679 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
680 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
eefe9a99
JH
681 if (TYPE_NAME (t->type))
682 {
683 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
684 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
685 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
686 }
c3284718 687 if (t->bases.length ())
eefe9a99
JH
688 {
689 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
c3284718 690 for (i = 0; i < t->bases.length (); i++)
eefe9a99
JH
691 fprintf (f, " %i", t->bases[i]->id);
692 fprintf (f, "\n");
693 }
c3284718 694 if (t->derived_types.length ())
eefe9a99
JH
695 {
696 fprintf (f, "%*s derived types:\n", indent * 2, "");
c3284718 697 for (i = 0; i < t->derived_types.length (); i++)
eefe9a99
JH
698 dump_odr_type (f, t->derived_types[i], indent + 1);
699 }
700 fprintf (f, "\n");
701}
702
703/* Dump the type inheritance graph. */
704
705static void
706dump_type_inheritance_graph (FILE *f)
707{
708 unsigned int i;
0e1474e5
JH
709 if (!odr_types_ptr)
710 return;
eefe9a99 711 fprintf (f, "\n\nType inheritance graph:\n");
c3284718 712 for (i = 0; i < odr_types.length (); i++)
eefe9a99 713 {
549bcbd1 714 if (odr_types[i] && odr_types[i]->bases.length () == 0)
eefe9a99
JH
715 dump_odr_type (f, odr_types[i]);
716 }
c3284718 717 for (i = 0; i < odr_types.length (); i++)
61a74079 718 {
549bcbd1 719 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
61a74079
JH
720 {
721 unsigned int j;
722 fprintf (f, "Duplicate tree types for odr type %i\n", i);
723 print_node (f, "", odr_types[i]->type, 0);
c3284718 724 for (j = 0; j < odr_types[i]->types->length (); j++)
61a74079
JH
725 {
726 tree t;
727 fprintf (f, "duplicate #%i\n", j);
728 print_node (f, "", (*odr_types[i]->types)[j], 0);
729 t = (*odr_types[i]->types)[j];
730 while (TYPE_P (t) && TYPE_CONTEXT (t))
731 {
732 t = TYPE_CONTEXT (t);
733 print_node (f, "", t, 0);
734 }
735 putc ('\n',f);
736 }
737 }
738 }
eefe9a99
JH
739}
740
741/* Given method type T, return type of class it belongs to.
742 Lookup this pointer and get its type. */
743
64cbf23d 744tree
d570d364 745method_class_type (const_tree t)
eefe9a99
JH
746{
747 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
68377e53 748 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
eefe9a99
JH
749
750 return TREE_TYPE (first_parm_type);
751}
752
753/* Initialize IPA devirt and build inheritance tree graph. */
754
755void
756build_type_inheritance_graph (void)
757{
b270b096 758 struct symtab_node *n;
eefe9a99
JH
759 FILE *inheritance_dump_file;
760 int flags;
761
c203e8a7 762 if (odr_hash)
eefe9a99
JH
763 return;
764 timevar_push (TV_IPA_INHERITANCE);
765 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
c203e8a7 766 odr_hash = new odr_hash_type (23);
eefe9a99
JH
767
768 /* We reconstruct the graph starting of types of all methods seen in the
769 the unit. */
b270b096 770 FOR_EACH_SYMBOL (n)
7de90a6c 771 if (is_a <cgraph_node *> (n)
b270b096 772 && DECL_VIRTUAL_P (n->decl)
67348ccc 773 && symtab_real_symbol_p (n))
549bcbd1
JH
774 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
775 true);
b270b096
JH
776
777 /* Look also for virtual tables of types that do not define any methods.
778
779 We need it in a case where class B has virtual base of class A
780 re-defining its virtual method and there is class C with no virtual
781 methods with B as virtual base.
782
783 Here we output B's virtual method in two variant - for non-virtual
784 and virtual inheritance. B's virtual table has non-virtual version,
785 while C's has virtual.
786
787 For this reason we need to know about C in order to include both
788 variants of B. More correctly, record_target_from_binfo should
789 add both variants of the method when walking B, but we have no
790 link in between them.
791
792 We rely on fact that either the method is exported and thus we
793 assume it is called externally or C is in anonymous namespace and
794 thus we will see the vtable. */
795
7de90a6c 796 else if (is_a <varpool_node *> (n)
b270b096
JH
797 && DECL_VIRTUAL_P (n->decl)
798 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
799 && TYPE_BINFO (DECL_CONTEXT (n->decl))
800 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
549bcbd1 801 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
eefe9a99
JH
802 if (inheritance_dump_file)
803 {
804 dump_type_inheritance_graph (inheritance_dump_file);
805 dump_end (TDI_inheritance, inheritance_dump_file);
806 }
807 timevar_pop (TV_IPA_INHERITANCE);
808}
809
ccb05ef2
JH
810/* Return true if N has reference from live virtual table
811 (and thus can be a destination of polymorphic call).
812 Be conservatively correct when callgraph is not built or
813 if the method may be referred externally. */
814
815static bool
816referenced_from_vtable_p (struct cgraph_node *node)
817{
818 int i;
819 struct ipa_ref *ref;
820 bool found = false;
821
822 if (node->externally_visible
823 || node->used_from_other_partition)
824 return true;
825
826 /* Keep this test constant time.
827 It is unlikely this can happen except for the case where speculative
828 devirtualization introduced many speculative edges to this node.
829 In this case the target is very likely alive anyway. */
830 if (node->ref_list.referring.length () > 100)
831 return true;
832
833 /* We need references built. */
834 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
835 return true;
836
d122681a 837 for (i = 0; node->iterate_referring (i, ref); i++)
ccb05ef2
JH
838
839 if ((ref->use == IPA_REF_ALIAS
840 && referenced_from_vtable_p (cgraph (ref->referring)))
841 || (ref->use == IPA_REF_ADDR
842 && TREE_CODE (ref->referring->decl) == VAR_DECL
843 && DECL_VIRTUAL_P (ref->referring->decl)))
844 {
845 found = true;
846 break;
847 }
848 return found;
849}
850
68377e53 851/* If TARGET has associated node, record it in the NODES array.
ec77d61f
JH
852 CAN_REFER specify if program can refer to the target directly.
853 if TARGET is unknown (NULL) or it can not be inserted (for example because
854 its body was already removed and there is no way to refer to it), clear
855 COMPLETEP. */
eefe9a99
JH
856
857static void
858maybe_record_node (vec <cgraph_node *> &nodes,
68377e53 859 tree target, pointer_set_t *inserted,
ec77d61f 860 bool can_refer,
68377e53 861 bool *completep)
eefe9a99
JH
862{
863 struct cgraph_node *target_node;
88f592e3
JH
864
865 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
866 list of targets; the runtime effect of calling them is undefined.
867 Only "real" virtual methods should be accounted. */
868 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
869 return;
eefe9a99 870
ec77d61f
JH
871 if (!can_refer)
872 {
873 /* The only case when method of anonymous namespace becomes unreferable
874 is when we completely optimized it out. */
875 if (flag_ltrans
876 || !target
88f592e3 877 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
ec77d61f
JH
878 *completep = false;
879 return;
880 }
881
88f592e3 882 if (!target)
68377e53
JH
883 return;
884
885 target_node = cgraph_get_node (target);
886
ccb05ef2
JH
887 /* Method can only be called by polymorphic call if any
888 of vtables refering to it are alive.
889
890 While this holds for non-anonymous functions, too, there are
891 cases where we want to keep them in the list; for example
892 inline functions with -fno-weak are static, but we still
893 may devirtualize them when instance comes from other unit.
894 The same holds for LTO.
895
896 Currently we ignore these functions in speculative devirtualization.
897 ??? Maybe it would make sense to be more aggressive for LTO even
898 eslewhere. */
899 if (!flag_ltrans
900 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
901 && (!target_node
902 || !referenced_from_vtable_p (target_node)))
903 ;
904 /* See if TARGET is useful function we can deal with. */
905 else if (target_node != NULL
906 && (TREE_PUBLIC (target)
907 || DECL_EXTERNAL (target)
908 || target_node->definition)
909 && symtab_real_symbol_p (target_node))
0e1474e5 910 {
68377e53
JH
911 gcc_assert (!target_node->global.inlined_to);
912 gcc_assert (symtab_real_symbol_p (target_node));
913 if (!pointer_set_insert (inserted, target))
914 {
915 pointer_set_insert (cached_polymorphic_call_targets,
916 target_node);
917 nodes.safe_push (target_node);
918 }
0e1474e5 919 }
68377e53 920 else if (completep
2d1644bf
JH
921 && (!type_in_anonymous_namespace_p
922 (DECL_CONTEXT (target))
923 || flag_ltrans))
0439a947 924 *completep = false;
eefe9a99
JH
925}
926
68377e53
JH
927/* See if BINFO's type match OUTER_TYPE. If so, lookup
928 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2d1644bf
JH
929 method in vtable and insert method to NODES array
930 or BASES_TO_CONSIDER if this array is non-NULL.
eefe9a99
JH
931 Otherwise recurse to base BINFOs.
932 This match what get_binfo_at_offset does, but with offset
933 being unknown.
934
a3788dde
JH
935 TYPE_BINFOS is a stack of BINFOS of types with defined
936 virtual table seen on way from class type to BINFO.
eefe9a99
JH
937
938 MATCHED_VTABLES tracks virtual tables we already did lookup
68377e53
JH
939 for virtual function in. INSERTED tracks nodes we already
940 inserted.
3462aa02
JH
941
942 ANONYMOUS is true if BINFO is part of anonymous namespace.
ec77d61f
JH
943
944 Clear COMPLETEP when we hit unreferable target.
eefe9a99
JH
945 */
946
947static void
68377e53 948record_target_from_binfo (vec <cgraph_node *> &nodes,
2d1644bf 949 vec <tree> *bases_to_consider,
68377e53
JH
950 tree binfo,
951 tree otr_type,
a3788dde 952 vec <tree> &type_binfos,
68377e53
JH
953 HOST_WIDE_INT otr_token,
954 tree outer_type,
955 HOST_WIDE_INT offset,
956 pointer_set_t *inserted,
957 pointer_set_t *matched_vtables,
ec77d61f
JH
958 bool anonymous,
959 bool *completep)
eefe9a99
JH
960{
961 tree type = BINFO_TYPE (binfo);
962 int i;
963 tree base_binfo;
964
eefe9a99 965
a3788dde
JH
966 if (BINFO_VTABLE (binfo))
967 type_binfos.safe_push (binfo);
68377e53 968 if (types_same_for_odr (type, outer_type))
eefe9a99 969 {
a3788dde
JH
970 int i;
971 tree type_binfo = NULL;
972
973 /* Lookup BINFO with virtual table. For normal types it is always last
974 binfo on stack. */
975 for (i = type_binfos.length () - 1; i >= 0; i--)
976 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
977 {
978 type_binfo = type_binfos[i];
979 break;
980 }
981 if (BINFO_VTABLE (binfo))
982 type_binfos.pop ();
983 /* If this is duplicated BINFO for base shared by virtual inheritance,
984 we may not have its associated vtable. This is not a problem, since
985 we will walk it on the other path. */
986 if (!type_binfo)
6d6af792 987 return;
68377e53
JH
988 tree inner_binfo = get_binfo_at_offset (type_binfo,
989 offset, otr_type);
ec77d61f
JH
990 if (!inner_binfo)
991 {
992 gcc_assert (odr_violation_reported);
993 return;
994 }
3462aa02
JH
995 /* For types in anonymous namespace first check if the respective vtable
996 is alive. If not, we know the type can't be called. */
997 if (!flag_ltrans && anonymous)
998 {
68377e53 999 tree vtable = BINFO_VTABLE (inner_binfo);
2c8326a5 1000 varpool_node *vnode;
3462aa02
JH
1001
1002 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1003 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1004 vnode = varpool_get_node (vtable);
67348ccc 1005 if (!vnode || !vnode->definition)
3462aa02
JH
1006 return;
1007 }
68377e53 1008 gcc_assert (inner_binfo);
2d1644bf
JH
1009 if (bases_to_consider
1010 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
1011 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
68377e53 1012 {
ec77d61f
JH
1013 bool can_refer;
1014 tree target = gimple_get_virt_method_for_binfo (otr_token,
1015 inner_binfo,
1016 &can_refer);
2d1644bf
JH
1017 if (!bases_to_consider)
1018 maybe_record_node (nodes, target, inserted, can_refer, completep);
1019 /* Destructors are never called via construction vtables. */
1020 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1021 bases_to_consider->safe_push (target);
68377e53 1022 }
eefe9a99
JH
1023 return;
1024 }
1025
1026 /* Walk bases. */
1027 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1028 /* Walking bases that have no virtual method is pointless excercise. */
1029 if (polymorphic_type_binfo_p (base_binfo))
2d1644bf 1030 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
a3788dde 1031 type_binfos,
68377e53 1032 otr_token, outer_type, offset, inserted,
ec77d61f 1033 matched_vtables, anonymous, completep);
a3788dde
JH
1034 if (BINFO_VTABLE (binfo))
1035 type_binfos.pop ();
eefe9a99
JH
1036}
1037
1038/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1039 of TYPE, insert them to NODES, recurse into derived nodes.
1040 INSERTED is used to avoid duplicate insertions of methods into NODES.
ec77d61f 1041 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2d1644bf
JH
1042 Clear COMPLETEP if unreferable target is found.
1043
1044 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
1045 all cases where BASE_SKIPPED is true (because the base is abstract
1046 class). */
eefe9a99
JH
1047
1048static void
1049possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1050 pointer_set_t *inserted,
1051 pointer_set_t *matched_vtables,
1052 tree otr_type,
1053 odr_type type,
68377e53
JH
1054 HOST_WIDE_INT otr_token,
1055 tree outer_type,
ec77d61f 1056 HOST_WIDE_INT offset,
2d1644bf
JH
1057 bool *completep,
1058 vec <tree> &bases_to_consider,
1059 bool consider_construction)
eefe9a99
JH
1060{
1061 tree binfo = TYPE_BINFO (type->type);
1062 unsigned int i;
a3788dde 1063 vec <tree> type_binfos = vNULL;
2d1644bf
JH
1064 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1065
1066 /* We may need to consider types w/o instances because of possible derived
1067 types using their methods either directly or via construction vtables.
1068 We are safe to skip them when all derivations are known, since we will
1069 handle them later.
1070 This is done by recording them to BASES_TO_CONSIDER array. */
1071 if (possibly_instantiated || consider_construction)
1072 {
1073 record_target_from_binfo (nodes,
1074 (!possibly_instantiated
1075 && type_all_derivations_known_p (type->type))
1076 ? &bases_to_consider : NULL,
1077 binfo, otr_type, type_binfos, otr_token,
1078 outer_type, offset,
1079 inserted, matched_vtables,
1080 type->anonymous_namespace, completep);
1081 }
a3788dde 1082 type_binfos.release ();
c3284718 1083 for (i = 0; i < type->derived_types.length (); i++)
eefe9a99
JH
1084 possible_polymorphic_call_targets_1 (nodes, inserted,
1085 matched_vtables,
1086 otr_type,
1087 type->derived_types[i],
2d1644bf
JH
1088 otr_token, outer_type, offset, completep,
1089 bases_to_consider, consider_construction);
eefe9a99
JH
1090}
1091
1092/* Cache of queries for polymorphic call targets.
1093
1094 Enumerating all call targets may get expensive when there are many
1095 polymorphic calls in the program, so we memoize all the previous
1096 queries and avoid duplicated work. */
1097
1098struct polymorphic_call_target_d
1099{
eefe9a99 1100 HOST_WIDE_INT otr_token;
68377e53
JH
1101 ipa_polymorphic_call_context context;
1102 odr_type type;
eefe9a99 1103 vec <cgraph_node *> targets;
ec77d61f
JH
1104 int nonconstruction_targets;
1105 bool complete;
eefe9a99
JH
1106};
1107
1108/* Polymorphic call target cache helpers. */
1109
1110struct polymorphic_call_target_hasher
1111{
1112 typedef polymorphic_call_target_d value_type;
1113 typedef polymorphic_call_target_d compare_type;
1114 static inline hashval_t hash (const value_type *);
1115 static inline bool equal (const value_type *, const compare_type *);
1116 static inline void remove (value_type *);
1117};
1118
1119/* Return the computed hashcode for ODR_QUERY. */
1120
1121inline hashval_t
1122polymorphic_call_target_hasher::hash (const value_type *odr_query)
1123{
68377e53
JH
1124 hashval_t hash;
1125
1126 hash = iterative_hash_host_wide_int
1127 (odr_query->otr_token,
1128 odr_query->type->id);
1129 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
1130 hash);
1131 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
1132 return iterative_hash_hashval_t
1133 (((int)odr_query->context.maybe_in_construction << 1)
1134 | (int)odr_query->context.maybe_derived_type, hash);
eefe9a99
JH
1135}
1136
1137/* Compare cache entries T1 and T2. */
1138
1139inline bool
1140polymorphic_call_target_hasher::equal (const value_type *t1,
1141 const compare_type *t2)
1142{
68377e53
JH
1143 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1144 && t1->context.offset == t2->context.offset
1145 && t1->context.outer_type == t2->context.outer_type
1146 && t1->context.maybe_in_construction
1147 == t2->context.maybe_in_construction
1148 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
eefe9a99
JH
1149}
1150
1151/* Remove entry in polymorphic call target cache hash. */
1152
1153inline void
1154polymorphic_call_target_hasher::remove (value_type *v)
1155{
1156 v->targets.release ();
1157 free (v);
1158}
1159
1160/* Polymorphic call target query cache. */
1161
c203e8a7 1162typedef hash_table<polymorphic_call_target_hasher>
eefe9a99 1163 polymorphic_call_target_hash_type;
c203e8a7 1164static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
eefe9a99
JH
1165
1166/* Destroy polymorphic call target query cache. */
1167
1168static void
1169free_polymorphic_call_targets_hash ()
1170{
0e1474e5
JH
1171 if (cached_polymorphic_call_targets)
1172 {
c203e8a7
TS
1173 delete polymorphic_call_target_hash;
1174 polymorphic_call_target_hash = NULL;
0e1474e5
JH
1175 pointer_set_destroy (cached_polymorphic_call_targets);
1176 cached_polymorphic_call_targets = NULL;
1177 }
eefe9a99
JH
1178}
1179
1180/* When virtual function is removed, we may need to flush the cache. */
1181
1182static void
1183devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1184{
0e1474e5
JH
1185 if (cached_polymorphic_call_targets
1186 && pointer_set_contains (cached_polymorphic_call_targets, n))
eefe9a99
JH
1187 free_polymorphic_call_targets_hash ();
1188}
1189
d570d364
JH
1190/* Return true when TYPE contains an polymorphic type and thus is interesting
1191 for devirtualization machinery. */
1192
1193bool
1194contains_polymorphic_type_p (const_tree type)
1195{
1196 type = TYPE_MAIN_VARIANT (type);
1197
1198 if (RECORD_OR_UNION_TYPE_P (type))
1199 {
1200 if (TYPE_BINFO (type)
1201 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1202 return true;
1203 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1204 if (TREE_CODE (fld) == FIELD_DECL
1205 && !DECL_ARTIFICIAL (fld)
1206 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1207 return true;
1208 return false;
1209 }
1210 if (TREE_CODE (type) == ARRAY_TYPE)
1211 return contains_polymorphic_type_p (TREE_TYPE (type));
1212 return false;
1213}
1214
68377e53
JH
1215/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1216 is contained at CONTEXT->OFFSET. Walk the memory representation of
1217 CONTEXT->OUTER_TYPE and find the outermost class type that match
1218 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1219 to represent it.
1220
1221 For example when CONTEXT represents type
1222 class A
1223 {
1224 int a;
1225 class B b;
1226 }
1227 and we look for type at offset sizeof(int), we end up with B and offset 0.
1228 If the same is produced by multiple inheritance, we end up with A and offset
1229 sizeof(int).
1230
1231 If we can not find corresponding class, give up by setting
1232 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1233 Return true when lookup was sucesful. */
1234
1235static bool
1236get_class_context (ipa_polymorphic_call_context *context,
1237 tree expected_type)
1238{
1239 tree type = context->outer_type;
1240 HOST_WIDE_INT offset = context->offset;
1241
1242 /* Find the sub-object the constant actually refers to and mark whether it is
1243 an artificial one (as opposed to a user-defined one). */
1244 while (true)
1245 {
1246 HOST_WIDE_INT pos, size;
1247 tree fld;
1248
1249 /* On a match, just return what we found. */
1250 if (TREE_CODE (type) == TREE_CODE (expected_type)
1251 && types_same_for_odr (type, expected_type))
1252 {
3b4e93c3
JH
1253 /* Type can not contain itself on an non-zero offset. In that case
1254 just give up. */
1255 if (offset != 0)
1256 goto give_up;
68377e53
JH
1257 gcc_assert (offset == 0);
1258 return true;
1259 }
1260
1261 /* Walk fields and find corresponding on at OFFSET. */
1262 if (TREE_CODE (type) == RECORD_TYPE)
1263 {
1264 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1265 {
1266 if (TREE_CODE (fld) != FIELD_DECL)
1267 continue;
1268
1269 pos = int_bit_position (fld);
1270 size = tree_to_uhwi (DECL_SIZE (fld));
1271 if (pos <= offset && (pos + size) > offset)
1272 break;
1273 }
1274
1275 if (!fld)
1276 goto give_up;
1277
c7e1befa 1278 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
68377e53
JH
1279 offset -= pos;
1280 /* DECL_ARTIFICIAL represents a basetype. */
1281 if (!DECL_ARTIFICIAL (fld))
1282 {
1283 context->outer_type = type;
1284 context->offset = offset;
1285 /* As soon as we se an field containing the type,
1286 we know we are not looking for derivations. */
1287 context->maybe_derived_type = false;
1288 }
1289 }
1290 else if (TREE_CODE (type) == ARRAY_TYPE)
1291 {
c7e1befa 1292 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
68377e53
JH
1293
1294 /* Give up if we don't know array size. */
1295 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1296 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1297 goto give_up;
1298 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1299 type = subtype;
1300 context->outer_type = type;
1301 context->offset = offset;
1302 context->maybe_derived_type = false;
1303 }
1304 /* Give up on anything else. */
1305 else
1306 goto give_up;
1307 }
1308
1309 /* If we failed to find subtype we look for, give up and fall back to the
1310 most generic query. */
1311give_up:
1312 context->outer_type = expected_type;
1313 context->offset = 0;
1314 context->maybe_derived_type = true;
e400f081
JH
1315 context->maybe_in_construction = true;
1316 /* POD can be changed to an instance of a polymorphic type by
1317 placement new. Here we play safe and assume that any
1318 non-polymorphic type is POD. */
1319 if ((TREE_CODE (type) != RECORD_TYPE
1320 || !TYPE_BINFO (type)
1321 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1322 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1323 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1324 tree_to_uhwi (TYPE_SIZE (type)))))
1325 return true;
68377e53
JH
1326 return false;
1327}
1328
1329/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1330
1331static bool
1332contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1333 tree otr_type)
1334{
c7e1befa
JH
1335 ipa_polymorphic_call_context context = {offset,
1336 TYPE_MAIN_VARIANT (outer_type),
68377e53
JH
1337 false, true};
1338 return get_class_context (&context, otr_type);
1339}
1340
390675c8
JH
1341/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1342
1343static tree
85942f45
JH
1344subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1345 tree vtable)
390675c8
JH
1346{
1347 tree v = BINFO_VTABLE (binfo);
1348 int i;
1349 tree base_binfo;
85942f45 1350 unsigned HOST_WIDE_INT this_offset;
390675c8 1351
85942f45
JH
1352 if (v)
1353 {
1354 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1355 gcc_unreachable ();
1356
1357 if (offset == this_offset
1358 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1359 return binfo;
1360 }
390675c8 1361
390675c8
JH
1362 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1363 if (polymorphic_type_binfo_p (base_binfo))
1364 {
1365 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1366 if (base_binfo)
1367 return base_binfo;
1368 }
1369 return NULL;
1370}
1371
85942f45
JH
1372/* T is known constant value of virtual table pointer.
1373 Store virtual table to V and its offset to OFFSET.
1374 Return false if T does not look like virtual table reference. */
390675c8 1375
85942f45 1376bool
d570d364
JH
1377vtable_pointer_value_to_vtable (const_tree t, tree *v,
1378 unsigned HOST_WIDE_INT *offset)
390675c8
JH
1379{
1380 /* We expect &MEM[(void *)&virtual_table + 16B].
1381 We obtain object's BINFO from the context of the virtual table.
1382 This one contains pointer to virtual table represented via
1383 POINTER_PLUS_EXPR. Verify that this pointer match to what
1384 we propagated through.
1385
1386 In the case of virtual inheritance, the virtual tables may
1387 be nested, i.e. the offset may be different from 16 and we may
1388 need to dive into the type representation. */
85942f45 1389 if (TREE_CODE (t) == ADDR_EXPR
390675c8
JH
1390 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1391 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1392 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1393 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1394 == VAR_DECL)
1395 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1396 (TREE_OPERAND (t, 0), 0), 0)))
1397 {
85942f45
JH
1398 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1399 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1400 return true;
390675c8 1401 }
85942f45
JH
1402
1403 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1404 We need to handle it when T comes from static variable initializer or
1405 BINFO. */
1406 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1407 {
1408 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1409 t = TREE_OPERAND (t, 0);
1410 }
1411 else
1412 *offset = 0;
1413
1414 if (TREE_CODE (t) != ADDR_EXPR)
1415 return false;
1416 *v = TREE_OPERAND (t, 0);
1417 return true;
1418}
1419
1420/* T is known constant value of virtual table pointer. Return BINFO of the
1421 instance type. */
1422
1423tree
d570d364 1424vtable_pointer_value_to_binfo (const_tree t)
85942f45
JH
1425{
1426 tree vtable;
1427 unsigned HOST_WIDE_INT offset;
1428
1429 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1430 return NULL_TREE;
1431
1432 /* FIXME: for stores of construction vtables we return NULL,
1433 because we do not have BINFO for those. Eventually we should fix
1434 our representation to allow this case to be handled, too.
1435 In the case we see store of BINFO we however may assume
1436 that standard folding will be ale to cope with it. */
1437 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1438 offset, vtable);
390675c8
JH
1439}
1440
058d0a90
JH
1441/* We know that the instance is stored in variable or parameter
1442 (not dynamically allocated) and we want to disprove the fact
1443 that it may be in construction at invocation of CALL.
1444
1445 For the variable to be in construction we actually need to
1446 be in constructor of corresponding global variable or
1447 the inline stack of CALL must contain the constructor.
1448 Check this condition. This check works safely only before
1449 IPA passes, because inline stacks may become out of date
1450 later. */
1451
1452bool
1453decl_maybe_in_construction_p (tree base, tree outer_type,
1454 gimple call, tree function)
1455{
1456 outer_type = TYPE_MAIN_VARIANT (outer_type);
1457 gcc_assert (DECL_P (base));
1458
1459 /* After inlining the code unification optimizations may invalidate
1460 inline stacks. Also we need to give up on global variables after
1461 IPA, because addresses of these may have been propagated to their
1462 constructors. */
1463 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
1464 return true;
1465
1466 /* Pure functions can not do any changes on the dynamic type;
1467 that require writting to memory. */
1468 if (!auto_var_in_fn_p (base, function)
1469 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
1470 return false;
1471
1472 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
1473 block = BLOCK_SUPERCONTEXT (block))
1474 if (BLOCK_ABSTRACT_ORIGIN (block)
1475 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1476 {
1477 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
1478
1479 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1480 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1481 || !DECL_CXX_DESTRUCTOR_P (fn)))
1482 {
1483 /* Watch for clones where we constant propagated the first
1484 argument (pointer to the instance). */
1485 fn = DECL_ABSTRACT_ORIGIN (fn);
1486 if (!fn
1487 || !is_global_var (base)
1488 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1489 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1490 || !DECL_CXX_DESTRUCTOR_P (fn)))
1491 continue;
1492 }
1493 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
1494 continue;
1495
1496 /* FIXME: this can go away once we have ODR types equivalency on
1497 LTO level. */
1498 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
1499 return true;
1500 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
1501 if (types_same_for_odr (type, outer_type))
1502 return true;
1503 }
1504
1505 if (TREE_CODE (base) == VAR_DECL
1506 && is_global_var (base))
1507 {
1508 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
1509 || (!DECL_CXX_CONSTRUCTOR_P (function)
1510 || !DECL_CXX_DESTRUCTOR_P (function)))
1511 {
1512 if (!DECL_ABSTRACT_ORIGIN (function))
1513 return false;
1514 /* Watch for clones where we constant propagated the first
1515 argument (pointer to the instance). */
1516 function = DECL_ABSTRACT_ORIGIN (function);
1517 if (!function
1518 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
1519 || (!DECL_CXX_CONSTRUCTOR_P (function)
1520 || !DECL_CXX_DESTRUCTOR_P (function)))
1521 return false;
1522 }
1523 /* FIXME: this can go away once we have ODR types equivalency on
1524 LTO level. */
1525 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
1526 return true;
1527 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
1528 if (types_same_for_odr (type, outer_type))
1529 return true;
1530 }
1531 return false;
1532}
1533
5bccb77a
JH
1534/* Proudce polymorphic call context for call method of instance
1535 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1536
1537static void
1538get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1539 tree base, HOST_WIDE_INT offset)
1540{
1541 gcc_assert (DECL_P (base));
1542
c7e1befa 1543 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
5bccb77a
JH
1544 context->offset = offset;
1545 /* Make very conservative assumption that all objects
1546 may be in construction.
1547 TODO: ipa-prop already contains code to tell better.
1548 merge it later. */
1549 context->maybe_in_construction = true;
1550 context->maybe_derived_type = false;
1551}
1552
1553/* CST is an invariant (address of decl), try to get meaningful
1554 polymorphic call context for polymorphic call of method
1555 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1556 Return FALSE if nothing meaningful can be found. */
1557
1558bool
1559get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1560 tree cst,
1561 tree otr_type,
1562 HOST_WIDE_INT offset)
1563{
1564 HOST_WIDE_INT offset2, size, max_size;
1565 tree base;
1566
1567 if (TREE_CODE (cst) != ADDR_EXPR)
79c7de84 1568 return false;
5bccb77a
JH
1569
1570 cst = TREE_OPERAND (cst, 0);
1571 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
79c7de84
EB
1572 if (!DECL_P (base) || max_size == -1 || max_size != size)
1573 return false;
5bccb77a
JH
1574
1575 /* Only type inconsistent programs can have otr_type that is
1576 not part of outer type. */
79c7de84
EB
1577 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1578 return false;
5bccb77a 1579
79c7de84 1580 get_polymorphic_call_info_for_decl (context, base, offset);
5bccb77a
JH
1581 return true;
1582}
1583
68377e53
JH
1584/* Given REF call in FNDECL, determine class of the polymorphic
1585 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
058d0a90
JH
1586 CALL is optional argument giving the actual statement (usually call) where
1587 the context is used.
68377e53
JH
1588 Return pointer to object described by the context */
1589
1590tree
1591get_polymorphic_call_info (tree fndecl,
1592 tree ref,
1593 tree *otr_type,
1594 HOST_WIDE_INT *otr_token,
058d0a90
JH
1595 ipa_polymorphic_call_context *context,
1596 gimple call)
68377e53
JH
1597{
1598 tree base_pointer;
1599 *otr_type = obj_type_ref_class (ref);
1600 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1601
1602 /* Set up basic info in case we find nothing interesting in the analysis. */
c7e1befa 1603 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
68377e53
JH
1604 context->offset = 0;
1605 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1606 context->maybe_derived_type = true;
2d1644bf 1607 context->maybe_in_construction = true;
68377e53
JH
1608
1609 /* Walk SSA for outer object. */
1610 do
1611 {
1612 if (TREE_CODE (base_pointer) == SSA_NAME
1613 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1614 && SSA_NAME_DEF_STMT (base_pointer)
1615 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1616 {
1617 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1618 STRIP_NOPS (base_pointer);
1619 }
1620 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1621 {
1622 HOST_WIDE_INT size, max_size;
1623 HOST_WIDE_INT offset2;
1624 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1625 &offset2, &size, &max_size);
1626
1627 /* If this is a varying address, punt. */
1628 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1629 && max_size != -1
1630 && max_size == size)
1631 {
1632 /* We found dereference of a pointer. Type of the pointer
1633 and MEM_REF is meaningless, but we can look futher. */
1634 if (TREE_CODE (base) == MEM_REF)
1635 {
1636 base_pointer = TREE_OPERAND (base, 0);
1637 context->offset
807e902e 1638 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
68377e53
JH
1639 context->outer_type = NULL;
1640 }
1641 /* We found base object. In this case the outer_type
1642 is known. */
1643 else if (DECL_P (base))
1644 {
7656ee72 1645 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
68377e53
JH
1646
1647 /* Only type inconsistent programs can have otr_type that is
1648 not part of outer type. */
7656ee72
JH
1649 if (!contains_type_p (TREE_TYPE (base),
1650 context->offset + offset2, *otr_type))
3e86c6a8
JH
1651 {
1652 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1653 code sequences; we arrange the calls to be builtin_unreachable
1654 later. */
1655 *otr_token = INT_MAX;
1656 return base_pointer;
1657 }
5bccb77a
JH
1658 get_polymorphic_call_info_for_decl (context, base,
1659 context->offset + offset2);
058d0a90
JH
1660 if (context->maybe_in_construction && call)
1661 context->maybe_in_construction
1662 = decl_maybe_in_construction_p (base,
1663 context->outer_type,
1664 call,
1665 current_function_decl);
7656ee72 1666 return NULL;
68377e53
JH
1667 }
1668 else
1669 break;
1670 }
1671 else
1672 break;
1673 }
1674 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1675 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1676 {
1677 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1678 * BITS_PER_UNIT;
1679 base_pointer = TREE_OPERAND (base_pointer, 0);
1680 }
1681 else
1682 break;
1683 }
1684 while (true);
1685
1686 /* Try to determine type of the outer object. */
1687 if (TREE_CODE (base_pointer) == SSA_NAME
1688 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1689 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1690 {
1691 /* See if parameter is THIS pointer of a method. */
1692 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1693 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1694 {
c7e1befa
JH
1695 context->outer_type
1696 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
1697 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1698
1699 /* Dynamic casting has possibly upcasted the type
1700 in the hiearchy. In this case outer type is less
1701 informative than inner type and we should forget
1702 about it. */
1703 if (!contains_type_p (context->outer_type, context->offset,
1704 *otr_type))
1705 {
1706 context->outer_type = NULL;
1707 return base_pointer;
1708 }
1709
1710 /* If the function is constructor or destructor, then
d74db8ff 1711 the type is possibly in construction, but we know
68377e53
JH
1712 it is not derived type. */
1713 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1714 || DECL_CXX_DESTRUCTOR_P (fndecl))
1715 {
1716 context->maybe_in_construction = true;
1717 context->maybe_derived_type = false;
1718 }
1719 else
1720 {
1721 context->maybe_derived_type = true;
1722 context->maybe_in_construction = false;
1723 }
1724 return base_pointer;
1725 }
1726 /* Non-PODs passed by value are really passed by invisible
1727 reference. In this case we also know the type of the
1728 object. */
1729 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1730 {
c7e1befa
JH
1731 context->outer_type
1732 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
68377e53
JH
1733 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1734 /* Only type inconsistent programs can have otr_type that is
1735 not part of outer type. */
1736 if (!contains_type_p (context->outer_type, context->offset,
1737 *otr_type))
1738 {
3e86c6a8
JH
1739 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1740 code sequences; we arrange the calls to be builtin_unreachable
1741 later. */
1742 *otr_token = INT_MAX;
68377e53
JH
1743 return base_pointer;
1744 }
1745 context->maybe_derived_type = false;
1746 context->maybe_in_construction = false;
1747 return base_pointer;
1748 }
1749 }
1750 /* TODO: There are multiple ways to derive a type. For instance
1751 if BASE_POINTER is passed to an constructor call prior our refernece.
1752 We do not make this type of flow sensitive analysis yet. */
1753 return base_pointer;
1754}
1755
1756/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1757 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1758 and insert them to NODES.
1759
1760 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1761
1762static void
1763record_targets_from_bases (tree otr_type,
1764 HOST_WIDE_INT otr_token,
1765 tree outer_type,
1766 HOST_WIDE_INT offset,
ec77d61f 1767 vec <cgraph_node *> &nodes,
68377e53
JH
1768 pointer_set_t *inserted,
1769 pointer_set_t *matched_vtables,
1770 bool *completep)
1771{
1772 while (true)
1773 {
1774 HOST_WIDE_INT pos, size;
1775 tree base_binfo;
1776 tree fld;
1777
1778 if (types_same_for_odr (outer_type, otr_type))
1779 return;
1780
1781 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1782 {
1783 if (TREE_CODE (fld) != FIELD_DECL)
1784 continue;
1785
1786 pos = int_bit_position (fld);
1787 size = tree_to_shwi (DECL_SIZE (fld));
ec77d61f
JH
1788 if (pos <= offset && (pos + size) > offset
1789 /* Do not get confused by zero sized bases. */
1790 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
68377e53
JH
1791 break;
1792 }
1793 /* Within a class type we should always find correcponding fields. */
1794 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1795
1796 /* Nonbasetypes should have been stripped by outer_class_type. */
1797 gcc_assert (DECL_ARTIFICIAL (fld));
1798
1799 outer_type = TREE_TYPE (fld);
1800 offset -= pos;
1801
1802 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1803 offset, otr_type);
ec77d61f
JH
1804 if (!base_binfo)
1805 {
1806 gcc_assert (odr_violation_reported);
1807 return;
1808 }
68377e53
JH
1809 gcc_assert (base_binfo);
1810 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1811 {
ec77d61f
JH
1812 bool can_refer;
1813 tree target = gimple_get_virt_method_for_binfo (otr_token,
1814 base_binfo,
1815 &can_refer);
2d1644bf
JH
1816 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1817 maybe_record_node (nodes, target, inserted, can_refer, completep);
68377e53
JH
1818 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1819 }
1820 }
1821}
1822
3462aa02
JH
1823/* When virtual table is removed, we may need to flush the cache. */
1824
1825static void
2c8326a5 1826devirt_variable_node_removal_hook (varpool_node *n,
3462aa02
JH
1827 void *d ATTRIBUTE_UNUSED)
1828{
1829 if (cached_polymorphic_call_targets
67348ccc
DM
1830 && DECL_VIRTUAL_P (n->decl)
1831 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3462aa02
JH
1832 free_polymorphic_call_targets_hash ();
1833}
1834
eefe9a99 1835/* Return vector containing possible targets of polymorphic call of type
68377e53
JH
1836 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1837 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1838 OTR_TYPE and include their virtual method. This is useful for types
1839 possibly in construction or destruction where the virtual table may
1840 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1841 us to walk the inheritance graph for all derivations.
1842
3e86c6a8
JH
1843 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1844 undefined and should be redirected to unreachable.
1845
add5c763 1846 If COMPLETEP is non-NULL, store true if the list is complete.
eefe9a99
JH
1847 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1848 in the target cache. If user needs to visit every target list
1849 just once, it can memoize them.
1850
ec77d61f
JH
1851 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1852 the type is not in the construction. Those targets appear first in the
1853 vector returned.
1854
eefe9a99
JH
1855 Returned vector is placed into cache. It is NOT caller's responsibility
1856 to free it. The vector can be freed on cgraph_remove_node call if
1857 the particular node is a virtual function present in the cache. */
1858
1859vec <cgraph_node *>
1860possible_polymorphic_call_targets (tree otr_type,
1861 HOST_WIDE_INT otr_token,
68377e53
JH
1862 ipa_polymorphic_call_context context,
1863 bool *completep,
ec77d61f
JH
1864 void **cache_token,
1865 int *nonconstruction_targetsp)
eefe9a99
JH
1866{
1867 static struct cgraph_node_hook_list *node_removal_hook_holder;
1868 pointer_set_t *inserted;
1869 pointer_set_t *matched_vtables;
add5c763 1870 vec <cgraph_node *> nodes = vNULL;
2d1644bf 1871 vec <tree> bases_to_consider = vNULL;
68377e53 1872 odr_type type, outer_type;
eefe9a99
JH
1873 polymorphic_call_target_d key;
1874 polymorphic_call_target_d **slot;
1875 unsigned int i;
1876 tree binfo, target;
ec77d61f
JH
1877 bool complete;
1878 bool can_refer;
2d1644bf 1879 bool skipped = false;
eefe9a99 1880
c7e1befa
JH
1881 otr_type = TYPE_MAIN_VARIANT (otr_type);
1882
3e86c6a8 1883 /* If ODR is not initialized, return empty incomplete list. */
c203e8a7 1884 if (!odr_hash)
79c7de84
EB
1885 {
1886 if (completep)
1887 *completep = false;
beb683ab
MJ
1888 if (cache_token)
1889 *cache_token = NULL;
ec77d61f
JH
1890 if (nonconstruction_targetsp)
1891 *nonconstruction_targetsp = 0;
79c7de84
EB
1892 return nodes;
1893 }
add5c763 1894
3e86c6a8
JH
1895 /* If we hit type inconsistency, just return empty list of targets. */
1896 if (otr_token == INT_MAX)
1897 {
1898 if (completep)
1899 *completep = true;
beb683ab
MJ
1900 if (cache_token)
1901 *cache_token = NULL;
3e86c6a8
JH
1902 if (nonconstruction_targetsp)
1903 *nonconstruction_targetsp = 0;
1904 return nodes;
1905 }
1906
68377e53 1907 type = get_odr_type (otr_type, true);
eefe9a99 1908
c7e1befa
JH
1909 /* Recording type variants would wast results cache. */
1910 gcc_assert (!context.outer_type
1911 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1912
68377e53 1913 /* Lookup the outer class type we want to walk. */
3e86c6a8
JH
1914 if (context.outer_type
1915 && !get_class_context (&context, otr_type))
1916 {
1917 if (completep)
1918 *completep = false;
beb683ab
MJ
1919 if (cache_token)
1920 *cache_token = NULL;
3e86c6a8
JH
1921 if (nonconstruction_targetsp)
1922 *nonconstruction_targetsp = 0;
1923 return nodes;
1924 }
eefe9a99 1925
c7e1befa
JH
1926 /* Check that get_class_context kept the main variant. */
1927 gcc_assert (!context.outer_type
1928 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1929
79c7de84 1930 /* We canonicalize our query, so we do not need extra hashtable entries. */
68377e53
JH
1931
1932 /* Without outer type, we have no use for offset. Just do the
1933 basic search from innter type */
1934 if (!context.outer_type)
1935 {
1936 context.outer_type = otr_type;
1937 context.offset = 0;
1938 }
1939 /* We need to update our hiearchy if the type does not exist. */
1940 outer_type = get_odr_type (context.outer_type, true);
ec77d61f 1941 /* If the type is complete, there are no derivations. */
68377e53
JH
1942 if (TYPE_FINAL_P (outer_type->type))
1943 context.maybe_derived_type = false;
eefe9a99
JH
1944
1945 /* Initialize query cache. */
1946 if (!cached_polymorphic_call_targets)
1947 {
1948 cached_polymorphic_call_targets = pointer_set_create ();
c203e8a7
TS
1949 polymorphic_call_target_hash
1950 = new polymorphic_call_target_hash_type (23);
eefe9a99 1951 if (!node_removal_hook_holder)
3462aa02
JH
1952 {
1953 node_removal_hook_holder =
1954 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1955 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1956 NULL);
1957 }
eefe9a99
JH
1958 }
1959
1960 /* Lookup cached answer. */
1961 key.type = type;
1962 key.otr_token = otr_token;
68377e53 1963 key.context = context;
c203e8a7 1964 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
eefe9a99
JH
1965 if (cache_token)
1966 *cache_token = (void *)*slot;
1967 if (*slot)
68377e53
JH
1968 {
1969 if (completep)
ec77d61f
JH
1970 *completep = (*slot)->complete;
1971 if (nonconstruction_targetsp)
1972 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
68377e53
JH
1973 return (*slot)->targets;
1974 }
1975
ec77d61f 1976 complete = true;
eefe9a99
JH
1977
1978 /* Do actual search. */
1979 timevar_push (TV_IPA_VIRTUAL_CALL);
1980 *slot = XCNEW (polymorphic_call_target_d);
1981 if (cache_token)
68377e53 1982 *cache_token = (void *)*slot;
eefe9a99
JH
1983 (*slot)->type = type;
1984 (*slot)->otr_token = otr_token;
68377e53 1985 (*slot)->context = context;
eefe9a99
JH
1986
1987 inserted = pointer_set_create ();
1988 matched_vtables = pointer_set_create ();
1989
1990 /* First see virtual method of type itself. */
68377e53
JH
1991 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1992 context.offset, otr_type);
ec77d61f
JH
1993 if (binfo)
1994 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1995 &can_refer);
1996 else
68377e53 1997 {
ec77d61f
JH
1998 gcc_assert (odr_violation_reported);
1999 target = NULL;
2000 }
68377e53 2001
2d1644bf
JH
2002 /* Destructors are never called through construction virtual tables,
2003 because the type is always known. */
2004 if (target && DECL_CXX_DESTRUCTOR_P (target))
2005 context.maybe_in_construction = false;
ec77d61f
JH
2006
2007 if (target)
2008 {
2009 /* In the case we get complete method, we don't need
68377e53
JH
2010 to walk derivations. */
2011 if (DECL_FINAL_P (target))
2012 context.maybe_derived_type = false;
2013 }
2d1644bf
JH
2014
2015 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2016 if (type_possibly_instantiated_p (outer_type->type))
2017 maybe_record_node (nodes, target, inserted, can_refer, &complete);
ec77d61f 2018 else
2d1644bf
JH
2019 {
2020 skipped = true;
2021 gcc_assert (in_lto_p || context.maybe_derived_type);
2022 }
79c7de84 2023
549bcbd1
JH
2024 if (binfo)
2025 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
eefe9a99 2026
ec77d61f 2027 /* Next walk recursively all derived types. */
68377e53
JH
2028 if (context.maybe_derived_type)
2029 {
2030 /* For anonymous namespace types we can attempt to build full type.
2031 All derivations must be in this unit (unless we see partial unit). */
2d1644bf 2032 if (!type->all_derivations_known)
ec77d61f 2033 complete = false;
68377e53
JH
2034 for (i = 0; i < outer_type->derived_types.length(); i++)
2035 possible_polymorphic_call_targets_1 (nodes, inserted,
2036 matched_vtables,
79c7de84
EB
2037 otr_type,
2038 outer_type->derived_types[i],
68377e53 2039 otr_token, outer_type->type,
2d1644bf
JH
2040 context.offset, &complete,
2041 bases_to_consider,
2042 context.maybe_in_construction);
68377e53 2043 }
79c7de84 2044
ec77d61f
JH
2045 /* Finally walk bases, if asked to. */
2046 (*slot)->nonconstruction_targets = nodes.length();
2d1644bf
JH
2047
2048 /* Destructors are never called through construction virtual tables,
2049 because the type is always known. One of entries may be cxa_pure_virtual
2050 so look to at least two of them. */
2051 if (context.maybe_in_construction)
2052 for (i =0 ; i < MIN (nodes.length (), 2); i++)
2053 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2054 context.maybe_in_construction = false;
ec77d61f 2055 if (context.maybe_in_construction)
2d1644bf
JH
2056 {
2057 if (type != outer_type
2058 && (!skipped
2059 || (context.maybe_derived_type
2060 && !type_all_derivations_known_p (outer_type->type))))
2061 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2062 context.offset, nodes, inserted,
2063 matched_vtables, &complete);
2064 if (skipped)
2065 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2066 for (i = 0; i < bases_to_consider.length(); i++)
2067 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
2068 }
2069 bases_to_consider.release();
ec77d61f 2070
eefe9a99 2071 (*slot)->targets = nodes;
ec77d61f 2072 (*slot)->complete = complete;
68377e53 2073 if (completep)
ec77d61f
JH
2074 *completep = complete;
2075 if (nonconstruction_targetsp)
2076 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
eefe9a99
JH
2077
2078 pointer_set_destroy (inserted);
2079 pointer_set_destroy (matched_vtables);
2080 timevar_pop (TV_IPA_VIRTUAL_CALL);
2081 return nodes;
2082}
2083
2084/* Dump all possible targets of a polymorphic call. */
2085
2086void
2087dump_possible_polymorphic_call_targets (FILE *f,
68377e53
JH
2088 tree otr_type,
2089 HOST_WIDE_INT otr_token,
2090 const ipa_polymorphic_call_context &ctx)
eefe9a99
JH
2091{
2092 vec <cgraph_node *> targets;
2093 bool final;
549bcbd1 2094 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
eefe9a99 2095 unsigned int i;
ec77d61f 2096 int nonconstruction;
eefe9a99
JH
2097
2098 if (!type)
2099 return;
2100 targets = possible_polymorphic_call_targets (otr_type, otr_token,
68377e53 2101 ctx,
ec77d61f 2102 &final, NULL, &nonconstruction);
68377e53 2103 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
eefe9a99 2104 print_generic_expr (f, type->type, TDF_SLIM);
ec77d61f
JH
2105 fprintf (f, " token %i\n", (int)otr_token);
2106 if (ctx.outer_type || ctx.offset)
2107 {
2108 fprintf (f, " Contained in type:");
2109 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2110 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2111 ctx.offset);
2112 }
2113
2114 fprintf (f, " %s%s%s\n ",
2115 final ? "This is a complete list." :
68377e53
JH
2116 "This is partial list; extra targets may be defined in other units.",
2117 ctx.maybe_in_construction ? " (base types included)" : "",
2118 ctx.maybe_derived_type ? " (derived types included)" : "");
eefe9a99 2119 for (i = 0; i < targets.length (); i++)
ec77d61f
JH
2120 {
2121 char *name = NULL;
2122 if (i == (unsigned)nonconstruction)
2123 fprintf (f, "\n If the type is in construction,"
2124 " then additional tarets are:\n"
2125 " ");
2126 if (in_lto_p)
2127 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2128 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2129 if (in_lto_p)
2130 free (name);
2131 if (!targets[i]->definition)
2132 fprintf (f, " (no definition%s)",
2133 DECL_DECLARED_INLINE_P (targets[i]->decl)
2134 ? " inline" : "");
2135 }
68377e53 2136 fprintf (f, "\n\n");
eefe9a99
JH
2137}
2138
0e1474e5
JH
2139
2140/* Return true if N can be possibly target of a polymorphic call of
2141 OTR_TYPE/OTR_TOKEN. */
2142
2143bool
2144possible_polymorphic_call_target_p (tree otr_type,
2145 HOST_WIDE_INT otr_token,
68377e53 2146 const ipa_polymorphic_call_context &ctx,
0e1474e5
JH
2147 struct cgraph_node *n)
2148{
2149 vec <cgraph_node *> targets;
2150 unsigned int i;
68377e53 2151 enum built_in_function fcode;
450ad0cd 2152 bool final;
0e1474e5 2153
68377e53
JH
2154 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2155 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2156 == BUILT_IN_UNREACHABLE
2157 || fcode == BUILT_IN_TRAP))
2158 return true;
2159
c203e8a7 2160 if (!odr_hash)
0e1474e5 2161 return true;
68377e53 2162 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
0e1474e5 2163 for (i = 0; i < targets.length (); i++)
68377e53 2164 if (symtab_semantically_equivalent_p (n, targets[i]))
0e1474e5 2165 return true;
450ad0cd
JH
2166
2167 /* At a moment we allow middle end to dig out new external declarations
2168 as a targets of polymorphic calls. */
67348ccc 2169 if (!final && !n->definition)
450ad0cd 2170 return true;
0e1474e5
JH
2171 return false;
2172}
2173
2174
2175/* After callgraph construction new external nodes may appear.
2176 Add them into the graph. */
2177
2178void
2179update_type_inheritance_graph (void)
2180{
2181 struct cgraph_node *n;
2182
c203e8a7 2183 if (!odr_hash)
0e1474e5
JH
2184 return;
2185 free_polymorphic_call_targets_hash ();
2186 timevar_push (TV_IPA_INHERITANCE);
68377e53 2187 /* We reconstruct the graph starting from types of all methods seen in the
0e1474e5
JH
2188 the unit. */
2189 FOR_EACH_FUNCTION (n)
67348ccc
DM
2190 if (DECL_VIRTUAL_P (n->decl)
2191 && !n->definition
2192 && symtab_real_symbol_p (n))
549bcbd1
JH
2193 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2194 true);
0e1474e5
JH
2195 timevar_pop (TV_IPA_INHERITANCE);
2196}
bbc9396b
JH
2197
2198
2199/* Return true if N looks like likely target of a polymorphic call.
2200 Rule out cxa_pure_virtual, noreturns, function declared cold and
2201 other obvious cases. */
2202
2203bool
2204likely_target_p (struct cgraph_node *n)
2205{
2206 int flags;
2207 /* cxa_pure_virtual and similar things are not likely. */
67348ccc 2208 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
bbc9396b 2209 return false;
67348ccc 2210 flags = flags_from_decl_or_type (n->decl);
bbc9396b
JH
2211 if (flags & ECF_NORETURN)
2212 return false;
2213 if (lookup_attribute ("cold",
67348ccc 2214 DECL_ATTRIBUTES (n->decl)))
bbc9396b
JH
2215 return false;
2216 if (n->frequency < NODE_FREQUENCY_NORMAL)
2217 return false;
ccb05ef2
JH
2218 /* If there are no virtual tables refering the target alive,
2219 the only way the target can be called is an instance comming from other
2220 compilation unit; speculative devirtualization is build around an
2221 assumption that won't happen. */
2222 if (!referenced_from_vtable_p (n))
2223 return false;
bbc9396b
JH
2224 return true;
2225}
2226
2227/* The ipa-devirt pass.
3462aa02
JH
2228 When polymorphic call has only one likely target in the unit,
2229 turn it into speculative call. */
bbc9396b
JH
2230
2231static unsigned int
2232ipa_devirt (void)
2233{
2234 struct cgraph_node *n;
2235 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2236 struct cgraph_edge *e;
2237
2238 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2239 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
570215f9 2240 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
bbc9396b
JH
2241
2242 FOR_EACH_DEFINED_FUNCTION (n)
2243 {
2244 bool update = false;
2245 if (dump_file && n->indirect_calls)
2246 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
fec39fa6 2247 n->name (), n->order);
bbc9396b
JH
2248 for (e = n->indirect_calls; e; e = e->next_callee)
2249 if (e->indirect_info->polymorphic)
2250 {
2251 struct cgraph_node *likely_target = NULL;
2252 void *cache_token;
2253 bool final;
ec77d61f 2254 int nonconstruction_targets;
bbc9396b
JH
2255 vec <cgraph_node *>targets
2256 = possible_polymorphic_call_targets
ec77d61f 2257 (e, &final, &cache_token, &nonconstruction_targets);
bbc9396b
JH
2258 unsigned int i;
2259
2260 if (dump_file)
2261 dump_possible_polymorphic_call_targets
2262 (dump_file, e);
3462aa02 2263
bbc9396b
JH
2264 npolymorphic++;
2265
bbc9396b
JH
2266 if (!cgraph_maybe_hot_edge_p (e))
2267 {
2268 if (dump_file)
ec77d61f 2269 fprintf (dump_file, "Call is cold\n\n");
bbc9396b
JH
2270 ncold++;
2271 continue;
2272 }
2273 if (e->speculative)
2274 {
2275 if (dump_file)
ec77d61f 2276 fprintf (dump_file, "Call is aready speculated\n\n");
bbc9396b
JH
2277 nspeculated++;
2278
2279 /* When dumping see if we agree with speculation. */
2280 if (!dump_file)
2281 continue;
2282 }
2283 if (pointer_set_contains (bad_call_targets,
2284 cache_token))
2285 {
2286 if (dump_file)
ec77d61f 2287 fprintf (dump_file, "Target list is known to be useless\n\n");
bbc9396b
JH
2288 nmultiple++;
2289 continue;
2290 }
c3284718 2291 for (i = 0; i < targets.length (); i++)
bbc9396b
JH
2292 if (likely_target_p (targets[i]))
2293 {
2294 if (likely_target)
2295 {
ec77d61f
JH
2296 if (i < (unsigned) nonconstruction_targets)
2297 {
2298 likely_target = NULL;
2299 if (dump_file)
2300 fprintf (dump_file, "More than one likely target\n\n");
2301 nmultiple++;
2302 }
bbc9396b
JH
2303 break;
2304 }
2305 likely_target = targets[i];
2306 }
2307 if (!likely_target)
2308 {
2309 pointer_set_insert (bad_call_targets, cache_token);
2310 continue;
2311 }
2312 /* This is reached only when dumping; check if we agree or disagree
2313 with the speculation. */
2314 if (e->speculative)
2315 {
2316 struct cgraph_edge *e2;
2317 struct ipa_ref *ref;
2318 cgraph_speculative_call_info (e, e2, e, ref);
2319 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2320 == cgraph_function_or_thunk_node (likely_target, NULL))
2321 {
ec77d61f 2322 fprintf (dump_file, "We agree with speculation\n\n");
bbc9396b
JH
2323 nok++;
2324 }
2325 else
2326 {
ec77d61f 2327 fprintf (dump_file, "We disagree with speculation\n\n");
bbc9396b
JH
2328 nwrong++;
2329 }
2330 continue;
2331 }
67348ccc 2332 if (!likely_target->definition)
bbc9396b
JH
2333 {
2334 if (dump_file)
ec77d61f 2335 fprintf (dump_file, "Target is not an definition\n\n");
bbc9396b
JH
2336 nnotdefined++;
2337 continue;
2338 }
2339 /* Do not introduce new references to external symbols. While we
2340 can handle these just well, it is common for programs to
2341 incorrectly with headers defining methods they are linked
2342 with. */
67348ccc 2343 if (DECL_EXTERNAL (likely_target->decl))
bbc9396b
JH
2344 {
2345 if (dump_file)
ec77d61f 2346 fprintf (dump_file, "Target is external\n\n");
bbc9396b
JH
2347 nexternal++;
2348 continue;
2349 }
570215f9
JM
2350 /* Don't use an implicitly-declared destructor (c++/58678). */
2351 struct cgraph_node *non_thunk_target
2352 = cgraph_function_node (likely_target);
2353 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2354 && DECL_COMDAT (non_thunk_target->decl))
2355 {
2356 if (dump_file)
2357 fprintf (dump_file, "Target is artificial\n\n");
2358 nartificial++;
2359 continue;
2360 }
bbc9396b
JH
2361 if (cgraph_function_body_availability (likely_target)
2362 <= AVAIL_OVERWRITABLE
67348ccc 2363 && symtab_can_be_discarded (likely_target))
bbc9396b
JH
2364 {
2365 if (dump_file)
ec77d61f 2366 fprintf (dump_file, "Target is overwritable\n\n");
bbc9396b
JH
2367 noverwritable++;
2368 continue;
2369 }
2b5f0895 2370 else if (dbg_cnt (devirt))
bbc9396b 2371 {
2b5f0895
XDL
2372 if (dump_enabled_p ())
2373 {
807b7d62 2374 location_t locus = gimple_location_safe (e->call_stmt);
2b5f0895
XDL
2375 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2376 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2377 n->name (), n->order,
2378 likely_target->name (),
2379 likely_target->order);
2380 }
67348ccc 2381 if (!symtab_can_be_discarded (likely_target))
5b79657a
JH
2382 {
2383 cgraph_node *alias;
2384 alias = cgraph (symtab_nonoverwritable_alias
67348ccc 2385 (likely_target));
5b79657a
JH
2386 if (alias)
2387 likely_target = alias;
2388 }
bbc9396b
JH
2389 nconverted++;
2390 update = true;
2391 cgraph_turn_edge_to_speculative
2392 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2393 }
2394 }
2395 if (update)
2396 inline_update_overall_summary (n);
2397 }
2398 pointer_set_destroy (bad_call_targets);
2399
2400 if (dump_file)
2401 fprintf (dump_file,
2402 "%i polymorphic calls, %i devirtualized,"
2403 " %i speculatively devirtualized, %i cold\n"
2404 "%i have multiple targets, %i overwritable,"
2405 " %i already speculated (%i agree, %i disagree),"
570215f9 2406 " %i external, %i not defined, %i artificial\n",
bbc9396b
JH
2407 npolymorphic, ndevirtualized, nconverted, ncold,
2408 nmultiple, noverwritable, nspeculated, nok, nwrong,
570215f9 2409 nexternal, nnotdefined, nartificial);
bbc9396b
JH
2410 return ndevirtualized ? TODO_remove_functions : 0;
2411}
2412
bbc9396b
JH
2413namespace {
2414
2415const pass_data pass_data_ipa_devirt =
2416{
2417 IPA_PASS, /* type */
2418 "devirt", /* name */
2419 OPTGROUP_NONE, /* optinfo_flags */
bbc9396b
JH
2420 TV_IPA_DEVIRT, /* tv_id */
2421 0, /* properties_required */
2422 0, /* properties_provided */
2423 0, /* properties_destroyed */
2424 0, /* todo_flags_start */
2425 ( TODO_dump_symtab ), /* todo_flags_finish */
2426};
2427
2428class pass_ipa_devirt : public ipa_opt_pass_d
2429{
2430public:
c3284718
RS
2431 pass_ipa_devirt (gcc::context *ctxt)
2432 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2433 NULL, /* generate_summary */
2434 NULL, /* write_summary */
2435 NULL, /* read_summary */
2436 NULL, /* write_optimization_summary */
2437 NULL, /* read_optimization_summary */
2438 NULL, /* stmt_fixup */
2439 0, /* function_transform_todo_flags_start */
2440 NULL, /* function_transform */
2441 NULL) /* variable_transform */
bbc9396b
JH
2442 {}
2443
2444 /* opt_pass methods: */
1a3d085c
TS
2445 virtual bool gate (function *)
2446 {
2447 return (flag_devirtualize
2448 && flag_devirtualize_speculatively
2449 && optimize);
2450 }
2451
be55bfe6 2452 virtual unsigned int execute (function *) { return ipa_devirt (); }
bbc9396b
JH
2453
2454}; // class pass_ipa_devirt
2455
2456} // anon namespace
2457
/* Factory used by the pass manager to create the ipa-devirt pass.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
2463
eefe9a99 2464#include "gt-ipa-devirt.h"