1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "print-tree.h"
27 #include "calls.h"
28 #include "expr.h"
29 #include "tree-pass.h"
30 #include "hash-set.h"
31 #include "target.h"
32 #include "hash-table.h"
33 #include "inchash.h"
34 #include "tree-pretty-print.h"
35 #include "ipa-utils.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "ipa-inline.h"
42 #include "diagnostic.h"
43 #include "tree-dfa.h"
44 #include "demangle.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "stor-layout.h"
48 #include "intl.h"
49 #include "data-streamer.h"
50 #include "lto-streamer.h"
51 #include "streamer-hooks.h"
52
53 /* Return true when TYPE contains a polymorphic type and thus is interesting
54 for devirtualization machinery. */
55
56 static bool contains_type_p (tree, HOST_WIDE_INT, tree);
57
58 bool
59 contains_polymorphic_type_p (const_tree type)
60 {
61 type = TYPE_MAIN_VARIANT (type);
62
63 if (RECORD_OR_UNION_TYPE_P (type))
64 {
65 if (TYPE_BINFO (type)
66 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
67 return true;
68 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
69 if (TREE_CODE (fld) == FIELD_DECL
70 && !DECL_ARTIFICIAL (fld)
71 && contains_polymorphic_type_p (TREE_TYPE (fld)))
72 return true;
73 return false;
74 }
75 if (TREE_CODE (type) == ARRAY_TYPE)
76 return contains_polymorphic_type_p (TREE_TYPE (type));
77 return false;
78 }
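
/* Illustration (a hypothetical example, not part of the original source):
   for the C++ types below, the predicate above returns true for B and C
   but false for A:

     struct A { int i; double d; };        // nothing polymorphic
     struct B { virtual ~B (); };          // polymorphic itself (has a vtable)
     struct C { int i; B arr[4]; };        // reaches B through a field of array type

   Note that only user-declared fields are walked; DECL_ARTIFICIAL fields
   such as base sub-objects are skipped, since bases are already covered by
   the TYPE_BINFO check. */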
79
80 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
81 at position CUR_OFFSET within TYPE.
82
83 A POD can be changed to an instance of a polymorphic type by
84 placement new. Here we play it safe and assume that any
85 non-polymorphic type is POD. */
86 bool
87 possible_placement_new (tree type, tree expected_type,
88 HOST_WIDE_INT cur_offset)
89 {
90 return ((TREE_CODE (type) != RECORD_TYPE
91 || !TYPE_BINFO (type)
92 || cur_offset >= BITS_PER_WORD
93 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
94 && (!TYPE_SIZE (type)
95 || !tree_fits_shwi_p (TYPE_SIZE (type))
96 || (cur_offset
97 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
98 : 1)
99 <= tree_to_uhwi (TYPE_SIZE (type)))));
100 }
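
/* Illustration (a hypothetical example, not part of the original source):
   placement new is why a sufficiently large POD buffer must be treated as a
   possible home of EXPECTED_TYPE even though its static type is unrelated:

     struct Poly { virtual void f (); int i; };
     alignas (Poly) char buf[64];          // plain POD storage
     Poly *p = new (buf) Poly ();          // dynamic type of the storage is now Poly

   The predicate above therefore rejects only candidates that are themselves
   polymorphic near offset 0 (within the first word) or whose known size is
   provably too small to hold EXPECTED_TYPE at CUR_OFFSET. */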
101
102 /* THIS->OUTER_TYPE is the type of a memory object where an object of EXPECTED_TYPE
103 is contained at THIS->OFFSET. Walk the memory representation of
104 THIS->OUTER_TYPE and find the outermost class type that matches
105 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update THIS
106 to represent it.
107
108 If EXPECTED_TYPE is NULL, just find the outermost polymorphic type with
109 a virtual table present at position OFFSET.
110
111 For example when THIS represents type
112 class A
113 {
114 int a;
115 class B b;
116 }
117 and we look for type at offset sizeof(int), we end up with B and offset 0.
118 If the same is produced by multiple inheritance, we end up with A and offset
119 sizeof(int).
120
121 If we cannot find the corresponding class, give up by setting
122 THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to 0.
123 Return true when the lookup was successful. */
124
125 bool
126 ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
127 {
128 tree type = outer_type;
129 HOST_WIDE_INT cur_offset = offset;
130 bool speculative = false;
131 bool size_unknown = false;
132
133 /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set. */
134 if (!outer_type)
135 {
136 clear_outer_type (expected_type);
137 type = expected_type;
138 cur_offset = 0;
139 }
140 /* See if OFFSET points inside OUTER_TYPE. If it does not, we know
141 that the context is either invalid, or the instance type must be
142 derived from OUTER_TYPE.
143
144 Because the instance type may contain a field whose type is OUTER_TYPE,
145 we cannot derive any effective information about it.
146
147 TODO: In the case we know all derived types, we can definitely do better
148 here. */
149 else if (TYPE_SIZE (outer_type)
150 && tree_fits_shwi_p (TYPE_SIZE (outer_type))
151 && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
152 && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
153 {
154 clear_outer_type (expected_type);
155 type = expected_type;
156 cur_offset = 0;
157
158 /* If derived type is not allowed, we know that the context is invalid. */
159 if (!maybe_derived_type)
160 {
161 clear_speculation ();
162 invalid = true;
163 return false;
164 }
165 }
166
167 if (speculative_outer_type)
168 {
169 /* Short circuit the busy work below and give up on the case when speculation
170 is obviously the same as outer_type. */
171 if ((!maybe_derived_type
172 || speculative_maybe_derived_type)
173 && types_must_be_same_for_odr (speculative_outer_type, outer_type))
174 clear_speculation ();
175
176 /* See if SPECULATIVE_OUTER_TYPE is contained in or derived from OUTER_TYPE.
177 In this case speculation is valid only if derived types are allowed.
178
179 The test does not really look only for derivation, but also accepts the case where
180 outer_type is a field of speculative_outer_type. In this case either
181 MAYBE_DERIVED_TYPE is false and we have full non-speculative information or
182 the loop below will correctly update SPECULATIVE_OUTER_TYPE
183 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
184 else if (speculative_offset < offset
185 || !contains_type_p (speculative_outer_type,
186 speculative_offset - offset,
187 outer_type)
188 || !maybe_derived_type)
189 clear_speculation ();
190 }
191 else
192 /* Regularize things a little bit and clear all the fields when no useful
193 speculation is known. */
194 clear_speculation ();
195
196 if (!type)
197 goto no_useful_type_info;
198
199 /* Find the sub-object the constant actually refers to and mark whether it is
200 an artificial one (as opposed to a user-defined one).
201
202 This loop is performed twice; first time for outer_type and second time
203 for speculative_outer_type. The second run has SPECULATIVE set. */
204 while (true)
205 {
206 HOST_WIDE_INT pos, size;
207 tree fld;
208
209 /* If we do not know the size of TYPE, we need to be more conservative
210 about accepting cases where we cannot find EXPECTED_TYPE.
211 Generally the types that matter here are of constant size.
212 The size_unknown case should be very rare. */
213 if (TYPE_SIZE (type)
214 && tree_fits_shwi_p (TYPE_SIZE (type))
215 && tree_to_shwi (TYPE_SIZE (type)) >= 0)
216 size_unknown = false;
217 else
218 size_unknown = true;
219
220 /* On a match, just return what we found. */
221 if ((types_odr_comparable (type, expected_type)
222 && types_same_for_odr (type, expected_type))
223 || (!expected_type
224 && TREE_CODE (type) == RECORD_TYPE
225 && TYPE_BINFO (type)
226 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
227 {
228 if (speculative)
229 {
230 /* If we did not match the offset, just give up on speculation. */
231 if (cur_offset != 0
232 /* Also check if speculation did not end up being same as
233 non-speculation. */
234 || (types_must_be_same_for_odr (speculative_outer_type,
235 outer_type)
236 && (maybe_derived_type
237 == speculative_maybe_derived_type)))
238 clear_speculation ();
239 return true;
240 }
241 else
242 {
243 /* If type is known to be final, do not worry about derived
244 types. Testing it here may help us to avoid speculation. */
245 if (type_known_to_have_no_deriavations_p (outer_type))
246 maybe_derived_type = false;
247
248 /* A type cannot contain itself at a non-zero offset. In that case
249 just give up. Still accept the case where the size is not known:
250 either the second copy may appear past the end of the type or within
251 a non-POD buffer located inside the variably sized type
252 itself. */
253 if (cur_offset != 0)
254 goto no_useful_type_info;
255 /* If we determined the type precisely or we have no clue on
256 speculation, we are done. */
257 if (!maybe_derived_type || !speculative_outer_type)
258 {
259 clear_speculation ();
260 return true;
261 }
262 /* Otherwise look into speculation now. */
263 else
264 {
265 speculative = true;
266 type = speculative_outer_type;
267 cur_offset = speculative_offset;
268 continue;
269 }
270 }
271 }
272
273 /* Walk the fields and find the corresponding one at OFFSET. */
274 if (TREE_CODE (type) == RECORD_TYPE)
275 {
276 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
277 {
278 if (TREE_CODE (fld) != FIELD_DECL)
279 continue;
280
281 pos = int_bit_position (fld);
282 size = tree_to_uhwi (DECL_SIZE (fld));
283 if (pos <= cur_offset && (pos + size) > cur_offset)
284 break;
285 }
286
287 if (!fld)
288 goto no_useful_type_info;
289
290 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
291 cur_offset -= pos;
292 /* DECL_ARTIFICIAL represents a basetype. */
293 if (!DECL_ARTIFICIAL (fld))
294 {
295 if (!speculative)
296 {
297 outer_type = type;
298 offset = cur_offset;
299 /* As soon as we see a field containing the type,
300 we know we are not looking for derivations. */
301 maybe_derived_type = false;
302 }
303 else
304 {
305 speculative_outer_type = type;
306 speculative_offset = cur_offset;
307 speculative_maybe_derived_type = false;
308 }
309 }
310 }
311 else if (TREE_CODE (type) == ARRAY_TYPE)
312 {
313 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
314
315 /* Give up if we don't know array size. */
316 if (!TYPE_SIZE (subtype)
317 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
318 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
319 || !contains_polymorphic_type_p (subtype))
320 goto no_useful_type_info;
321
322 HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
323
324 /* We may see a buffer for placement new. In this case the expected type
325 can be bigger than the subtype. */
326 if (TYPE_SIZE (subtype)
327 && (cur_offset
328 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
329 : 0)
330 > tree_to_uhwi (TYPE_SIZE (subtype))))
331 goto no_useful_type_info;
332
333 cur_offset = new_offset;
334 type = subtype;
335 if (!speculative)
336 {
337 outer_type = type;
338 offset = cur_offset;
339 maybe_derived_type = false;
340 }
341 else
342 {
343 speculative_outer_type = type;
344 speculative_offset = cur_offset;
345 speculative_maybe_derived_type = false;
346 }
347 }
348 /* Give up on anything else. */
349 else
350 {
351 no_useful_type_info:
352 /* We found no way to embed EXPECTED_TYPE in TYPE.
353 We still permit two special cases - placement new and
354 the case of variably sized types containing themselves. */
355 if (!speculative
356 && (size_unknown || !type
357 || possible_placement_new (type, expected_type, cur_offset)))
358 {
359 /* In these weird cases we want to accept the context.
360 In non-speculative run we have no useful outer_type info
361 (TODO: we may eventually want to record upper bound on the
362 type size that can be used to prune the walk),
363 but we still want to consider speculation that may
364 give useful info. */
365 if (!speculative)
366 {
367 clear_outer_type (expected_type);
368 if (speculative_outer_type)
369 {
370 speculative = true;
371 type = speculative_outer_type;
372 cur_offset = speculative_offset;
373 }
374 else
375 return true;
376 }
377 else
378 clear_speculation ();
379 return true;
380 }
381 else
382 {
383 clear_speculation ();
384 if (speculative)
385 return true;
386 clear_outer_type (expected_type);
387 invalid = true;
388 return false;
389 }
390 }
391 }
392 }
393
394 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
395
396 static bool
397 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
398 tree otr_type)
399 {
400 ipa_polymorphic_call_context context;
401 context.offset = offset;
402 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
403 context.maybe_derived_type = false;
404 return context.restrict_to_inner_class (otr_type);
405 }
406
407
408 /* We know that the instance is stored in a variable or parameter
409 (not dynamically allocated) and we want to disprove the fact
410 that it may be in construction at the invocation of CALL.
411
412 For the variable to be in construction we actually need to
413 be in the constructor of the corresponding global variable or
414 the inline stack of CALL must contain the constructor.
415 Check this condition. This check works safely only before
416 IPA passes, because inline stacks may become out of date
417 later. */
418
419 bool
420 decl_maybe_in_construction_p (tree base, tree outer_type,
421 gimple call, tree function)
422 {
423 outer_type = TYPE_MAIN_VARIANT (outer_type);
424 gcc_assert (DECL_P (base));
425
426 /* After inlining, code unification optimizations may invalidate
427 inline stacks. Also we need to give up on global variables after
428 IPA, because addresses of these may have been propagated to their
429 constructors. */
430 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
431 return true;
432
433 /* Pure functions cannot change the dynamic type;
434 that requires writing to memory. */
435 if (!auto_var_in_fn_p (base, function)
436 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
437 return false;
438
439 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
440 block = BLOCK_SUPERCONTEXT (block))
441 if (BLOCK_ABSTRACT_ORIGIN (block)
442 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
443 {
444 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
445
446 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
447 || (!DECL_CXX_CONSTRUCTOR_P (fn)
448 && !DECL_CXX_DESTRUCTOR_P (fn)))
449 {
450 /* Watch for clones where we constant propagated the first
451 argument (pointer to the instance). */
452 fn = DECL_ABSTRACT_ORIGIN (fn);
453 if (!fn
454 || !is_global_var (base)
455 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
456 || (!DECL_CXX_CONSTRUCTOR_P (fn)
457 && !DECL_CXX_DESTRUCTOR_P (fn)))
458 continue;
459 }
460 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
461 continue;
462
463 /* FIXME: this can go away once we have ODR type equivalency at the
464 LTO level. */
465 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
466 return true;
467 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
468 if (types_same_for_odr (type, outer_type))
469 return true;
470 }
471
472 if (TREE_CODE (base) == VAR_DECL
473 && is_global_var (base))
474 {
475 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
476 || (!DECL_CXX_CONSTRUCTOR_P (function)
477 && !DECL_CXX_DESTRUCTOR_P (function)))
478 {
479 if (!DECL_ABSTRACT_ORIGIN (function))
480 return false;
481 /* Watch for clones where we constant propagated the first
482 argument (pointer to the instance). */
483 function = DECL_ABSTRACT_ORIGIN (function);
484 if (!function
485 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
486 || (!DECL_CXX_CONSTRUCTOR_P (function)
487 && !DECL_CXX_DESTRUCTOR_P (function)))
488 return false;
489 }
490 /* FIXME: this can go away once we have ODR type equivalency at the
491 LTO level. */
492 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
493 return true;
494 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
495 if (types_same_for_odr (type, outer_type))
496 return true;
497 }
498 return false;
499 }
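
/* Illustration (a hypothetical example, not part of the original source):
   for a variable in static storage the only places where the dynamic type
   can still be "in construction" are constructor/destructor bodies visible
   on the inline stack of CALL, which is what the walk above checks:

     struct A { virtual void f (); A () { f (); } };
     static A a;                    // A::A () runs during static initialization
     void g () { a.f (); }          // construction of 'a' is already finished here

   A call to f () inlined from A::A () must keep maybe_in_construction set,
   while the call in g () may safely drop it. */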
500
501 /* Dump human readable context to F. */
502
503 void
504 ipa_polymorphic_call_context::dump (FILE *f) const
505 {
506 fprintf (f, " ");
507 if (invalid)
508 fprintf (f, "Call is known to be undefined\n");
509 else
510 {
511 if (!outer_type && !offset && !speculative_outer_type)
512 fprintf (f, "Empty context\n");
513 if (outer_type || offset)
514 {
515 fprintf (f, "Outer type:");
516 print_generic_expr (f, outer_type, TDF_SLIM);
517 if (maybe_derived_type)
518 fprintf (f, " (or a derived type)");
519 if (maybe_in_construction)
520 fprintf (f, " (maybe in construction)");
521 fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
522 offset);
523 }
524 if (speculative_outer_type)
525 {
526 fprintf (f, " speculative outer type:");
527 print_generic_expr (f, speculative_outer_type, TDF_SLIM);
528 if (speculative_maybe_derived_type)
529 fprintf (f, " (or a derived type)");
530 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
531 speculative_offset);
532 }
533 }
534 fprintf(f, "\n");
535 }
536
537 /* Print context to stderr. */
538
539 void
540 ipa_polymorphic_call_context::debug () const
541 {
542 dump (stderr);
543 }
544
545 /* Stream out the context to OB. */
546
547 void
548 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
549 {
550 struct bitpack_d bp = bitpack_create (ob->main_stream);
551
552 bp_pack_value (&bp, invalid, 1);
553 bp_pack_value (&bp, maybe_in_construction, 1);
554 bp_pack_value (&bp, maybe_derived_type, 1);
555 bp_pack_value (&bp, speculative_maybe_derived_type, 1);
556 bp_pack_value (&bp, outer_type != NULL, 1);
557 bp_pack_value (&bp, offset != 0, 1);
558 bp_pack_value (&bp, speculative_outer_type != NULL, 1);
559 streamer_write_bitpack (&bp);
560
561 if (outer_type != NULL)
562 stream_write_tree (ob, outer_type, true);
563 if (offset)
564 streamer_write_hwi (ob, offset);
565 if (speculative_outer_type != NULL)
566 {
567 stream_write_tree (ob, speculative_outer_type, true);
568 streamer_write_hwi (ob, speculative_offset);
569 }
570 else
571 gcc_assert (!speculative_offset);
572 }
573
574 /* Stream in the context from IB and DATA_IN. */
575
576 void
577 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
578 struct data_in *data_in)
579 {
580 struct bitpack_d bp = streamer_read_bitpack (ib);
581
582 invalid = bp_unpack_value (&bp, 1);
583 maybe_in_construction = bp_unpack_value (&bp, 1);
584 maybe_derived_type = bp_unpack_value (&bp, 1);
585 speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
586 bool outer_type_p = bp_unpack_value (&bp, 1);
587 bool offset_p = bp_unpack_value (&bp, 1);
588 bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
589
590 if (outer_type_p)
591 outer_type = stream_read_tree (ib, data_in);
592 else
593 outer_type = NULL;
594 if (offset_p)
595 offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
596 else
597 offset = 0;
598 if (speculative_outer_type_p)
599 {
600 speculative_outer_type = stream_read_tree (ib, data_in);
601 speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
602 }
603 else
604 {
605 speculative_outer_type = NULL;
606 speculative_offset = 0;
607 }
608 }
609
610 /* Produce a polymorphic call context for a call to a method of an instance
611 that is located within BASE (which is assumed to be a decl) at offset OFF. */
612
613 void
614 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
615 {
616 gcc_assert (DECL_P (base));
617
618 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
619 offset = off;
620 clear_speculation ();
621 /* Make very conservative assumption that all objects
622 may be in construction.
623
624 It is up to caller to revisit this via
625 get_dynamic_type or decl_maybe_in_construction_p. */
626 maybe_in_construction = true;
627 maybe_derived_type = false;
628 }
629
630 /* CST is an invariant (address of decl); try to get a meaningful
631 polymorphic call context for a polymorphic call to a method of an
632 instance of OTR_TYPE that is located at offset OFF within this invariant.
633 Return FALSE if nothing meaningful can be found. */
634
635 bool
636 ipa_polymorphic_call_context::set_by_invariant (tree cst,
637 tree otr_type,
638 HOST_WIDE_INT off)
639 {
640 HOST_WIDE_INT offset2, size, max_size;
641 tree base;
642
643 invalid = false;
644 off = 0;
645 clear_outer_type (otr_type);
646
647 if (TREE_CODE (cst) != ADDR_EXPR)
648 return false;
649
650 cst = TREE_OPERAND (cst, 0);
651 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
652 if (!DECL_P (base) || max_size == -1 || max_size != size)
653 return false;
654
655 /* Only type inconsistent programs can have otr_type that is
656 not part of outer type. */
657 if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
658 return false;
659
660 set_by_decl (base, off);
661 return true;
662 }
663
664 /* See if OP is an SSA name initialized as a copy or by a single assignment.
665 If so, walk the SSA graph upwards. */
666
667 static tree
668 walk_ssa_copies (tree op)
669 {
670 STRIP_NOPS (op);
671 while (TREE_CODE (op) == SSA_NAME
672 && !SSA_NAME_IS_DEFAULT_DEF (op)
673 && SSA_NAME_DEF_STMT (op)
674 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
675 {
676 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
677 return op;
678 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
679 STRIP_NOPS (op);
680 }
681 return op;
682 }
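
/* Illustration (a hypothetical GIMPLE fragment, not part of the original
   source): given

     ptr_1 = &global;
     ptr_2 = ptr_1;
     ... use of ptr_2 ...

   walk_ssa_copies (ptr_2) follows the single-assignment copies backwards and
   returns &global. The walk deliberately stops and returns the SSA name
   itself as soon as the defining statement is a load from memory (e.g.
   ptr_1 = this_3->field), because past a load the value no longer comes from
   a simple copy. */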
683
684 /* Create a polymorphic call context from IP invariant CST.
685 This is typically &global_var.
686 OTR_TYPE specifies the type of the polymorphic call or NULL if unknown; OFF
687 is the offset of the call. */
688
689 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
690 tree otr_type,
691 HOST_WIDE_INT off)
692 {
693 clear_speculation ();
694 set_by_invariant (cst, otr_type, off);
695 }
696
697 /* Build a context for pointer REF contained in FNDECL at statement STMT.
698 If INSTANCE is non-NULL, return a pointer to the object described by
699 the context, or the DECL the context is contained in. */
700
701 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
702 tree ref,
703 gimple stmt,
704 tree *instance)
705 {
706 tree otr_type = NULL;
707 tree base_pointer;
708
709 if (TREE_CODE (ref) == OBJ_TYPE_REF)
710 {
711 otr_type = obj_type_ref_class (ref);
712 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
713 }
714 else
715 base_pointer = ref;
716
717 /* Set up basic info in case we find nothing interesting in the analysis. */
718 clear_speculation ();
719 clear_outer_type (otr_type);
720 invalid = false;
721
722 /* Walk SSA for outer object. */
723 do
724 {
725 base_pointer = walk_ssa_copies (base_pointer);
726 if (TREE_CODE (base_pointer) == ADDR_EXPR)
727 {
728 HOST_WIDE_INT size, max_size;
729 HOST_WIDE_INT offset2;
730 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
731 &offset2, &size, &max_size);
732
733 /* If this is a varying address, punt. */
734 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
735 && max_size != -1
736 && max_size == size)
737 {
738 /* We found a dereference of a pointer. The type of the pointer
739 and of the MEM_REF is meaningless, but we can look further. */
740 if (TREE_CODE (base) == MEM_REF)
741 {
742 base_pointer = TREE_OPERAND (base, 0);
743 offset
744 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
745 outer_type = NULL;
746 }
747 /* We found the base object. In this case the outer_type
748 is known. */
749 else if (DECL_P (base))
750 {
751 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
752
753 /* Only type inconsistent programs can have otr_type that is
754 not part of outer type. */
755 if (otr_type
756 && !contains_type_p (TREE_TYPE (base),
757 offset + offset2, otr_type))
758 {
759 invalid = true;
760 if (instance)
761 *instance = base_pointer;
762 return;
763 }
764 set_by_decl (base, offset + offset2);
765 if (maybe_in_construction && stmt)
766 maybe_in_construction
767 = decl_maybe_in_construction_p (base,
768 outer_type,
769 stmt,
770 fndecl);
771 if (instance)
772 *instance = base;
773 return;
774 }
775 else
776 break;
777 }
778 else
779 break;
780 }
781 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
782 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
783 {
784 offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
785 * BITS_PER_UNIT;
786 base_pointer = TREE_OPERAND (base_pointer, 0);
787 }
788 else
789 break;
790 }
791 while (true);
792
793 /* Try to determine type of the outer object. */
794 if (TREE_CODE (base_pointer) == SSA_NAME
795 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
796 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
797 {
798 /* See if parameter is THIS pointer of a method. */
799 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
800 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
801 {
802 outer_type
803 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
804 gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE);
805
806 /* Dynamic casting has possibly upcasted the type
807 in the hierarchy. In this case the outer type is less
808 informative than the inner type and we should forget
809 about it. */
810 if (otr_type
811 && !contains_type_p (outer_type, offset,
812 otr_type))
813 {
814 outer_type = NULL;
815 if (instance)
816 *instance = base_pointer;
817 return;
818 }
819
820 /* If the function is a constructor or destructor, then
821 the type is possibly in construction, but we know
822 it is not a derived type. */
823 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
824 || DECL_CXX_DESTRUCTOR_P (fndecl))
825 {
826 maybe_in_construction = true;
827 maybe_derived_type = false;
828 }
829 else
830 {
831 maybe_derived_type = true;
832 maybe_in_construction = false;
833 }
834 if (instance)
835 *instance = base_pointer;
836 return;
837 }
838 /* Non-PODs passed by value are really passed by invisible
839 reference. In this case we also know the type of the
840 object. */
841 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
842 {
843 outer_type
844 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
845 gcc_assert (!POINTER_TYPE_P (outer_type));
846 /* Only type inconsistent programs can have otr_type that is
847 not part of outer type. */
848 if (!contains_type_p (outer_type, offset,
849 otr_type))
850 {
851 invalid = true;
852 if (instance)
853 *instance = base_pointer;
854 return;
855 }
856 maybe_derived_type = false;
857 maybe_in_construction = false;
858 if (instance)
859 *instance = base_pointer;
860 return;
861 }
862 }
863
864 tree base_type = TREE_TYPE (base_pointer);
865
866 if (TREE_CODE (base_pointer) == SSA_NAME
867 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
868 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
869 {
870 invalid = true;
871 if (instance)
872 *instance = base_pointer;
873 return;
874 }
875 if (TREE_CODE (base_pointer) == SSA_NAME
876 && SSA_NAME_DEF_STMT (base_pointer)
877 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
878 base_type = TREE_TYPE (gimple_assign_rhs1
879 (SSA_NAME_DEF_STMT (base_pointer)));
880
881 if (POINTER_TYPE_P (base_type)
882 && (otr_type
883 || !contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
884 offset,
885 otr_type)))
886 {
887 speculative_outer_type = TYPE_MAIN_VARIANT
888 (TREE_TYPE (base_type));
889 speculative_offset = offset;
890 speculative_maybe_derived_type = true;
891 }
892 /* TODO: There are multiple ways to derive a type. For instance,
893 BASE_POINTER may be passed to a constructor call prior to our reference.
894 We do not do this kind of flow-sensitive analysis yet. */
895 if (instance)
896 *instance = base_pointer;
897 return;
898 }
899
900 /* Structure to be passed in between detect_type_change and
901 check_stmt_for_type_change. */
902
903 struct type_change_info
904 {
905 /* Offset into the object where there is the virtual method pointer we are
906 looking for. */
907 HOST_WIDE_INT offset;
908 /* The declaration or SSA_NAME pointer of the base that we are checking for
909 type change. */
910 tree instance;
911 /* The reference to virtual table pointer used. */
912 tree vtbl_ptr_ref;
913 tree otr_type;
914 /* If we actually can tell the type that the object has changed to, it is
915 stored in this field. Otherwise it remains NULL_TREE. */
916 tree known_current_type;
917 HOST_WIDE_INT known_current_offset;
918
919 /* Set to true if dynamic type change has been detected. */
920 bool type_maybe_changed;
921 /* Set to true if multiple types have been encountered. known_current_type
922 must be disregarded in that case. */
923 bool multiple_types_encountered;
924 /* Set to true if we possibly missed some dynamic type changes and we should
925 consider the set to be speculative. */
926 bool speculative;
927 bool seen_unanalyzed_store;
928 };
929
930 /* Return true if STMT is not a call and can modify a virtual method table pointer.
931 We take advantage of the fact that vtable stores must appear within constructor
932 and destructor functions. */
933
934 static bool
935 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
936 {
937 if (is_gimple_assign (stmt))
938 {
939 tree lhs = gimple_assign_lhs (stmt);
940
941 if (gimple_clobber_p (stmt))
942 return false;
943 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
944 {
945 if (flag_strict_aliasing
946 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
947 return false;
948
949 if (TREE_CODE (lhs) == COMPONENT_REF
950 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
951 return false;
952 /* In the future we might want to use get_base_ref_and_offset to find
953 if there is a field corresponding to the offset and if so, proceed
954 almost as if it were a component ref. */
955 }
956 }
957
958 /* Code unification may mess with inline stacks. */
959 if (cfun->after_inlining)
960 return true;
961
962 /* Walk the inline stack and watch out for ctors/dtors.
963 TODO: Maybe we can require the store to appear in toplevel
964 block of CTOR/DTOR. */
965 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
966 block = BLOCK_SUPERCONTEXT (block))
967 if (BLOCK_ABSTRACT_ORIGIN (block)
968 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
969 {
970 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
971
972 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
973 return false;
974 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
975 && (DECL_CXX_CONSTRUCTOR_P (fn)
976 || DECL_CXX_DESTRUCTOR_P (fn)));
977 }
978 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
979 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
980 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
981 }
982
983 /* If STMT can be proved to be an assignment to the virtual method table
984 pointer of the instance described by TCI, and the type associated with
985 the new table can be identified, return the type. Otherwise return NULL_TREE. */
986
987 static tree
988 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
989 HOST_WIDE_INT *type_offset)
990 {
991 HOST_WIDE_INT offset, size, max_size;
992 tree lhs, rhs, base;
993
994 if (!gimple_assign_single_p (stmt))
995 return NULL_TREE;
996
997 lhs = gimple_assign_lhs (stmt);
998 rhs = gimple_assign_rhs1 (stmt);
999 if (TREE_CODE (lhs) != COMPONENT_REF
1000 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1001 {
1002 if (dump_file)
1003 fprintf (dump_file, " LHS is not virtual table.\n");
1004 return NULL_TREE;
1005 }
1006
1007 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1008 ;
1009 else
1010 {
1011 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1012 if (offset != tci->offset
1013 || size != POINTER_SIZE
1014 || max_size != POINTER_SIZE)
1015 {
1016 if (dump_file)
1017 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
1018 (int)offset, (int)tci->offset, (int)size);
1019 return NULL_TREE;
1020 }
1021 if (DECL_P (tci->instance))
1022 {
1023 if (base != tci->instance)
1024 {
1025 if (dump_file)
1026 {
1027 fprintf (dump_file, " base:");
1028 print_generic_expr (dump_file, base, TDF_SLIM);
1029 fprintf (dump_file, " does not match instance:");
1030 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1031 fprintf (dump_file, "\n");
1032 }
1033 return NULL_TREE;
1034 }
1035 }
1036 else if (TREE_CODE (base) == MEM_REF)
1037 {
1038 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
1039 || !integer_zerop (TREE_OPERAND (base, 1)))
1040 {
1041 if (dump_file)
1042 {
1043 fprintf (dump_file, " base mem ref:");
1044 print_generic_expr (dump_file, base, TDF_SLIM);
1045 fprintf (dump_file, " has nonzero offset or does not match instance:");
1046 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1047 fprintf (dump_file, "\n");
1048 }
1049 return NULL_TREE;
1050 }
1051 }
1052 else if (!operand_equal_p (tci->instance, base, 0)
1053 || tci->offset)
1054 {
1055 if (dump_file)
1056 {
1057 fprintf (dump_file, " base:");
1058 print_generic_expr (dump_file, base, TDF_SLIM);
1059 fprintf (dump_file, " does not match instance:");
1060 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1061 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1062 }
1063 return NULL_TREE;
1064 }
1065 }
1066
1067 tree vtable;
1068 unsigned HOST_WIDE_INT offset2;
1069
1070 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1071 {
1072 if (dump_file)
1073 fprintf (dump_file, " Failed to lookup binfo\n");
1074 return NULL;
1075 }
1076
1077 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1078 offset2, vtable);
1079 if (!binfo)
1080 {
1081 if (dump_file)
1082 fprintf (dump_file, " Construction vtable used\n");
1083 /* FIXME: We should support construction contexts. */
1084 return NULL;
1085 }
1086
1087 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1088 return DECL_CONTEXT (vtable);
1089 }
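
/* Illustration (a hypothetical GIMPLE fragment, not part of the original
   source): the store matched above typically comes from an inlined
   constructor and looks like

     this_2->_vptr.A = &_ZTV1B + 16;

   vtable_pointer_value_to_vtable decomposes the right hand side into the
   vtable VAR_DECL (_ZTV1B) plus an offset, and subbinfo_with_vtable_at_offset
   maps that offset back to the binfo whose vtable it is. The result is the
   class owning the vtable (B here) together with the offset of the
   sub-object whose vtable pointer is being written. */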
1090
1091 /* Record dynamic type change of TCI to TYPE. */
1092
1093 static void
1094 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1095 {
1096 if (dump_file)
1097 {
1098 if (type)
1099 {
1100 fprintf (dump_file, " Recording type: ");
1101 print_generic_expr (dump_file, type, TDF_SLIM);
1102 fprintf (dump_file, " at offset %i\n", (int)offset);
1103 }
1104 else
1105 fprintf (dump_file, " Recording unknown type\n");
1106 }
1107
1108 /* If we found a constructor of a type that is not polymorphic or
1109 that may contain the type in question as a field (not as a base),
1110 restrict to the inner class first to make type matching below
1111 happier. */
1112 if (type
1113 && (offset
1114 || (TREE_CODE (type) != RECORD_TYPE
1115 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1116 {
1117 ipa_polymorphic_call_context context;
1118
1119 context.offset = offset;
1120 context.outer_type = type;
1121 context.maybe_in_construction = false;
1122 context.maybe_derived_type = false;
1123 /* If we failed to find the inner type, we know that the call
1124 would be undefined for type produced here. */
1125 if (!context.restrict_to_inner_class (tci->otr_type))
1126 {
1127 if (dump_file)
1128 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
1129 return;
1130 }
1131 /* Watch for the case where we reached a POD type and anticipate placement
1132 new. */
1133 if (!context.maybe_derived_type)
1134 {
1135 type = context.outer_type;
1136 offset = context.offset;
1137 }
1138 }
1139 if (tci->type_maybe_changed
1140 && (!types_same_for_odr (type, tci->known_current_type)
1141 || offset != tci->known_current_offset))
1142 tci->multiple_types_encountered = true;
1143 tci->known_current_type = TYPE_MAIN_VARIANT (type);
1144 tci->known_current_offset = offset;
1145 tci->type_maybe_changed = true;
1146 }
1147
1148 /* Callback of walk_aliased_vdefs and a helper function for
1149 detect_type_change to check whether a particular statement may modify
1150 the virtual table pointer, and if possible also determine the new type of
1151 the (sub-)object. It stores its result into DATA, which points to a
1152 type_change_info structure. */
1153
1154 static bool
1155 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1156 {
1157 gimple stmt = SSA_NAME_DEF_STMT (vdef);
1158 struct type_change_info *tci = (struct type_change_info *) data;
1159 tree fn;
1160
1161 /* If we already gave up, just terminate the rest of walk. */
1162 if (tci->multiple_types_encountered)
1163 return true;
1164
1165 if (is_gimple_call (stmt))
1166 {
1167 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1168 return false;
1169
1170 /* Check for a constructor call. */
1171 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1172 && DECL_CXX_CONSTRUCTOR_P (fn)
1173 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1174 && gimple_call_num_args (stmt))
1175 {
1176 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1177 tree type = method_class_type (TREE_TYPE (fn));
1178 HOST_WIDE_INT offset = 0, size, max_size;
1179
1180 if (dump_file)
1181 {
1182 fprintf (dump_file, " Checking constructor call: ");
1183 print_gimple_stmt (dump_file, stmt, 0, 0);
1184 }
1185
1186 /* See if THIS parameter seems like instance pointer. */
1187 if (TREE_CODE (op) == ADDR_EXPR)
1188 {
1189 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
1190 &offset, &size, &max_size);
1191 if (size != max_size || max_size == -1)
1192 {
1193 tci->speculative = true;
1194 return false;
1195 }
1196 if (op && TREE_CODE (op) == MEM_REF)
1197 {
1198 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1199 {
1200 tci->speculative = true;
1201 return false;
1202 }
1203 offset += tree_to_shwi (TREE_OPERAND (op, 1))
1204 * BITS_PER_UNIT;
1205 op = TREE_OPERAND (op, 0);
1206 }
1207 else if (DECL_P (op))
1208 ;
1209 else
1210 {
1211 tci->speculative = true;
1212 return false;
1213 }
1214 op = walk_ssa_copies (op);
1215 }
1216 if (operand_equal_p (op, tci->instance, 0)
1217 && TYPE_SIZE (type)
1218 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1219 && tree_fits_shwi_p (TYPE_SIZE (type))
1220 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
1221 {
1222 record_known_type (tci, type, tci->offset - offset);
1223 return true;
1224 }
1225 }
1226 /* Calls may possibly change the dynamic type via placement new. Assume
1227 it will not happen, but make the result speculative only. */
1228 if (dump_file)
1229 {
1230 fprintf (dump_file, " Function call may change dynamic type:");
1231 print_gimple_stmt (dump_file, stmt, 0, 0);
1232 }
1233 tci->speculative = true;
1234 return false;
1235 }
1236 /* Check for inlined virtual table store. */
1237 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1238 {
1239 tree type;
1240 HOST_WIDE_INT offset = 0;
1241 if (dump_file)
1242 {
1243 fprintf (dump_file, " Checking vtbl store: ");
1244 print_gimple_stmt (dump_file, stmt, 0, 0);
1245 }
1246
1247 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1248 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1249 if (!type)
1250 {
1251 if (dump_file)
1252 fprintf (dump_file, " Unanalyzed store may change type.\n");
1253 tci->seen_unanalyzed_store = true;
1254 tci->speculative = true;
1255 }
1256 else
1257 record_known_type (tci, type, offset);
1258 return true;
1259 }
1260 else
1261 return false;
1262 }
1263
1264 /* THIS is the polymorphic call context obtained from get_polymorphic_context.
1265 OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1266 INSTANCE is a pointer to the outer instance as returned by
1267 get_polymorphic_context. To avoid creation of temporary expressions,
1268 INSTANCE may also be a declaration, if get_polymorphic_context found the
1269 value to be in static storage.
1270
1271 If the type of the instance is not fully determined
1272 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1273 is set), try to walk memory writes and find the actual construction of the
1274 instance.
1275
1276 We do not include this analysis in the context analysis itself, because
1277 it needs memory SSA to be fully built and the walk may be expensive.
1278 So it is not suitable for use within fold_stmt and similar uses. */
1279
1280 bool
1281 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1282 tree otr_object,
1283 tree otr_type,
1284 gimple call)
1285 {
1286 struct type_change_info tci;
1287 ao_ref ao;
1288 bool function_entry_reached = false;
1289 tree instance_ref = NULL;
1290 gimple stmt = call;
1291 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1292 This is because we do not update INSTANCE when walking inwards. */
1293 HOST_WIDE_INT instance_offset = offset;
1294
1295 otr_type = TYPE_MAIN_VARIANT (otr_type);
1296
1297 /* Walk into the inner type. This may clear maybe_derived_type and save us
1298 from useless work. It also makes later comparisons with the static type
1299 easier. */
1300 if (outer_type)
1301 {
1302 if (!restrict_to_inner_class (otr_type))
1303 return false;
1304 }
1305
1306 if (!maybe_in_construction && !maybe_derived_type)
1307 return false;
1308
1309 /* We need to obtain a reference to the virtual table pointer. It is better
1310 to look it up in the code rather than to build our own. This requires a bit
1311 of pattern matching, but we end up verifying that what we found is
1312 correct.
1313
1314 What we pattern match is:
1315
1316 tmp = instance->_vptr.A; // vtbl ptr load
1317 tmp2 = tmp[otr_token]; // vtable lookup
1318 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1319
1320 We want to start the alias oracle walk from the vtbl pointer load,
1321 but we may not be able to identify it, for example, when PRE moved the
1322 load around. */
1323
1324 if (gimple_code (call) == GIMPLE_CALL)
1325 {
1326 tree ref = gimple_call_fn (call);
1327 HOST_WIDE_INT offset2, size, max_size;
1328
1329 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1330 {
1331 ref = OBJ_TYPE_REF_EXPR (ref);
1332 ref = walk_ssa_copies (ref);
1333
1334 /* Check if definition looks like vtable lookup. */
1335 if (TREE_CODE (ref) == SSA_NAME
1336 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1337 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1338 && TREE_CODE (gimple_assign_rhs1
1339 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1340 {
1341 ref = get_base_address
1342 (TREE_OPERAND (gimple_assign_rhs1
1343 (SSA_NAME_DEF_STMT (ref)), 0));
1344 ref = walk_ssa_copies (ref);
1345 /* Find base address of the lookup and see if it looks like
1346 vptr load. */
1347 if (TREE_CODE (ref) == SSA_NAME
1348 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1349 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1350 {
1351 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1352 tree base_ref = get_ref_base_and_extent
1353 (ref_exp, &offset2, &size, &max_size);
1354
1355 /* Finally verify that what we found looks like read from OTR_OBJECT
1356 or from INSTANCE with offset OFFSET. */
1357 if (base_ref
1358 && ((TREE_CODE (base_ref) == MEM_REF
1359 && ((offset2 == instance_offset
1360 && TREE_OPERAND (base_ref, 0) == instance)
1361 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
1362 || (DECL_P (instance) && base_ref == instance
1363 && offset2 == instance_offset)))
1364 {
1365 stmt = SSA_NAME_DEF_STMT (ref);
1366 instance_ref = ref_exp;
1367 }
1368 }
1369 }
1370 }
1371 }
1372
1373 /* If we failed to look up the reference in code, build our own. */
1374 if (!instance_ref)
1375 {
1376 /* If the statement in question does not use memory, we can't tell
1377 anything. */
1378 if (!gimple_vuse (stmt))
1379 return false;
1380 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1381 }
1382 else
1383 /* Otherwise use the real reference. */
1384 ao_ref_init (&ao, instance_ref);
1385
1386 /* We look for vtbl pointer read. */
1387 ao.size = POINTER_SIZE;
1388 ao.max_size = ao.size;
1389 ao.ref_alias_set
1390 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1391
1392 if (dump_file)
1393 {
1394 fprintf (dump_file, "Determining dynamic type for call: ");
1395 print_gimple_stmt (dump_file, call, 0, 0);
1396 fprintf (dump_file, " Starting walk at: ");
1397 print_gimple_stmt (dump_file, stmt, 0, 0);
1398 fprintf (dump_file, " instance pointer: ");
1399 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1400 fprintf (dump_file, " Outer instance pointer: ");
1401 print_generic_expr (dump_file, instance, TDF_SLIM);
1402 fprintf (dump_file, " offset: %i (bits)", (int)offset);
1403 fprintf (dump_file, " vtbl reference: ");
1404 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1405 fprintf (dump_file, "\n");
1406 }
1407
1408 tci.offset = offset;
1409 tci.instance = instance;
1410 tci.vtbl_ptr_ref = instance_ref;
1411 gcc_assert (TREE_CODE (instance) != MEM_REF);
1412 tci.known_current_type = NULL_TREE;
1413 tci.known_current_offset = 0;
1414 tci.otr_type = otr_type;
1415 tci.type_maybe_changed = false;
1416 tci.multiple_types_encountered = false;
1417 tci.speculative = false;
1418 tci.seen_unanalyzed_store = false;
1419
1420 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1421 &tci, NULL, &function_entry_reached);
1422
1423 /* If we did not find any type changing statements, we may still drop
1424 the maybe_in_construction flag if the context already has an outer type.
1425
1426 Here we make special assumptions about both constructors and
1427 destructors which are all the functions that are allowed to alter the
1428 VMT pointers. It assumes that destructors begin with assignment into
1429 all VMT pointers and that constructors essentially look in the
1430 following way:
1431
1432 1) The very first thing they do is that they call constructors of
1433 ancestor sub-objects that have them.
1434
1435 2) Then the VMT pointers of this and all its ancestors are set to new
1436 values corresponding to the type associated with the constructor.
1437
1438 3) Only afterwards, other stuff such as constructor of member
1439 sub-objects and the code written by the user is run. Only this may
1440 include calling virtual functions, directly or indirectly.
1441
1442 4) placement new cannot be used to change the type of non-POD statically
1443 allocated variables.
1444
1445 There is no way to call a constructor of an ancestor sub-object in any
1446 other way.
1447
1448 This means that we do not have to care whether constructors get the
1449 correct type information because they will always change it (in fact,
1450 if we define the type to be given by the VMT pointer, it is undefined).
1451
1452 The most important fact to derive from the above is that if, for some
1453 statement in the section 3, we try to detect whether the dynamic type
1454 has changed, we can safely ignore all calls as we examine the function
1455 body backwards until we reach statements in section 2 because these
1456 calls cannot be ancestor constructors or destructors (if the input is
1457 not bogus) and so do not change the dynamic type (this holds true only
1458 for automatically allocated objects but at the moment we devirtualize
1459 only these). We then must detect that statements in section 2 change
1460 the dynamic type and can try to derive the new type. That is enough
1461 and we can stop, we will never see the calls into constructors of
1462 sub-objects in this code.
1463
1464 Therefore if the static outer type was found (outer_type)
1465 we can safely ignore tci.speculative that is set on calls and give up
1466 only if there was a dynamic type store that may affect the given variable
1467 (seen_unanalyzed_store). */
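
/* Illustration (a hypothetical C++ example, not part of the original
   source) of the ordering assumed above:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };
     B::B ()
     {              // 1) A::A () has already run for the base sub-object
                    // 2) the vtable pointer of *this is switched to B's vtable
       f ();        // 3) user code; virtual calls now resolve against B
     }

   Walking the virtual definitions backwards from a virtual call in section 3,
   the first vtable store encountered is the one from section 2, so the
   intervening calls can be ignored without losing the dynamic type. */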
1468
1469 if (!tci.type_maybe_changed
1470 || (outer_type
1471 && !tci.seen_unanalyzed_store
1472 && !tci.multiple_types_encountered
1473 && offset == tci.offset
1474 && types_same_for_odr (tci.known_current_type,
1475 outer_type)))
1476 {
1477 if (!outer_type || tci.seen_unanalyzed_store)
1478 return false;
1479 if (maybe_in_construction)
1480 maybe_in_construction = false;
1481 if (dump_file)
1482 fprintf (dump_file, " No dynamic type change found.\n");
1483 return true;
1484 }
1485
1486 if (tci.known_current_type
1487 && !function_entry_reached
1488 && !tci.multiple_types_encountered)
1489 {
1490 if (!tci.speculative)
1491 {
1492 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1493 offset = tci.known_current_offset;
1494 maybe_in_construction = false;
1495 maybe_derived_type = false;
1496 if (dump_file)
1497 fprintf (dump_file, " Determined dynamic type.\n");
1498 }
1499 else if (!speculative_outer_type
1500 || speculative_maybe_derived_type)
1501 {
1502 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1503 speculative_offset = tci.known_current_offset;
1504 speculative_maybe_derived_type = false;
1505 if (dump_file)
1506 fprintf (dump_file, " Determined speculative dynamic type.\n");
1507 }
1508 }
1509 else if (dump_file)
1510 {
1511 fprintf (dump_file, " Found multiple types%s%s\n",
1512 function_entry_reached ? " (function entry reached)" : "",
1513 tci.multiple_types_encountered ? " (multiple types encountered)" : "");
1514 }
1515
1516 return true;
1517 }
1518