]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-polymorphic-call.c
ipa-polymorphic-call.c (possible_placement_new): Fix condition on size.
[thirdparty/gcc.git] / gcc / ipa-polymorphic-call.c
1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "print-tree.h"
27 #include "calls.h"
28 #include "expr.h"
29 #include "tree-pass.h"
30 #include "hash-set.h"
31 #include "target.h"
32 #include "hash-table.h"
33 #include "inchash.h"
34 #include "tree-pretty-print.h"
35 #include "ipa-utils.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "ipa-inline.h"
42 #include "diagnostic.h"
43 #include "tree-dfa.h"
44 #include "demangle.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "stor-layout.h"
48 #include "intl.h"
49 #include "data-streamer.h"
50 #include "lto-streamer.h"
51 #include "streamer-hooks.h"
52
53 /* Return true when TYPE contains an polymorphic type and thus is interesting
54 for devirtualization machinery. */
55
56 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
57 bool consider_placement_new = true,
58 bool consider_bases = true);
59
60 bool
61 contains_polymorphic_type_p (const_tree type)
62 {
63 type = TYPE_MAIN_VARIANT (type);
64
65 if (RECORD_OR_UNION_TYPE_P (type))
66 {
67 if (TYPE_BINFO (type)
68 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
69 return true;
70 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
71 if (TREE_CODE (fld) == FIELD_DECL
72 && !DECL_ARTIFICIAL (fld)
73 && contains_polymorphic_type_p (TREE_TYPE (fld)))
74 return true;
75 return false;
76 }
77 if (TREE_CODE (type) == ARRAY_TYPE)
78 return contains_polymorphic_type_p (TREE_TYPE (type));
79 return false;
80 }
81
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play safe and assume that any
   non-polymorphic type is POD.  */
bool
possible_placement_new (tree type, tree expected_type,
			HOST_WIDE_INT cur_offset)
{
  /* Two conditions must hold:
     1) Either TYPE itself is not polymorphic, or CUR_OFFSET lies past the
	first word (a placement new there could not clobber TYPE's own
	vtable pointer, which lives at offset 0).
     2) EXPECTED_TYPE (or, when unknown, at least one pointer-sized word
	for the vptr) fits within TYPE's size — unless TYPE's size is not
	a known constant, in which case we conservatively allow it.  */
  return ((TREE_CODE (type) != RECORD_TYPE
	   || !TYPE_BINFO (type)
	   || cur_offset >= BITS_PER_WORD
	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
	  && (!TYPE_SIZE (type)
	      || !tree_fits_shwi_p (TYPE_SIZE (type))
	      || (cur_offset
		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
		     : GET_MODE_BITSIZE (Pmode))
		  <= tree_to_uhwi (TYPE_SIZE (type)))));
}
103
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that match
   OTR_TYPE or contain OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find outermost polymorphic type with
   virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */

bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  /* Fallback size when OTR_TYPE is NULL or of unknown size: one pointer,
     enough to hold the vtable pointer.  */
  unsigned HOST_WIDE_INT otr_type_size = GET_MODE_BITSIZE (Pmode);

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
 /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
    that the context is either invalid, or the instance type must be
    derived from OUTER_TYPE.

    Because the instance type may contain field whose type is of OUTER_TYPE,
    we can not derive any effective information about it.

    TODO: In the case we know all derived types, we can definitely do better
    here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
   {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;

      /* If derived type is not allowed, we know that the context is invalid.
	 For dynamic types, we really do not have information about
	 size of the memory location.  It is possible that completely
	 different type is stored after outer_type.  */
      if (!maybe_derived_type && !dynamic)
	{
	  clear_speculation ();
	  invalid = true;
	  return false;
	}
   }

  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
         about accepting cases where we can not find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_known_to_have_no_deriavations_p (outer_type))
		maybe_derived_type = false;

	      /* Type can not contain itself on an non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
					        speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
		  return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding on at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those can not contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + GET_MODE_BITSIZE (Pmode)
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  /* Descend into the matched field and retry the loop.  */
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = subtype;
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
	no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		  type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
					            speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
451
452 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
453 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
454 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
455 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
456 base of one of fields of OUTER_TYPE. */
457
458 static bool
459 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
460 tree otr_type,
461 bool consider_placement_new,
462 bool consider_bases)
463 {
464 ipa_polymorphic_call_context context;
465
466 /* Check that type is within range. */
467 if (offset < 0)
468 return false;
469 if (TYPE_SIZE (outer_type) && TYPE_SIZE (otr_type)
470 && TREE_CODE (outer_type) == INTEGER_CST
471 && TREE_CODE (otr_type) == INTEGER_CST
472 && wi::ltu_p (wi::to_offset (outer_type), (wi::to_offset (otr_type) + offset)))
473 return false;
474
475 context.offset = offset;
476 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
477 context.maybe_derived_type = false;
478 return context.restrict_to_inner_class (otr_type, consider_placement_new, consider_bases);
479 }
480
481
/* We know that the instance is stored in variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   BASE represents memory location where instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is known type of the instance or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in constructor of corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */

bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  /* Walk the inline stack recorded in the BLOCK tree; if any inlined
     frame is a ctor/dtor of a type compatible with OUTER_TYPE, the
     object may be in construction.  */
  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (BLOCK_ABSTRACT_ORIGIN (block)
	&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
      {
	tree fn = BLOCK_ABSTRACT_ORIGIN (block);

	if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
	    || (!DECL_CXX_CONSTRUCTOR_P (fn)
		&& !DECL_CXX_DESTRUCTOR_P (fn)))
	  {
	    /* Watch for clones where we constant propagated the first
	       argument (pointer to the instance).  */
	    fn = DECL_ABSTRACT_ORIGIN (fn);
	    if (!fn
		|| (base && !is_global_var (base))
		|| TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
		|| (!DECL_CXX_CONSTRUCTOR_P (fn)
		    && !DECL_CXX_DESTRUCTOR_P (fn)))
	      continue;
	  }
	if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
	  continue;

	tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));

	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    if (TREE_CODE (type) == RECORD_TYPE
		&& TYPE_BINFO (type)
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  /* For global variables (or unknown BASE) also check whether FUNCTION
     itself is a ctor/dtor of a compatible type.  */
  if (!base || (TREE_CODE (base) == VAR_DECL && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
586
/* Dump human readable context to F.  NOTE(review): whitespace inside the
   format strings below may have been collapsed by the source extraction;
   confirm the exact padding against upstream before relying on the dump
   layout.  */

void
ipa_polymorphic_call_context::dump (FILE *f) const
{
  fprintf (f, " ");
  if (invalid)
    fprintf (f, "Call is known to be undefined");
  else
    {
      /* "nothing known" and the outer-type report are not mutually
	 exclusive here; a useless context simply prints the former and
	 skips the rest because outer_type/offset are unset.  */
      if (useless_p ())
	fprintf (f, "nothing known");
      if (outer_type || offset)
	{
	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
	  print_generic_expr (f, outer_type, TDF_SLIM);
	  if (maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  if (maybe_in_construction)
	    fprintf (f, " (maybe in construction)");
	  fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
		   offset);
	}
      if (speculative_outer_type)
	{
	  if (outer_type || offset)
	    fprintf (f, " ");
	  fprintf (f, "Speculative outer type:");
	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
	  if (speculative_maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
		   speculative_offset);
	}
    }
  fprintf(f, "\n");
}
624
/* Print context to stderr.  Convenience wrapper around dump () for use
   from a debugger.  */

void
ipa_polymorphic_call_context::debug () const
{
  dump (stderr);
}
632
/* Stream out the context to OB.  The bitpack layout and the order of the
   optional payloads must stay in exact sync with stream_in below.  */

void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* Flags first, then presence bits for the optional trees/offsets.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    /* A speculative offset without a speculative type would be lost on
       the way; it must never happen.  */
    gcc_assert (!speculative_offset);
}
662
/* Stream in the context from IB and DATA_IN.  Mirror image of stream_out;
   the unpack order must match the pack order exactly.  */

void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  /* Optional payloads follow in the same order they were written;
     absent fields are reset so the context is fully initialized.  */
  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
699
700 /* Proudce polymorphic call context for call method of instance
701 that is located within BASE (that is assumed to be a decl) at offset OFF. */
702
703 void
704 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
705 {
706 gcc_assert (DECL_P (base));
707 clear_speculation ();
708
709 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
710 {
711 clear_outer_type ();
712 offset = off;
713 return;
714 }
715 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
716 offset = off;
717 /* Make very conservative assumption that all objects
718 may be in construction.
719
720 It is up to caller to revisit this via
721 get_dynamic_type or decl_maybe_in_construction_p. */
722 maybe_in_construction = true;
723 maybe_derived_type = false;
724 dynamic = false;
725 }
726
/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   of instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  invalid = false;
  /* NOTE(review): the OFF parameter is clobbered to zero here, so the
     context is always built for offset 0 of the decl regardless of the
     caller's offset.  Looks intentional, but confirm against callers.  */
  off = 0;
  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  /* Look through the address to the underlying decl and require an
     exactly-sized constant reference.  */
  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
760
/* See if OP is SSA name initialized as a copy or by single assignment.
   If so, walk the SSA graph up.  Because simple PHI conditional is considered
   copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
   graph.  */

static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 && SSA_NAME_DEF_STMT (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Cycle detection: use the caller-shared set when provided,
	 otherwise a local one that is freed at DONE.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple phi = SSA_NAME_DEF_STMT (op);

	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* Memory loads are not copies; stop at them.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
828
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
   is offset of call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  /* Reset speculation first; set_by_invariant fills in the rest (or
     leaves a cleared context when nothing meaningful is found).  */
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
841
/* Build context for pointer REF contained in FNDECL at statement STMT.
   if INSTANCE is non-NULL, return pointer to the object described by
   the context or DECL where context is contained in.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  Each iteration either strips a level of
     indirection (ADDR_EXPR of MEM_REF), accumulates a constant pointer
     adjustment, or terminates the walk.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  if (max_size != -1 && max_size == size)
	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				      offset + offset2,
				      true,
				      NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  offset
		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     outer_type,
						     stmt,
						     fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  /* Constant pointer adjustment: fold into OFFSET and continue
	     walking from the adjusted pointer.  */
	  offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		    * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default-def SSA name that is not a parameter (e.g. an uninitialized
     variable) gives an undefined object; the context is invalid.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  if (POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
1057
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  It accumulates what the alias-oracle walk
   (walk_aliased_vdefs with check_stmt_for_type_change as callback) learns
   about possible dynamic type changes of a given instance.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the call being analyzed (OBJ_TYPE_REF class); used to
     restrict_to_inner_class when recording a discovered type.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Offset within KNOWN_CURRENT_TYPE that corresponds to OFFSET above.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true if we possibly missed some dynamic type changes and we should
     consider the set to be speculative.  */
  bool speculative;
  /* Set to true when a store to the vtbl pointer was seen but could not be
     analyzed; any conclusion drawn afterwards is unreliable.  */
  bool seen_unanalyzed_store;
};
1087
/* Return true if STMT is not call and can modify a virtual method table pointer.
   We take advantage of fact that vtable stores must appear within constructor
   and destructor functions.  */

static bool
noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Clobbers (end-of-life markers) never store a vtable pointer.  */
      if (gimple_clobber_p (stmt))
	return false;
      /* Aggregate stores may copy a contained vtbl pointer, so only
	 scalar stores can be filtered out here.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing a vtbl pointer can only be stored
	     through a pointer-typed lhs.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a field that is not DECL_VIRTUAL_P is not a
	     vtbl pointer store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }

  /* Code unification may mess with inline stacks.  */
  if (cfun->after_inlining)
    return true;

  /* Walk the inline stack and watch out for ctors/dtors.
     TODO: Maybe we can require the store to appear in toplevel
     block of CTOR/DTOR.  */
  for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (BLOCK_ABSTRACT_ORIGIN (block)
	&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
      {
	tree fn = BLOCK_ABSTRACT_ORIGIN (block);

	/* Pure/const functions can not alter memory at all.  */
	if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
	  return false;
	/* Only inlined constructors and destructors may legitimately
	   store a vtbl pointer.  */
	return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		&& (DECL_CXX_CONSTRUCTOR_P (fn)
		    || DECL_CXX_DESTRUCTOR_P (fn)));
      }
  /* No inline stack; decide based on the function we are analyzing.  */
  return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
}
1140
1141 /* If STMT can be proved to be an assignment to the virtual method table
1142 pointer of ANALYZED_OBJ and the type associated with the new table
1143 identified, return the type. Otherwise return NULL_TREE. */
1144
1145 static tree
1146 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
1147 HOST_WIDE_INT *type_offset)
1148 {
1149 HOST_WIDE_INT offset, size, max_size;
1150 tree lhs, rhs, base;
1151
1152 if (!gimple_assign_single_p (stmt))
1153 return NULL_TREE;
1154
1155 lhs = gimple_assign_lhs (stmt);
1156 rhs = gimple_assign_rhs1 (stmt);
1157 if (TREE_CODE (lhs) != COMPONENT_REF
1158 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1159 {
1160 if (dump_file)
1161 fprintf (dump_file, " LHS is not virtual table.\n");
1162 return NULL_TREE;
1163 }
1164
1165 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1166 ;
1167 else
1168 {
1169 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1170 if (offset != tci->offset
1171 || size != POINTER_SIZE
1172 || max_size != POINTER_SIZE)
1173 {
1174 if (dump_file)
1175 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
1176 (int)offset, (int)tci->offset, (int)size);
1177 return NULL_TREE;
1178 }
1179 if (DECL_P (tci->instance))
1180 {
1181 if (base != tci->instance)
1182 {
1183 if (dump_file)
1184 {
1185 fprintf (dump_file, " base:");
1186 print_generic_expr (dump_file, base, TDF_SLIM);
1187 fprintf (dump_file, " does not match instance:");
1188 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1189 fprintf (dump_file, "\n");
1190 }
1191 return NULL_TREE;
1192 }
1193 }
1194 else if (TREE_CODE (base) == MEM_REF)
1195 {
1196 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
1197 || !integer_zerop (TREE_OPERAND (base, 1)))
1198 {
1199 if (dump_file)
1200 {
1201 fprintf (dump_file, " base mem ref:");
1202 print_generic_expr (dump_file, base, TDF_SLIM);
1203 fprintf (dump_file, " has nonzero offset or does not match instance:");
1204 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1205 fprintf (dump_file, "\n");
1206 }
1207 return NULL_TREE;
1208 }
1209 }
1210 else if (!operand_equal_p (tci->instance, base, 0)
1211 || tci->offset)
1212 {
1213 if (dump_file)
1214 {
1215 fprintf (dump_file, " base:");
1216 print_generic_expr (dump_file, base, TDF_SLIM);
1217 fprintf (dump_file, " does not match instance:");
1218 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1219 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1220 }
1221 return NULL_TREE;
1222 }
1223 }
1224
1225 tree vtable;
1226 unsigned HOST_WIDE_INT offset2;
1227
1228 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1229 {
1230 if (dump_file)
1231 fprintf (dump_file, " Failed to lookup binfo\n");
1232 return NULL;
1233 }
1234
1235 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1236 offset2, vtable);
1237 if (!binfo)
1238 {
1239 if (dump_file)
1240 fprintf (dump_file, " Construction vtable used\n");
1241 /* FIXME: We should suport construction contexts. */
1242 return NULL;
1243 }
1244
1245 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1246 return DECL_CONTEXT (vtable);
1247 }
1248
/* Record dynamic type change of TCI to TYPE at OFFSET (in bits).
   Sets TCI->multiple_types_encountered if this disagrees with a
   previously recorded type/offset pair.  */

static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
	  fprintf (dump_file, " Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
	  fprintf (dump_file, " at offset %i\n", (int)offset);
	}
      else
	fprintf (dump_file, " Recording unknown type\n");
    }

  /* If we found a constructor of type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
	  || (TREE_CODE (type) != RECORD_TYPE
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      /* Build a temporary context describing TYPE at OFFSET and walk
	 inwards towards TCI->otr_type.  */
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, " Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for case we reached a POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
	  type = context.outer_type;
	  offset = context.offset;
	}
    }
  /* Disagreement with an earlier recorded type invalidates the result.  */
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
1306
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.
   Returning true terminates the walk along this vdef chain; returning
   false continues walking to earlier definitions.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Pure/const calls can not alter the vtbl pointer; keep walking.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
	{
	  tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	  tree type = method_class_type (TREE_TYPE (fn));
	  HOST_WIDE_INT offset = 0, size, max_size;

	  if (dump_file)
	    {
	      fprintf (dump_file, " Checking constructor call: ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  /* See if THIS parameter seems like instance pointer.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    {
	      op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
					    &offset, &size, &max_size);
	      /* A variable-sized or unknown-extent access can not be
		 matched reliably; keep the result speculative.  */
	      if (size != max_size || max_size == -1)
		{
		  tci->speculative = true;
		  return false;
		}
	      if (op && TREE_CODE (op) == MEM_REF)
		{
		  /* Fold the MEM_REF offset into OFFSET (converted from
		     bytes to bits).  */
		  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		    {
		      tci->speculative = true;
		      return false;
		    }
		  offset += tree_to_shwi (TREE_OPERAND (op, 1))
			    * BITS_PER_UNIT;
		  op = TREE_OPERAND (op, 0);
		}
	      else if (DECL_P (op))
		;
	      else
		{
		  tci->speculative = true;
		  return false;
		}
	      op = walk_ssa_copies (op);
	    }
	  /* Accept the constructed type only if the constructor runs on
	     our instance and its type is large enough to cover the vtbl
	     pointer at TCI->offset.  */
	  if (operand_equal_p (op, tci->instance, 0)
	      && TYPE_SIZE (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && tree_fits_shwi_p (TYPE_SIZE (type))
	      && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
	    {
	      record_known_type (tci, type, tci->offset - offset);
	      return true;
	    }
	}
      /* Calls may possibly change dynamic type by placement new.  Assume
	 it will not happen, but make result speculative only.  */
      if (dump_file)
	{
	  fprintf (dump_file, " Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      tci->speculative = true;
      return false;
    }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, " Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, " Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative = true;
	}
      else
	record_known_type (tci, type, offset);
      return true;
    }
  else
    /* Statement can not modify the vtbl pointer; keep walking.  */
    return false;
}
1422
1423 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1424 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1425 INSTANCE is pointer to the outer instance as returned by
1426 get_polymorphic_context. To avoid creation of temporary expressions,
1427 INSTANCE may also be an declaration of get_polymorphic_context found the
1428 value to be in static storage.
1429
1430 If the type of instance is not fully determined
1431 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1432 is set), try to walk memory writes and find the actual construction of the
1433 instance.
1434
1435 Return true if memory is unchanged from function entry.
1436
1437 We do not include this analysis in the context analysis itself, because
1438 it needs memory SSA to be fully built and the walk may be expensive.
1439 So it is not suitable for use withing fold_stmt and similar uses. */
1440
1441 bool
1442 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1443 tree otr_object,
1444 tree otr_type,
1445 gimple call)
1446 {
1447 struct type_change_info tci;
1448 ao_ref ao;
1449 bool function_entry_reached = false;
1450 tree instance_ref = NULL;
1451 gimple stmt = call;
1452 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1453 This is because we do not update INSTANCE when walking inwards. */
1454 HOST_WIDE_INT instance_offset = offset;
1455
1456 if (otr_type)
1457 otr_type = TYPE_MAIN_VARIANT (otr_type);
1458
1459 /* Walk into inner type. This may clear maybe_derived_type and save us
1460 from useless work. It also makes later comparsions with static type
1461 easier. */
1462 if (outer_type && otr_type)
1463 {
1464 if (!restrict_to_inner_class (otr_type))
1465 return false;
1466 }
1467
1468 if (!maybe_in_construction && !maybe_derived_type)
1469 return false;
1470
1471 /* We need to obtain refernce to virtual table pointer. It is better
1472 to look it up in the code rather than build our own. This require bit
1473 of pattern matching, but we end up verifying that what we found is
1474 correct.
1475
1476 What we pattern match is:
1477
1478 tmp = instance->_vptr.A; // vtbl ptr load
1479 tmp2 = tmp[otr_token]; // vtable lookup
1480 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1481
1482 We want to start alias oracle walk from vtbl pointer load,
1483 but we may not be able to identify it, for example, when PRE moved the
1484 load around. */
1485
1486 if (gimple_code (call) == GIMPLE_CALL)
1487 {
1488 tree ref = gimple_call_fn (call);
1489 HOST_WIDE_INT offset2, size, max_size;
1490
1491 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1492 {
1493 ref = OBJ_TYPE_REF_EXPR (ref);
1494 ref = walk_ssa_copies (ref);
1495
1496 /* Check if definition looks like vtable lookup. */
1497 if (TREE_CODE (ref) == SSA_NAME
1498 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1499 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1500 && TREE_CODE (gimple_assign_rhs1
1501 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1502 {
1503 ref = get_base_address
1504 (TREE_OPERAND (gimple_assign_rhs1
1505 (SSA_NAME_DEF_STMT (ref)), 0));
1506 ref = walk_ssa_copies (ref);
1507 /* Find base address of the lookup and see if it looks like
1508 vptr load. */
1509 if (TREE_CODE (ref) == SSA_NAME
1510 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1511 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1512 {
1513 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1514 tree base_ref = get_ref_base_and_extent
1515 (ref_exp, &offset2, &size, &max_size);
1516
1517 /* Finally verify that what we found looks like read from OTR_OBJECT
1518 or from INSTANCE with offset OFFSET. */
1519 if (base_ref
1520 && ((TREE_CODE (base_ref) == MEM_REF
1521 && ((offset2 == instance_offset
1522 && TREE_OPERAND (base_ref, 0) == instance)
1523 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
1524 || (DECL_P (instance) && base_ref == instance
1525 && offset2 == instance_offset)))
1526 {
1527 stmt = SSA_NAME_DEF_STMT (ref);
1528 instance_ref = ref_exp;
1529 }
1530 }
1531 }
1532 }
1533 }
1534
1535 /* If we failed to look up the refernece in code, build our own. */
1536 if (!instance_ref)
1537 {
1538 /* If the statement in question does not use memory, we can't tell
1539 anything. */
1540 if (!gimple_vuse (stmt))
1541 return false;
1542 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1543 }
1544 else
1545 /* Otherwise use the real reference. */
1546 ao_ref_init (&ao, instance_ref);
1547
1548 /* We look for vtbl pointer read. */
1549 ao.size = POINTER_SIZE;
1550 ao.max_size = ao.size;
1551 if (otr_type)
1552 ao.ref_alias_set
1553 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1554
1555 if (dump_file)
1556 {
1557 fprintf (dump_file, "Determining dynamic type for call: ");
1558 print_gimple_stmt (dump_file, call, 0, 0);
1559 fprintf (dump_file, " Starting walk at: ");
1560 print_gimple_stmt (dump_file, stmt, 0, 0);
1561 fprintf (dump_file, " instance pointer: ");
1562 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1563 fprintf (dump_file, " Outer instance pointer: ");
1564 print_generic_expr (dump_file, instance, TDF_SLIM);
1565 fprintf (dump_file, " offset: %i (bits)", (int)offset);
1566 fprintf (dump_file, " vtbl reference: ");
1567 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1568 fprintf (dump_file, "\n");
1569 }
1570
1571 tci.offset = offset;
1572 tci.instance = instance;
1573 tci.vtbl_ptr_ref = instance_ref;
1574 gcc_assert (TREE_CODE (instance) != MEM_REF);
1575 tci.known_current_type = NULL_TREE;
1576 tci.known_current_offset = 0;
1577 tci.otr_type = otr_type;
1578 tci.type_maybe_changed = false;
1579 tci.multiple_types_encountered = false;
1580 tci.speculative = false;
1581 tci.seen_unanalyzed_store = false;
1582
1583 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1584 &tci, NULL, &function_entry_reached);
1585
1586 /* If we did not find any type changing statements, we may still drop
1587 maybe_in_construction flag if the context already have outer type.
1588
1589 Here we make special assumptions about both constructors and
1590 destructors which are all the functions that are allowed to alter the
1591 VMT pointers. It assumes that destructors begin with assignment into
1592 all VMT pointers and that constructors essentially look in the
1593 following way:
1594
1595 1) The very first thing they do is that they call constructors of
1596 ancestor sub-objects that have them.
1597
1598 2) Then VMT pointers of this and all its ancestors is set to new
1599 values corresponding to the type corresponding to the constructor.
1600
1601 3) Only afterwards, other stuff such as constructor of member
1602 sub-objects and the code written by the user is run. Only this may
1603 include calling virtual functions, directly or indirectly.
1604
1605 4) placement new can not be used to change type of non-POD statically
1606 allocated variables.
1607
1608 There is no way to call a constructor of an ancestor sub-object in any
1609 other way.
1610
1611 This means that we do not have to care whether constructors get the
1612 correct type information because they will always change it (in fact,
1613 if we define the type to be given by the VMT pointer, it is undefined).
1614
1615 The most important fact to derive from the above is that if, for some
1616 statement in the section 3, we try to detect whether the dynamic type
1617 has changed, we can safely ignore all calls as we examine the function
1618 body backwards until we reach statements in section 2 because these
1619 calls cannot be ancestor constructors or destructors (if the input is
1620 not bogus) and so do not change the dynamic type (this holds true only
1621 for automatically allocated objects but at the moment we devirtualize
1622 only these). We then must detect that statements in section 2 change
1623 the dynamic type and can try to derive the new type. That is enough
1624 and we can stop, we will never see the calls into constructors of
1625 sub-objects in this code.
1626
1627 Therefore if the static outer type was found (outer_type)
1628 we can safely ignore tci.speculative that is set on calls and give up
1629 only if there was dyanmic type store that may affect given variable
1630 (seen_unanalyzed_store) */
1631
1632 if (!tci.type_maybe_changed
1633 || (outer_type
1634 && !dynamic
1635 && !tci.seen_unanalyzed_store
1636 && !tci.multiple_types_encountered
1637 && offset == tci.offset
1638 && types_same_for_odr (tci.known_current_type,
1639 outer_type)))
1640 {
1641 if (!outer_type || tci.seen_unanalyzed_store)
1642 return false;
1643 if (maybe_in_construction)
1644 maybe_in_construction = false;
1645 if (dump_file)
1646 fprintf (dump_file, " No dynamic type change found.\n");
1647 return true;
1648 }
1649
1650 if (tci.known_current_type
1651 && !function_entry_reached
1652 && !tci.multiple_types_encountered)
1653 {
1654 if (!tci.speculative)
1655 {
1656 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1657 offset = tci.known_current_offset;
1658 dynamic = true;
1659 maybe_in_construction = false;
1660 maybe_derived_type = false;
1661 if (dump_file)
1662 fprintf (dump_file, " Determined dynamic type.\n");
1663 }
1664 else if (!speculative_outer_type
1665 || speculative_maybe_derived_type)
1666 {
1667 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1668 speculative_offset = tci.known_current_offset;
1669 speculative_maybe_derived_type = false;
1670 if (dump_file)
1671 fprintf (dump_file, " Determined speculative dynamic type.\n");
1672 }
1673 }
1674 else if (dump_file)
1675 {
1676 fprintf (dump_file, " Found multiple types%s%s\n",
1677 function_entry_reached ? " (function entry reached)" : "",
1678 function_entry_reached ? " (multiple types encountered)" : "");
1679 }
1680
1681 return false;
1682 }
1683
1684 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1685 seems consistent (and useful) with what we already have in the non-speculative context. */
1686
1687 bool
1688 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1689 HOST_WIDE_INT spec_offset,
1690 bool spec_maybe_derived_type,
1691 tree otr_type)
1692 {
1693 if (!flag_devirtualize_speculatively)
1694 return false;
1695
1696 /* Non-polymorphic types are useless for deriving likely polymorphic
1697 call targets. */
1698 if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1699 return false;
1700
1701 /* If we know nothing, speculation is always good. */
1702 if (!outer_type)
1703 return true;
1704
1705 /* Speculation is only useful to avoid derived types.
1706 This is not 100% true for placement new, where the outer context may
1707 turn out to be useless, but ignore these for now. */
1708 if (!maybe_derived_type)
1709 return false;
1710
1711 /* If types agrees, speculation is consistent, but it makes sense only
1712 when it says something new. */
1713 if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1714 return maybe_derived_type && !spec_maybe_derived_type;
1715
1716 /* If speculation does not contain the type in question, ignore it. */
1717 if (otr_type
1718 && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1719 return false;
1720
1721 /* If outer type already contains speculation as a filed,
1722 it is useless. We already know from OUTER_TYPE
1723 SPEC_TYPE and that it is not in the construction. */
1724 if (contains_type_p (outer_type, offset - spec_offset,
1725 spec_outer_type, false, false))
1726 return false;
1727
1728 /* If speculative outer type is not more specified than outer
1729 type, just give up.
1730 We can only decide this safely if we can compare types with OUTER_TYPE.
1731 */
1732 if ((!in_lto_p || odr_type_p (outer_type))
1733 && !contains_type_p (spec_outer_type,
1734 spec_offset - offset,
1735 outer_type, false))
1736 return false;
1737 return true;
1738 }
1739
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE.  Return true if anything changed.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Reject speculation that conflicts with, or adds nothing over, the
     non-speculative part of the context.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seem valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type and offset; the only possible improvement is
	     clearing the maybe_derived_type flag.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      /* Save the current speculation so it can be restored if the new one
	 is rejected by restrict_to_inner_class below.  */
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
1836
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
1839 If actual type the context is being used in is known, OTR_TYPE should be
1840 set accordingly. This improves quality of combined result. */
1841
1842 bool
1843 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
1844 tree otr_type)
1845 {
1846 bool updated = false;
1847
1848 if (ctx.useless_p () || invalid)
1849 return false;
1850
1851 /* Restricting context to inner type makes merging easier, however do not
1852 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
1853 if (otr_type && !invalid && !ctx.invalid)
1854 {
1855 restrict_to_inner_class (otr_type);
1856 ctx.restrict_to_inner_class (otr_type);
1857 if(invalid)
1858 return false;
1859 }
1860
1861 if (dump_file && (dump_flags & TDF_DETAILS))
1862 {
1863 fprintf (dump_file, "Polymorphic call context combine:");
1864 dump (dump_file);
1865 fprintf (dump_file, "With context: ");
1866 ctx.dump (dump_file);
1867 if (otr_type)
1868 {
1869 fprintf (dump_file, "To be used with type: ");
1870 print_generic_expr (dump_file, otr_type, TDF_SLIM);
1871 fprintf (dump_file, "\n");
1872 }
1873 }
1874
1875 /* If call is known to be invalid, we are done. */
1876 if (ctx.invalid)
1877 {
1878 if (dump_file && (dump_flags & TDF_DETAILS))
1879 fprintf (dump_file, "-> Invalid context\n");
1880 goto invalidate;
1881 }
1882
1883 if (!ctx.outer_type)
1884 ;
1885 else if (!outer_type)
1886 {
1887 outer_type = ctx.outer_type;
1888 offset = ctx.offset;
1889 dynamic = ctx.dynamic;
1890 maybe_in_construction = ctx.maybe_in_construction;
1891 maybe_derived_type = ctx.maybe_derived_type;
1892 updated = true;
1893 }
1894 /* If types are known to be same, merging is quite easy. */
1895 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
1896 {
1897 if (offset != ctx.offset
1898 && TYPE_SIZE (outer_type)
1899 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
1900 {
1901 if (dump_file && (dump_flags & TDF_DETAILS))
1902 fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
1903 clear_speculation ();
1904 clear_outer_type ();
1905 invalid = true;
1906 return true;
1907 }
1908 if (dump_file && (dump_flags & TDF_DETAILS))
1909 fprintf (dump_file, "Outer types match, merging flags\n");
1910 if (maybe_in_construction && !ctx.maybe_in_construction)
1911 {
1912 updated = true;
1913 maybe_in_construction = false;
1914 }
1915 if (maybe_derived_type && !ctx.maybe_derived_type)
1916 {
1917 updated = true;
1918 maybe_derived_type = false;
1919 }
1920 if (dynamic && !ctx.dynamic)
1921 {
1922 updated = true;
1923 dynamic = false;
1924 }
1925 }
1926 /* If we know the type precisely, there is not much to improve. */
1927 else if (!maybe_derived_type && !maybe_in_construction
1928 && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
1929 {
1930 /* It may be easy to check if second context permits the first
1931 and set INVALID otherwise. This is not easy to do in general;
1932 contains_type_p may return false negatives for non-comparable
1933 types.
1934
1935 If OTR_TYPE is known, we however can expect that
1936 restrict_to_inner_class should have discovered the same base
1937 type. */
1938 if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
1939 {
1940 if (dump_file && (dump_flags & TDF_DETAILS))
1941 fprintf (dump_file, "Contextes disagree -> invalid\n");
1942 goto invalidate;
1943 }
1944 }
1945 /* See if one type contains the other as a field (not base).
1946 In this case we want to choose the wider type, because it contains
1947 more information. */
1948 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
1949 outer_type, false, false))
1950 {
1951 if (dump_file && (dump_flags & TDF_DETAILS))
1952 fprintf (dump_file, "Second type contain the first as a field\n");
1953
1954 if (maybe_derived_type)
1955 {
1956 outer_type = ctx.outer_type;
1957 maybe_derived_type = ctx.maybe_derived_type;
1958 offset = ctx.offset;
1959 dynamic = ctx.dynamic;
1960 updated = true;
1961 }
1962
1963 /* If we do not know how the context is being used, we can
1964 not clear MAYBE_IN_CONSTRUCTION because it may be offseted
1965 to other component of OUTER_TYPE later and we know nothing
1966 about it. */
1967 if (otr_type && maybe_in_construction
1968 && !ctx.maybe_in_construction)
1969 {
1970 maybe_in_construction = false;
1971 updated = true;
1972 }
1973 }
1974 else if (contains_type_p (outer_type, offset - ctx.offset,
1975 ctx.outer_type, false, false))
1976 {
1977 if (dump_file && (dump_flags & TDF_DETAILS))
1978 fprintf (dump_file, "First type contain the second as a field\n");
1979
1980 if (otr_type && maybe_in_construction
1981 && !ctx.maybe_in_construction)
1982 {
1983 maybe_in_construction = false;
1984 updated = true;
1985 }
1986 }
1987 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
1988 else if (contains_type_p (ctx.outer_type,
1989 ctx.offset - offset, outer_type, false, true))
1990 {
1991 if (dump_file && (dump_flags & TDF_DETAILS))
1992 fprintf (dump_file, "First type is base of second\n");
1993 if (!maybe_derived_type)
1994 {
1995 if (!ctx.maybe_in_construction
1996 && types_odr_comparable (outer_type, ctx.outer_type))
1997 {
1998 if (dump_file && (dump_flags & TDF_DETAILS))
1999 fprintf (dump_file, "Second context does not permit base -> invalid\n");
2000 goto invalidate;
2001 }
2002 }
     /* Pick variant deeper in the hierarchy.  */
2004 else
2005 {
2006 outer_type = ctx.outer_type;
2007 maybe_in_construction = ctx.maybe_in_construction;
2008 maybe_derived_type = ctx.maybe_derived_type;
2009 offset = ctx.offset;
2010 dynamic = ctx.dynamic;
2011 updated = true;
2012 }
2013 }
2014 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2015 else if (contains_type_p (outer_type,
2016 offset - ctx.offset, ctx.outer_type, false, true))
2017 {
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 fprintf (dump_file, "Second type is base of first\n");
2020 if (!ctx.maybe_derived_type)
2021 {
2022 if (!maybe_in_construction
2023 && types_odr_comparable (outer_type, ctx.outer_type))
2024 {
2025 if (dump_file && (dump_flags & TDF_DETAILS))
2026 fprintf (dump_file, "First context does not permit base -> invalid\n");
2027 goto invalidate;
2028 }
2029 }
2030 }
  /* TODO: handle merging using the hierarchy.  */
2032 else if (dump_file && (dump_flags & TDF_DETAILS))
2033 fprintf (dump_file, "Giving up on merge\n");
2034
2035 updated |= combine_speculation_with (ctx.speculative_outer_type,
2036 ctx.speculative_offset,
2037 ctx.speculative_maybe_derived_type,
2038 otr_type);
2039
2040 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2041 {
2042 fprintf (dump_file, "Updated as: ");
2043 dump (dump_file);
2044 fprintf (dump_file, "\n");
2045 }
2046 return updated;
2047
2048 invalidate:
2049 invalid = true;
2050 clear_speculation ();
2051 clear_outer_type ();
2052 return true;
2053 }
2054
2055 /* Take non-speculative info, merge it with speculative and clear speculation.
2056 Used when we no longer manage to keep track of actual outer type, but we
2057 think it is still there.
2058
2059 If OTR_TYPE is set, the transformation can be done more effectively assuming
2060 that context is going to be used only that way. */
2061
2062 void
2063 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2064 {
2065 tree spec_outer_type = outer_type;
2066 HOST_WIDE_INT spec_offset = offset;
2067 bool spec_maybe_derived_type = maybe_derived_type;
2068
2069 if (invalid)
2070 {
2071 invalid = false;
2072 clear_outer_type ();
2073 clear_speculation ();
2074 return;
2075 }
2076 if (!outer_type)
2077 return;
2078 clear_outer_type ();
2079 combine_speculation_with (spec_outer_type, spec_offset,
2080 spec_maybe_derived_type,
2081 otr_type);
2082 }
2083
/* Use when we cannot track dynamic type change.  This speculatively assumes
   that no type change is happening.  */
2086
2087 void
2088 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2089 tree otr_type)
2090 {
2091 if (dynamic)
2092 make_speculative (otr_type);
2093 else if (in_poly_cdtor)
2094 maybe_in_construction = true;
2095 }