]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-polymorphic-call.c
poly_int: get_ref_base_and_extent
[thirdparty/gcc.git] / gcc / ipa-polymorphic-call.c
1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
41 #include "params.h"
42
43 /* Return true when TYPE contains an polymorphic type and thus is interesting
44 for devirtualization machinery. */
45
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
47 bool consider_placement_new = true,
48 bool consider_bases = true);
49
50 bool
51 contains_polymorphic_type_p (const_tree type)
52 {
53 type = TYPE_MAIN_VARIANT (type);
54
55 if (RECORD_OR_UNION_TYPE_P (type))
56 {
57 if (TYPE_BINFO (type)
58 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
59 return true;
60 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
61 if (TREE_CODE (fld) == FIELD_DECL
62 && !DECL_ARTIFICIAL (fld)
63 && contains_polymorphic_type_p (TREE_TYPE (fld)))
64 return true;
65 return false;
66 }
67 if (TREE_CODE (type) == ARRAY_TYPE)
68 return contains_polymorphic_type_p (TREE_TYPE (type));
69 return false;
70 }
71
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play safe and assume that any
   non-polymorphic type is POD.  */
bool
possible_placement_new (tree type, tree expected_type,
			HOST_WIDE_INT cur_offset)
{
  /* Negative offset means we are not really within TYPE; be permissive.  */
  if (cur_offset < 0)
    return true;
  /* Accept unless either (a) the offset falls on the vtable pointer of a
     polymorphic RECORD_TYPE (first POINTER_SIZE bits), which placement new
     may never overwrite, or (b) TYPE has a known constant size and
     EXPECTED_TYPE (or at least a pointer-sized object when EXPECTED_TYPE is
     unknown) would not fit at CUR_OFFSET.  */
  return ((TREE_CODE (type) != RECORD_TYPE
	   || !TYPE_BINFO (type)
	   || cur_offset >= POINTER_SIZE
	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
	  && (!TYPE_SIZE (type)
	      || !tree_fits_shwi_p (TYPE_SIZE (type))
	      || (cur_offset
		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
		     : POINTER_SIZE)
		  <= tree_to_uhwi (TYPE_SIZE (type)))));
}
95
96 /* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
97 is contained at THIS->OFFSET. Walk the memory representation of
98 THIS->OUTER_TYPE and find the outermost class type that match
99 OTR_TYPE or contain OTR_TYPE as a base. Update THIS
100 to represent it.
101
   If OTR_TYPE is NULL, just find outermost polymorphic type with
   virtual table present at position OFFSET.
104
105 For example when THIS represents type
106 class A
107 {
108 int a;
109 class B b;
110 }
111 and we look for type at offset sizeof(int), we end up with B and offset 0.
112 If the same is produced by multiple inheritance, we end up with A and offset
113 sizeof(int).
114
   If we cannot find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.
118
119 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
120 valid only via allocation of new polymorphic type inside by means
121 of placement new.
122
123 When CONSIDER_BASES is false, only look for actual fields, not base types
124 of TYPE. */
125
bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
  /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
     that the context is either invalid, or the instance type must be
     derived from OUTER_TYPE.

     Because the instance type may contain field whose type is of OUTER_TYPE,
     we cannot derive any effective information about it.

     TODO: In the case we know all derived types, we can definitely do better
     here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
    {
      bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
      bool dyn = dynamic;
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;

      /* If derived type is not allowed, we know that the context is invalid.
	 For dynamic types, we really do not have information about
	 size of the memory location.  It is possible that completely
	 different type is stored after outer_type.  */
      if (!der && !dyn)
	{
	  clear_speculation ();
	  invalid = true;
	  return false;
	}
    }

  /* Cache size of OTR_TYPE in bits; fall back to POINTER_SIZE when the
     size is unknown or not a small constant.  */
  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
	 about accepting cases where we cannot find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_with_linkage_p (outer_type)
		  && type_known_to_have_no_derivations_p (outer_type))
		maybe_derived_type = false;

	      /* Type cannot contain itself on a non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
		  return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL
		  || TREE_TYPE (fld) == error_mark_node)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those cannot contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + POINTER_SIZE
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  /* Descend into the matched field and continue the walk there.  */
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = TYPE_MAIN_VARIANT (subtype);
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
	  /* Shared bail-out path; reached by goto from the checks above as
	     well as by falling through for unhandled TREE_CODEs.  */
	no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		 type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
						    speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		{
		  clear_speculation ();
		  return true;
		}
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
449
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
451 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
452 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
453 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
454 base of one of fields of OUTER_TYPE. */
455
456 static bool
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
458 tree otr_type,
459 bool consider_placement_new,
460 bool consider_bases)
461 {
462 ipa_polymorphic_call_context context;
463
464 /* Check that type is within range. */
465 if (offset < 0)
466 return false;
467
468 /* PR ipa/71207
469 As OUTER_TYPE can be a type which has a diamond virtual inheritance,
470 it's not necessary that INNER_TYPE will fit within OUTER_TYPE with
471 a given offset. It can happen that INNER_TYPE also contains a base object,
472 however it would point to the same instance in the OUTER_TYPE. */
473
474 context.offset = offset;
475 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
476 context.maybe_derived_type = false;
477 context.dynamic = false;
478 return context.restrict_to_inner_class (otr_type, consider_placement_new,
479 consider_bases);
480 }
481
482
483 /* Return a FUNCTION_DECL if FN represent a constructor or destructor.
484 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
485
486 tree
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
488 {
489 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
490 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
491 {
492 if (!check_clones)
493 return NULL_TREE;
494
495 /* Watch for clones where we constant propagated the first
496 argument (pointer to the instance). */
497 fn = DECL_ABSTRACT_ORIGIN (fn);
498 if (!fn
499 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
500 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
501 return NULL_TREE;
502 }
503
504 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
505 return NULL_TREE;
506
507 return fn;
508 }
509
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
511 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
512
513 tree
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
515 {
516 tree fn = block_ultimate_origin (block);
517 if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
518 return NULL_TREE;
519
520 return polymorphic_ctor_dtor_p (fn, check_clones);
521 }
522
523
524 /* We know that the instance is stored in variable or parameter
525 (not dynamically allocated) and we want to disprove the fact
526 that it may be in construction at invocation of CALL.
527
528 BASE represents memory location where instance is stored.
529 If BASE is NULL, it is assumed to be global memory.
530 OUTER_TYPE is known type of the instance or NULL if not
531 known.
532
533 For the variable to be in construction we actually need to
534 be in constructor of corresponding global variable or
535 the inline stack of CALL must contain the constructor.
536 Check this condition. This check works safely only before
537 IPA passes, because inline stacks may become out of date
538 later. */
539
bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple *call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  /* Walk the inline stack of CALL looking for an inlined ctor/dtor whose
     base type matches (or may match) OUTER_TYPE.  For globals we must
     also consider clones, as ctor calls may have been cloned with the
     instance pointer propagated.  */
  bool check_clones = !base || is_global_var (base);
  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
      {
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));

	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    /* Without comparable types, be conservative: any polymorphic
	       ctor/dtor on the stack may be constructing our object.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& TYPE_BINFO (type)
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  /* For globals (or unknown BASE) the enclosing FUNCTION itself may be the
     constructor/destructor doing the construction.  */
  if (!base || (VAR_P (base) && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
609
610 /* Dump human readable context to F. If NEWLINE is true, it will be terminated
611 by a newline. */
612
void
ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
{
  /* Leading indentation for readability in larger dumps.
     NOTE(review): scrape may have collapsed the spacing in the literal
     below — confirm against the repository before assuming one space.  */
  fprintf (f, " ");
  if (invalid)
    fprintf (f, "Call is known to be undefined");
  else
    {
      if (useless_p ())
	fprintf (f, "nothing known");
      /* Dump the non-speculative part of the context, if any.  */
      if (outer_type || offset)
	{
	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
	  print_generic_expr (f, outer_type, TDF_SLIM);
	  if (maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  if (maybe_in_construction)
	    fprintf (f, " (maybe in construction)");
	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
		   offset);
	}
      /* Dump the speculative part, if any.  */
      if (speculative_outer_type)
	{
	  if (outer_type || offset)
	    fprintf (f, " ");
	  fprintf (f, "Speculative outer type:");
	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
	  if (speculative_maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
		   speculative_offset);
	}
    }
  if (newline)
    fprintf(f, "\n");
}
649
650 /* Print context to stderr. */
651
652 void
653 ipa_polymorphic_call_context::debug () const
654 {
655 dump (stderr);
656 }
657
658 /* Stream out the context to OB. */
659
void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* The order and width of these packed bits form the on-disk format and
     must stay in exact sync with stream_in below.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  /* Presence flags — the corresponding payloads follow only when set.  */
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    /* A speculative offset without a speculative type would be lost on
       stream-in; assert the invariant.  */
    gcc_assert (!speculative_offset);
}
687
688 /* Stream in the context from IB and DATA_IN. */
689
void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  /* Unpack bits in the exact order stream_out packed them.  */
  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  /* Presence flags telling which optional payloads follow.  */
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
724
/* Produce polymorphic call context for call of method of instance
   that is located within BASE (that is assumed to be a decl) at offset OFF.  */
727
728 void
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
730 {
731 gcc_assert (DECL_P (base));
732 clear_speculation ();
733
734 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
735 {
736 clear_outer_type ();
737 offset = off;
738 return;
739 }
740 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
741 offset = off;
742 /* Make very conservative assumption that all objects
743 may be in construction.
744
745 It is up to caller to revisit this via
746 get_dynamic_type or decl_maybe_in_construction_p. */
747 maybe_in_construction = true;
748 maybe_derived_type = false;
749 dynamic = false;
750 }
751
752 /* CST is an invariant (address of decl), try to get meaningful
753 polymorphic call context for polymorphic call of method
754 if instance of OTR_TYPE that is located at offset OFF of this invariant.
755 Return FALSE if nothing meaningful can be found. */
756
bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  poly_int64 offset2, size, max_size;
  bool reverse;
  tree base;

  invalid = false;
  /* NOTE(review): OFF is overwritten before it is ever read, so both the
     incoming offset and OFFSET2 computed below are discarded — the context
     is always anchored at offset 0 of BASE.  This matches what the visible
     code does; confirm against callers whether the parameter is meant to
     participate before relying on it.  */
  off = 0;
  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
  /* Require a DECL base whose accessed extent is exactly known.  */
  if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
786
787 /* See if OP is SSA name initialized as a copy or by single assignment.
788 If so, walk the SSA graph up. Because simple PHI conditional is considered
789 copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
790 graph. */
791
static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 /* We might be called via fold_stmt during cfgcleanup where
	    SSA form need not be up-to-date.  */
	 && !name_registered_for_update_p (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Guard against cycles in the SSA graph: stop the first time we
	 revisit a name.  A caller-supplied GLOBAL_VISITED set persists
	 across calls; otherwise a local set is lazily allocated.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple *phi = SSA_NAME_DEF_STMT (op);

	  /* Only degenerate (single-arg) and ptr-or-null two-arg PHIs
	     are treated as copies; everything else stops the walk.  */
	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* Loads are not copies; stop before following them.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
856
857 /* Create polymorphic call context from IP invariant CST.
858 This is typically &global_var.
859 OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
860 is offset of call. */
861
ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  /* Start with empty speculation; set_by_invariant fills in the rest or
     leaves the context in its cleared state when CST is not useful.  */
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
869
870 /* Build context for pointer REF contained in FNDECL at statement STMT.
871 if INSTANCE is non-NULL, return pointer to the object described by
872 the context or DECL where context is contained in. */
873
ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple *stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  /* For an OBJ_TYPE_REF start from the object operand and remember the
     class of the polymorphic call; otherwise analyze REF directly.  */
  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT offset2, size;
	  bool reverse;
	  tree base
	    = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
					   &offset2, &size, &reverse);
	  if (!base)
	    break;

	  /* Record the pointed-to type as speculation even before we know
	     whether BASE is a decl or another dereference.  */
	  combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				    offset + offset2,
				    true,
				    NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if (TREE_CODE (base) == MEM_REF || DECL_P (base))
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  /* Accumulate the constant offset and keep walking from
		     the inner pointer; bail out on HWI overflow.  */
		  offset_int o = mem_ref_offset (base) * BITS_PER_UNIT;
		  o += offset;
		  o += offset2;
		  if (!wi::fits_shwi_p (o))
		    break;
		  base_pointer = TREE_OPERAND (base, 0);
		  offset = o.to_shwi ();
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  /* Refine the conservative in-construction flag using the
		     inline stack of STMT when we have one.  */
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		      = decl_maybe_in_construction_p (base,
						      outer_type,
						      stmt,
						      fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
	{
	  /* Fold a constant pointer adjustment into OFFSET and continue
	     from the adjusted pointer.  */
	  offset_int o
	    = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
				SIGNED);
	  o *= BITS_PER_UNIT;
	  o += offset;
	  if (!wi::fits_shwi_p (o))
	    break;
	  offset = o.to_shwi ();
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* Default-definition SSA names other than parameters/results carry no
     information — the context is invalid.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			   (SSA_NAME_DEF_STMT (base_pointer)));

  /* As a last resort, speculate based on the static pointer type.  */
  if (base_type && POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here.  */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
1096
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the call (OTR_TYPE); used to verify that a type discovered
     during the walk can actually host the virtual call analyzed.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Offset (in bits) within KNOWN_CURRENT_TYPE that corresponds to the
     analyzed object; paired with KNOWN_CURRENT_TYPE.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to nonzero if we possibly missed some dynamic type changes and we
     should consider the set to be speculative.  */
  unsigned speculative;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true when a vtbl pointer store was seen that could not be
     analyzed; any derived result is then only speculative.  */
  bool seen_unanalyzed_store;
};
1127
1128 /* Return true if STMT is not call and can modify a virtual method table pointer.
1129 We take advantage of fact that vtable stores must appear within constructor
1130 and destructor functions. */
1131
1132 static bool
1133 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1134 {
1135 if (is_gimple_assign (stmt))
1136 {
1137 tree lhs = gimple_assign_lhs (stmt);
1138
1139 if (gimple_clobber_p (stmt))
1140 return false;
1141 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1142 {
1143 if (flag_strict_aliasing
1144 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1145 return false;
1146
1147 if (TREE_CODE (lhs) == COMPONENT_REF
1148 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1149 return false;
1150 /* In the future we might want to use get_ref_base_and_extent to find
1151 if there is a field corresponding to the offset and if so, proceed
1152 almost like if it was a component ref. */
1153 }
1154 }
1155
1156 /* Code unification may mess with inline stacks. */
1157 if (cfun->after_inlining)
1158 return true;
1159
1160 /* Walk the inline stack and watch out for ctors/dtors.
1161 TODO: Maybe we can require the store to appear in toplevel
1162 block of CTOR/DTOR. */
1163 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1164 block = BLOCK_SUPERCONTEXT (block))
1165 if (BLOCK_ABSTRACT_ORIGIN (block)
1166 && TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1167 return inlined_polymorphic_ctor_dtor_block_p (block, false);
1168 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1169 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1170 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1171 }
1172
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE if type changes
   in unknown way or ERROR_MARK_NODE if type is unchanged.
   On success *TYPE_OFFSET receives the bit offset of the sub-object whose
   vtbl pointer was stored.  */

static tree
extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
			       HOST_WIDE_INT *type_offset)
{
  poly_int64 offset, size, max_size;
  tree lhs, rhs, base;
  bool reverse;

  /* Only plain single assignments can be vtbl pointer stores.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* The destination must be the vtbl pointer field itself
     (a COMPONENT_REF of a DECL_VIRTUAL_P field).  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    {
      if (dump_file)
	fprintf (dump_file, " LHS is not virtual table.\n");
      return NULL_TREE;
    }

  /* Fast path: the store writes exactly the reference we pattern-matched
     at the call site.  */
  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
    ;
  else
    {
      /* Otherwise verify that the store hits TCI->instance at the
	 expected offset and has pointer size.  */
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      if (DECL_P (tci->instance))
	{
	  if (base != tci->instance)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " base:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	}
      else if (TREE_CODE (base) == MEM_REF)
	{
	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " base mem ref:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    {
	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, " base mem ref:");
		      print_generic_expr (dump_file, base, TDF_SLIM);
		      fprintf (dump_file, " has non-representable offset:");
		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }
		  return NULL_TREE;
		}
	      else
		/* Fold the MEM_REF byte offset into the bit offset.  */
		offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
	    }
	}
      else if (!operand_equal_p (tci->instance, base, 0)
	       || tci->offset)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " base:");
	      print_generic_expr (dump_file, base, TDF_SLIM);
	      fprintf (dump_file, " does not match instance:");
	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
	    }
	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
	}
      if (maybe_ne (offset, tci->offset)
	  || maybe_ne (size, POINTER_SIZE)
	  || maybe_ne (max_size, POINTER_SIZE))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " wrong offset ");
	      print_dec (offset, dump_file);
	      fprintf (dump_file, "!=%i or size ", (int) tci->offset);
	      print_dec (size, dump_file);
	      fprintf (dump_file, "\n");
	    }
	  /* A store that provably cannot overlap the watched vtbl pointer
	     leaves the type unchanged (error_mark_node); otherwise we do
	     not know (NULL).  */
	  return (known_le (offset + POINTER_SIZE, tci->offset)
		  || (known_size_p (max_size)
		      && known_gt (tci->offset + POINTER_SIZE,
				   offset + max_size))
		  ? error_mark_node : NULL);
	}
    }

  tree vtable;
  unsigned HOST_WIDE_INT offset2;

  /* Decode the stored value into a vtable decl plus offset.  */
  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
    {
      if (dump_file)
	fprintf (dump_file, " Failed to lookup binfo\n");
      return NULL;
    }

  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					       offset2, vtable);
  if (!binfo)
    {
      if (dump_file)
	fprintf (dump_file, " Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
      return NULL;
    }

  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
  return DECL_CONTEXT (vtable);
}
1307
1308 /* Record dynamic type change of TCI to TYPE. */
1309
1310 static void
1311 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1312 {
1313 if (dump_file)
1314 {
1315 if (type)
1316 {
1317 fprintf (dump_file, " Recording type: ");
1318 print_generic_expr (dump_file, type, TDF_SLIM);
1319 fprintf (dump_file, " at offset %i\n", (int)offset);
1320 }
1321 else
1322 fprintf (dump_file, " Recording unknown type\n");
1323 }
1324
1325 /* If we found a constructor of type that is not polymorphic or
1326 that may contain the type in question as a field (not as base),
1327 restrict to the inner class first to make type matching bellow
1328 happier. */
1329 if (type
1330 && (offset
1331 || (TREE_CODE (type) != RECORD_TYPE
1332 || !TYPE_BINFO (type)
1333 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1334 {
1335 ipa_polymorphic_call_context context;
1336
1337 context.offset = offset;
1338 context.outer_type = type;
1339 context.maybe_in_construction = false;
1340 context.maybe_derived_type = false;
1341 context.dynamic = true;
1342 /* If we failed to find the inner type, we know that the call
1343 would be undefined for type produced here. */
1344 if (!context.restrict_to_inner_class (tci->otr_type))
1345 {
1346 if (dump_file)
1347 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
1348 return;
1349 }
1350 /* Watch for case we reached an POD type and anticipate placement
1351 new. */
1352 if (!context.maybe_derived_type)
1353 {
1354 type = context.outer_type;
1355 offset = context.offset;
1356 }
1357 }
1358 if (tci->type_maybe_changed
1359 && (!types_same_for_odr (type, tci->known_current_type)
1360 || offset != tci->known_current_offset))
1361 tci->multiple_types_encountered = true;
1362 tci->known_current_type = TYPE_MAIN_VARIANT (type);
1363 tci->known_current_offset = offset;
1364 tci->type_maybe_changed = true;
1365 }
1366
1367
1368 /* The maximum number of may-defs we visit when looking for a must-def
1369 that changes the dynamic type in check_stmt_for_type_change. Tuned
1370 after the PR12392 testcase which unlimited spends 40% time within
1371 these alias walks and 8% with the following limit. */
1372
1373 static inline bool
1374 csftc_abort_walking_p (unsigned speculative)
1375 {
1376 unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS);
1377 return speculative > max ? true : false;
1378 }
1379
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.
   Returning true terminates the walk; returning false continues it.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Const/pure calls cannot write memory and thus cannot store a
	 vtbl pointer.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
	{
	  tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	  tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
	  HOST_WIDE_INT offset = 0;
	  bool reverse;

	  if (dump_file)
	    {
	      fprintf (dump_file, " Checking constructor call: ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }

	  /* See if THIS parameter seems like instance pointer.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    {
	      HOST_WIDE_INT size;
	      op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
						&offset, &size, &reverse);
	      if (!op)
		{
		  /* Unanalyzable base: count as a speculative may-def.  */
		  tci->speculative++;
		  return csftc_abort_walking_p (tci->speculative);
		}
	      if (TREE_CODE (op) == MEM_REF)
		{
		  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		    {
		      tci->speculative++;
		      return csftc_abort_walking_p (tci->speculative);
		    }
		  offset += tree_to_shwi (TREE_OPERAND (op, 1))
			    * BITS_PER_UNIT;
		  op = TREE_OPERAND (op, 0);
		}
	      else if (DECL_P (op))
		;
	      else
		{
		  tci->speculative++;
		  return csftc_abort_walking_p (tci->speculative);
		}
	      op = walk_ssa_copies (op);
	    }
	  if (operand_equal_p (op, tci->instance, 0)
	      && TYPE_SIZE (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && tree_fits_shwi_p (TYPE_SIZE (type))
	      && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
	      /* Some inlined constructors may look as follows:
		   _3 = operator new (16);
		   MEM[(struct &)_3] ={v} {CLOBBER};
		   MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
		     = &MEM[(void *)&_ZTV14CompositeClass + 16B];
		   _7 = &MEM[(struct CompositeClass *)_3].object;
		   EmptyClass::EmptyClass (_7);

		 When determining dynamic type of _3 and because we stop at first
		 dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
		 In this case the emptyclass is not even polymorphic and we miss
		 it is contained in an outer type that is polymorphic.  */

	      && (tci->offset == offset || contains_polymorphic_type_p (type)))
	    {
	      record_known_type (tci, type, tci->offset - offset);
	      return true;
	    }
	}
      /* Calls may possibly change dynamic type by placement new. Assume
	 it will not happen, but make result speculative only.  */
      if (dump_file)
	{
	  fprintf (dump_file, " Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      tci->speculative++;
      return csftc_abort_walking_p (tci->speculative);
    }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, " Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      /* error_mark_node means the store provably did not change the type;
	 keep walking.  */
      if (type == error_mark_node)
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, " Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative++;
	}
      else
	record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
1513
1514 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1515 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1516 INSTANCE is pointer to the outer instance as returned by
1517 get_polymorphic_context. To avoid creation of temporary expressions,
1518 INSTANCE may also be an declaration of get_polymorphic_context found the
1519 value to be in static storage.
1520
1521 If the type of instance is not fully determined
1522 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1523 is set), try to walk memory writes and find the actual construction of the
1524 instance.
1525
1526 Return true if memory is unchanged from function entry.
1527
1528 We do not include this analysis in the context analysis itself, because
1529 it needs memory SSA to be fully built and the walk may be expensive.
1530 So it is not suitable for use withing fold_stmt and similar uses. */
1531
1532 bool
1533 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1534 tree otr_object,
1535 tree otr_type,
1536 gimple *call)
1537 {
1538 struct type_change_info tci;
1539 ao_ref ao;
1540 bool function_entry_reached = false;
1541 tree instance_ref = NULL;
1542 gimple *stmt = call;
1543 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1544 This is because we do not update INSTANCE when walking inwards. */
1545 HOST_WIDE_INT instance_offset = offset;
1546 tree instance_outer_type = outer_type;
1547
1548 if (otr_type)
1549 otr_type = TYPE_MAIN_VARIANT (otr_type);
1550
1551 /* Walk into inner type. This may clear maybe_derived_type and save us
1552 from useless work. It also makes later comparsions with static type
1553 easier. */
1554 if (outer_type && otr_type)
1555 {
1556 if (!restrict_to_inner_class (otr_type))
1557 return false;
1558 }
1559
1560 if (!maybe_in_construction && !maybe_derived_type)
1561 return false;
1562
1563 /* If we are in fact not looking at any object object or the instance is
1564 some placement new into a random load, give up straight away. */
1565 if (TREE_CODE (instance) == MEM_REF)
1566 return false;
1567
1568 /* We need to obtain refernce to virtual table pointer. It is better
1569 to look it up in the code rather than build our own. This require bit
1570 of pattern matching, but we end up verifying that what we found is
1571 correct.
1572
1573 What we pattern match is:
1574
1575 tmp = instance->_vptr.A; // vtbl ptr load
1576 tmp2 = tmp[otr_token]; // vtable lookup
1577 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1578
1579 We want to start alias oracle walk from vtbl pointer load,
1580 but we may not be able to identify it, for example, when PRE moved the
1581 load around. */
1582
1583 if (gimple_code (call) == GIMPLE_CALL)
1584 {
1585 tree ref = gimple_call_fn (call);
1586 bool reverse;
1587
1588 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1589 {
1590 ref = OBJ_TYPE_REF_EXPR (ref);
1591 ref = walk_ssa_copies (ref);
1592
1593 /* If call target is already known, no need to do the expensive
1594 memory walk. */
1595 if (is_gimple_min_invariant (ref))
1596 return false;
1597
1598 /* Check if definition looks like vtable lookup. */
1599 if (TREE_CODE (ref) == SSA_NAME
1600 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1601 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1602 && TREE_CODE (gimple_assign_rhs1
1603 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1604 {
1605 ref = get_base_address
1606 (TREE_OPERAND (gimple_assign_rhs1
1607 (SSA_NAME_DEF_STMT (ref)), 0));
1608 ref = walk_ssa_copies (ref);
1609 /* Find base address of the lookup and see if it looks like
1610 vptr load. */
1611 if (TREE_CODE (ref) == SSA_NAME
1612 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1613 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1614 {
1615 HOST_WIDE_INT offset2, size;
1616 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1617 tree base_ref
1618 = get_ref_base_and_extent_hwi (ref_exp, &offset2,
1619 &size, &reverse);
1620
1621 /* Finally verify that what we found looks like read from
1622 OTR_OBJECT or from INSTANCE with offset OFFSET. */
1623 if (base_ref
1624 && ((TREE_CODE (base_ref) == MEM_REF
1625 && ((offset2 == instance_offset
1626 && TREE_OPERAND (base_ref, 0) == instance)
1627 || (!offset2
1628 && TREE_OPERAND (base_ref, 0)
1629 == otr_object)))
1630 || (DECL_P (instance) && base_ref == instance
1631 && offset2 == instance_offset)))
1632 {
1633 stmt = SSA_NAME_DEF_STMT (ref);
1634 instance_ref = ref_exp;
1635 }
1636 }
1637 }
1638 }
1639 }
1640
1641 /* If we failed to look up the reference in code, build our own. */
1642 if (!instance_ref)
1643 {
1644 /* If the statement in question does not use memory, we can't tell
1645 anything. */
1646 if (!gimple_vuse (stmt))
1647 return false;
1648 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1649 }
1650 else
1651 /* Otherwise use the real reference. */
1652 ao_ref_init (&ao, instance_ref);
1653
1654 /* We look for vtbl pointer read. */
1655 ao.size = POINTER_SIZE;
1656 ao.max_size = ao.size;
1657 /* We are looking for stores to vptr pointer within the instance of
1658 outer type.
1659 TODO: The vptr pointer type is globally known, we probably should
1660 keep it and do that even when otr_type is unknown. */
1661 if (otr_type)
1662 {
1663 ao.base_alias_set
1664 = get_alias_set (outer_type ? outer_type : otr_type);
1665 ao.ref_alias_set
1666 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1667 }
1668
1669 if (dump_file)
1670 {
1671 fprintf (dump_file, "Determining dynamic type for call: ");
1672 print_gimple_stmt (dump_file, call, 0);
1673 fprintf (dump_file, " Starting walk at: ");
1674 print_gimple_stmt (dump_file, stmt, 0);
1675 fprintf (dump_file, " instance pointer: ");
1676 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1677 fprintf (dump_file, " Outer instance pointer: ");
1678 print_generic_expr (dump_file, instance, TDF_SLIM);
1679 fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1680 fprintf (dump_file, " vtbl reference: ");
1681 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1682 fprintf (dump_file, "\n");
1683 }
1684
1685 tci.offset = instance_offset;
1686 tci.instance = instance;
1687 tci.vtbl_ptr_ref = instance_ref;
1688 tci.known_current_type = NULL_TREE;
1689 tci.known_current_offset = 0;
1690 tci.otr_type = otr_type;
1691 tci.type_maybe_changed = false;
1692 tci.multiple_types_encountered = false;
1693 tci.speculative = 0;
1694 tci.seen_unanalyzed_store = false;
1695
1696 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1697 &tci, NULL, &function_entry_reached);
1698
1699 /* If we did not find any type changing statements, we may still drop
1700 maybe_in_construction flag if the context already have outer type.
1701
1702 Here we make special assumptions about both constructors and
1703 destructors which are all the functions that are allowed to alter the
1704 VMT pointers. It assumes that destructors begin with assignment into
1705 all VMT pointers and that constructors essentially look in the
1706 following way:
1707
1708 1) The very first thing they do is that they call constructors of
1709 ancestor sub-objects that have them.
1710
1711 2) Then VMT pointers of this and all its ancestors is set to new
1712 values corresponding to the type corresponding to the constructor.
1713
1714 3) Only afterwards, other stuff such as constructor of member
1715 sub-objects and the code written by the user is run. Only this may
1716 include calling virtual functions, directly or indirectly.
1717
1718 4) placement new can not be used to change type of non-POD statically
1719 allocated variables.
1720
1721 There is no way to call a constructor of an ancestor sub-object in any
1722 other way.
1723
1724 This means that we do not have to care whether constructors get the
1725 correct type information because they will always change it (in fact,
1726 if we define the type to be given by the VMT pointer, it is undefined).
1727
1728 The most important fact to derive from the above is that if, for some
1729 statement in the section 3, we try to detect whether the dynamic type
1730 has changed, we can safely ignore all calls as we examine the function
1731 body backwards until we reach statements in section 2 because these
1732 calls cannot be ancestor constructors or destructors (if the input is
1733 not bogus) and so do not change the dynamic type (this holds true only
1734 for automatically allocated objects but at the moment we devirtualize
1735 only these). We then must detect that statements in section 2 change
1736 the dynamic type and can try to derive the new type. That is enough
1737 and we can stop, we will never see the calls into constructors of
1738 sub-objects in this code.
1739
1740 Therefore if the static outer type was found (outer_type)
1741 we can safely ignore tci.speculative that is set on calls and give up
1742 only if there was dyanmic type store that may affect given variable
1743 (seen_unanalyzed_store) */
1744
1745 if (!tci.type_maybe_changed
1746 || (outer_type
1747 && !dynamic
1748 && !tci.seen_unanalyzed_store
1749 && !tci.multiple_types_encountered
1750 && ((offset == tci.offset
1751 && types_same_for_odr (tci.known_current_type,
1752 outer_type))
1753 || (instance_offset == offset
1754 && types_same_for_odr (tci.known_current_type,
1755 instance_outer_type)))))
1756 {
1757 if (!outer_type || tci.seen_unanalyzed_store)
1758 return false;
1759 if (maybe_in_construction)
1760 maybe_in_construction = false;
1761 if (dump_file)
1762 fprintf (dump_file, " No dynamic type change found.\n");
1763 return true;
1764 }
1765
1766 if (tci.known_current_type
1767 && !function_entry_reached
1768 && !tci.multiple_types_encountered)
1769 {
1770 if (!tci.speculative)
1771 {
1772 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1773 offset = tci.known_current_offset;
1774 dynamic = true;
1775 maybe_in_construction = false;
1776 maybe_derived_type = false;
1777 if (dump_file)
1778 fprintf (dump_file, " Determined dynamic type.\n");
1779 }
1780 else if (!speculative_outer_type
1781 || speculative_maybe_derived_type)
1782 {
1783 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1784 speculative_offset = tci.known_current_offset;
1785 speculative_maybe_derived_type = false;
1786 if (dump_file)
1787 fprintf (dump_file, " Determined speculative dynamic type.\n");
1788 }
1789 }
1790 else if (dump_file)
1791 {
1792 fprintf (dump_file, " Found multiple types%s%s\n",
1793 function_entry_reached ? " (function entry reached)" : "",
1794 function_entry_reached ? " (multiple types encountered)" : "");
1795 }
1796
1797 return false;
1798 }
1799
/* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
   seems consistent (and useful) with what we already have in the non-speculative context.
   Return true when the speculation is worth keeping.  */

bool
ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
							HOST_WIDE_INT spec_offset,
							bool spec_maybe_derived_type,
							tree otr_type) const
{
  if (!flag_devirtualize_speculatively)
    return false;

  /* Non-polymorphic types are useless for deriving likely polymorphic
     call targets.  */
  if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
    return false;

  /* If we know nothing, speculation is always good.  */
  if (!outer_type)
    return true;

  /* Speculation is only useful to avoid derived types.
     This is not 100% true for placement new, where the outer context may
     turn out to be useless, but ignore these for now.  */
  if (!maybe_derived_type)
    return false;

  /* If types agree, speculation is consistent, but it makes sense only
     when it says something new.  */
  if (types_must_be_same_for_odr (spec_outer_type, outer_type))
    return maybe_derived_type && !spec_maybe_derived_type;

  /* If speculation does not contain the type in question, ignore it.  */
  if (otr_type
      && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
    return false;

  /* If outer type already contains speculation as a field,
     it is useless.  We already know from OUTER_TYPE
     SPEC_TYPE and that it is not in the construction.  */
  if (contains_type_p (outer_type, offset - spec_offset,
		       spec_outer_type, false, false))
    return false;

  /* If speculative outer type is not more specified than outer
     type, just give up.
     We can only decide this safely if we can compare types with OUTER_TYPE.
   */
  if ((!in_lto_p || odr_type_p (outer_type))
      && !contains_type_p (spec_outer_type,
			   spec_offset - offset,
			   outer_type, false))
    return false;
  return true;
}
1855
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true when the context was updated.  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seems valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type and offset: the only possible refinement is
	     dropping the maybe-derived bit.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
1952
1953 /* Make speculation less specific so
1954 NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
1955 If OTR_TYPE is set, assume the context is used with OTR_TYPE. */
1956
bool
ipa_polymorphic_call_context::meet_speculation_with
    (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
     tree otr_type)
{
  /* If the incoming context carries no speculation, the meet can not keep
     ours either.  */
  if (!new_outer_type && speculative_outer_type)
    {
      clear_speculation ();
      return true;
    }

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* If our own speculation is gone or inconsistent, there is nothing left
     to weaken; report no change.  */
  if (!speculative_outer_type
      || !speculation_consistent_p (speculative_outer_type,
				    speculative_offset,
				    speculative_maybe_derived_type,
				    otr_type))
    return false;

  /* If the incoming speculation is inconsistent, the meet of the two is the
     empty speculation.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    {
      clear_speculation ();
      return true;
    }

  /* Same speculative outer types: offsets must agree or the speculation is
     dropped; MAYBE_DERIVED_TYPE is or-ed to stay conservative.  */
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  clear_speculation ();
	  return true;
	}
      else
	{
	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = true;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* See if one type contains the other as a field (not base).
     The containing (wider) type is the less restrictive of the two.  */
  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
			    speculative_outer_type, false, false))
    return false;
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset,
			    new_outer_type, false, false))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  Ours already covers the
     incoming one if derived types are allowed.  */
  else if (contains_type_p (new_outer_type,
			    new_offset - speculative_offset,
			    speculative_outer_type, false, true))
    {
      if (!speculative_maybe_derived_type)
	{
	  speculative_maybe_derived_type = true;
	  return true;
	}
      return false;
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  Weaken to the base and
     allow derived types.  */
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset, new_outer_type, false, true))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = true;
      return true;
    }
  /* Unrelated speculations: the only safe meet is no speculation at all.  */
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Giving up on speculative meet\n");
      clear_speculation ();
      return true;
    }
}
2048
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
2051 If actual type the context is being used in is known, OTR_TYPE should be
2052 set accordingly. This improves quality of combined result. */
2053
bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
					    tree otr_type)
{
  bool updated = false;

  /* Nothing can be learned from a useless context; an already invalid one
     can not be strengthened further.  */
  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL) */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if(invalid)
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context: ");
      ctx.dump (dump_file);
      if (otr_type)
	{
	  fprintf (dump_file, "To be used with type: ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  /* If call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  /* Merge the outer types.  CTX is assumed valid, so any info it has can
     only strengthen ours.  */
  if (!ctx.outer_type)
    ;
  else if (!outer_type)
    {
      /* We know nothing; adopt CTX's outer type wholesale.  */
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  /* Same type at two different offsets can not both be valid.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
	  clear_speculation ();
	  clear_outer_type ();
	  invalid = true;
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Outer types match, merging flags\n");
      /* Flags are and-ed: each may-flag stays set only if both contexts
	 allow it.  */
      if (maybe_in_construction && !ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = false;
	}
      if (maybe_derived_type && !ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = false;
	}
      if (dynamic && !ctx.dynamic)
	{
	  updated = true;
	  dynamic = false;
	}
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check if second context permits the first
	 and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.

	 If OTR_TYPE is known, we however can expect that
	 restrict_to_inner_class should have discovered the same base
	 type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Contextes disagree -> invalid\n");
	  goto invalidate;
	}
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      if (maybe_derived_type)
	{
	  outer_type = ctx.outer_type;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}

      /* If we do not know how the context is being used, we can
	 not clear MAYBE_IN_CONSTRUCTION because it may be offset
	 to other component of OUTER_TYPE later and we know nothing
	 about it.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      /* Our type is already the wider one; only the construction flag can
	 be strengthened (see note in the mirror branch above).  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  /* We claimed the exact type, but CTX claims a derived one; that
	     is only reconcilable while the object is in construction.  */
	  if (!ctx.maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Second context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
      /* Pick variant deeper in the hierarchy.  */
      else
	{
	  outer_type = ctx.outer_type;
	  maybe_in_construction = ctx.maybe_in_construction;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
	{
	  if (!maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "First context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	  /* Pick the base type.  */
	  else if (maybe_in_construction)
	    {
	      outer_type = ctx.outer_type;
	      maybe_in_construction = ctx.maybe_in_construction;
	      maybe_derived_type = ctx.maybe_derived_type;
	      offset = ctx.offset;
	      dynamic = ctx.dynamic;
	      updated = true;
	    }
	}
    }
  /* TODO handle merging using hierarchy. */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  /* Speculative parts are combined independently of the sure parts.  */
  updated |= combine_speculation_with (ctx.speculative_outer_type,
				       ctx.speculative_offset,
				       ctx.speculative_maybe_derived_type,
				       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as: ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  invalid = true;
  clear_speculation ();
  clear_outer_type ();
  return true;
}
2276
2277 /* Take non-speculative info, merge it with speculative and clear speculation.
2278 Used when we no longer manage to keep track of actual outer type, but we
2279 think it is still there.
2280
2281 If OTR_TYPE is set, the transformation can be done more effectively assuming
2282 that context is going to be used only that way. */
2283
2284 void
2285 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2286 {
2287 tree spec_outer_type = outer_type;
2288 HOST_WIDE_INT spec_offset = offset;
2289 bool spec_maybe_derived_type = maybe_derived_type;
2290
2291 if (invalid)
2292 {
2293 invalid = false;
2294 clear_outer_type ();
2295 clear_speculation ();
2296 return;
2297 }
2298 if (!outer_type)
2299 return;
2300 clear_outer_type ();
2301 combine_speculation_with (spec_outer_type, spec_offset,
2302 spec_maybe_derived_type,
2303 otr_type);
2304 }
2305
/* Use when we cannot track the dynamic type change.  This speculatively
   assumes that the type change is not happening.  */
2308
2309 void
2310 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2311 tree otr_type)
2312 {
2313 if (dynamic)
2314 make_speculative (otr_type);
2315 else if (in_poly_cdtor)
2316 maybe_in_construction = true;
2317 }
2318
2319 /* Return TRUE if this context conveys the same information as OTHER. */
2320
2321 bool
2322 ipa_polymorphic_call_context::equal_to
2323 (const ipa_polymorphic_call_context &x) const
2324 {
2325 if (useless_p ())
2326 return x.useless_p ();
2327 if (invalid)
2328 return x.invalid;
2329 if (x.useless_p () || x.invalid)
2330 return false;
2331
2332 if (outer_type)
2333 {
2334 if (!x.outer_type
2335 || !types_odr_comparable (outer_type, x.outer_type)
2336 || !types_same_for_odr (outer_type, x.outer_type)
2337 || offset != x.offset
2338 || maybe_in_construction != x.maybe_in_construction
2339 || maybe_derived_type != x.maybe_derived_type
2340 || dynamic != x.dynamic)
2341 return false;
2342 }
2343 else if (x.outer_type)
2344 return false;
2345
2346
2347 if (speculative_outer_type
2348 && speculation_consistent_p (speculative_outer_type, speculative_offset,
2349 speculative_maybe_derived_type, NULL_TREE))
2350 {
2351 if (!x.speculative_outer_type)
2352 return false;
2353
2354 if (!types_odr_comparable (speculative_outer_type,
2355 x.speculative_outer_type)
2356 || !types_same_for_odr (speculative_outer_type,
2357 x.speculative_outer_type)
2358 || speculative_offset != x.speculative_offset
2359 || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2360 return false;
2361 }
2362 else if (x.speculative_outer_type
2363 && x.speculation_consistent_p (x.speculative_outer_type,
2364 x.speculative_offset,
2365 x.speculative_maybe_derived_type,
2366 NULL))
2367 return false;
2368
2369 return true;
2370 }
2371
/* Modify context to be strictly less restrictive than CTX.  Return true
   if this context was updated.  */
2373
2374 bool
2375 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2376 tree otr_type)
2377 {
2378 bool updated = false;
2379
2380 if (useless_p () || ctx.invalid)
2381 return false;
2382
2383 /* Restricting context to inner type makes merging easier, however do not
2384 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2385 if (otr_type && !useless_p () && !ctx.useless_p ())
2386 {
2387 restrict_to_inner_class (otr_type);
2388 ctx.restrict_to_inner_class (otr_type);
2389 if(invalid)
2390 return false;
2391 }
2392
2393 if (equal_to (ctx))
2394 return false;
2395
2396 if (ctx.useless_p () || invalid)
2397 {
2398 *this = ctx;
2399 return true;
2400 }
2401
2402 if (dump_file && (dump_flags & TDF_DETAILS))
2403 {
2404 fprintf (dump_file, "Polymorphic call context meet:");
2405 dump (dump_file);
2406 fprintf (dump_file, "With context: ");
2407 ctx.dump (dump_file);
2408 if (otr_type)
2409 {
2410 fprintf (dump_file, "To be used with type: ");
2411 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2412 fprintf (dump_file, "\n");
2413 }
2414 }
2415
2416 if (!dynamic && ctx.dynamic)
2417 {
2418 dynamic = true;
2419 updated = true;
2420 }
2421
2422 /* If call is known to be invalid, we are done. */
2423 if (!outer_type)
2424 ;
2425 else if (!ctx.outer_type)
2426 {
2427 clear_outer_type ();
2428 updated = true;
2429 }
2430 /* If types are known to be same, merging is quite easy. */
2431 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2432 {
2433 if (offset != ctx.offset
2434 && TYPE_SIZE (outer_type)
2435 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2436 {
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2438 fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2439 clear_outer_type ();
2440 return true;
2441 }
2442 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fprintf (dump_file, "Outer types match, merging flags\n");
2444 if (!maybe_in_construction && ctx.maybe_in_construction)
2445 {
2446 updated = true;
2447 maybe_in_construction = true;
2448 }
2449 if (!maybe_derived_type && ctx.maybe_derived_type)
2450 {
2451 updated = true;
2452 maybe_derived_type = true;
2453 }
2454 if (!dynamic && ctx.dynamic)
2455 {
2456 updated = true;
2457 dynamic = true;
2458 }
2459 }
2460 /* See if one type contains the other as a field (not base). */
2461 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2462 outer_type, false, false))
2463 {
2464 if (dump_file && (dump_flags & TDF_DETAILS))
2465 fprintf (dump_file, "Second type contain the first as a field\n");
2466
2467 /* The second type is more specified, so we keep the first.
2468 We need to set DYNAMIC flag to avoid declaring context INVALID
2469 of OFFSET ends up being out of range. */
2470 if (!dynamic
2471 && (ctx.dynamic
2472 || (!otr_type
2473 && (!TYPE_SIZE (ctx.outer_type)
2474 || !TYPE_SIZE (outer_type)
2475 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2476 TYPE_SIZE (outer_type), 0)))))
2477 {
2478 dynamic = true;
2479 updated = true;
2480 }
2481 }
2482 else if (contains_type_p (outer_type, offset - ctx.offset,
2483 ctx.outer_type, false, false))
2484 {
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2486 fprintf (dump_file, "First type contain the second as a field\n");
2487
2488 if (!dynamic
2489 && (ctx.dynamic
2490 || (!otr_type
2491 && (!TYPE_SIZE (ctx.outer_type)
2492 || !TYPE_SIZE (outer_type)
2493 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2494 TYPE_SIZE (outer_type), 0)))))
2495 dynamic = true;
2496 outer_type = ctx.outer_type;
2497 offset = ctx.offset;
2498 dynamic = ctx.dynamic;
2499 maybe_in_construction = ctx.maybe_in_construction;
2500 maybe_derived_type = ctx.maybe_derived_type;
2501 updated = true;
2502 }
2503 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2504 else if (contains_type_p (ctx.outer_type,
2505 ctx.offset - offset, outer_type, false, true))
2506 {
2507 if (dump_file && (dump_flags & TDF_DETAILS))
2508 fprintf (dump_file, "First type is base of second\n");
2509 if (!maybe_derived_type)
2510 {
2511 maybe_derived_type = true;
2512 updated = true;
2513 }
2514 if (!maybe_in_construction && ctx.maybe_in_construction)
2515 {
2516 maybe_in_construction = true;
2517 updated = true;
2518 }
2519 if (!dynamic && ctx.dynamic)
2520 {
2521 dynamic = true;
2522 updated = true;
2523 }
2524 }
2525 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2526 else if (contains_type_p (outer_type,
2527 offset - ctx.offset, ctx.outer_type, false, true))
2528 {
2529 if (dump_file && (dump_flags & TDF_DETAILS))
2530 fprintf (dump_file, "Second type is base of first\n");
2531 outer_type = ctx.outer_type;
2532 offset = ctx.offset;
2533 updated = true;
2534 if (!maybe_derived_type)
2535 maybe_derived_type = true;
2536 if (!maybe_in_construction && ctx.maybe_in_construction)
2537 maybe_in_construction = true;
2538 if (!dynamic && ctx.dynamic)
2539 dynamic = true;
2540 }
2541 /* TODO handle merging using hiearchy. */
2542 else
2543 {
2544 if (dump_file && (dump_flags & TDF_DETAILS))
2545 fprintf (dump_file, "Giving up on meet\n");
2546 clear_outer_type ();
2547 updated = true;
2548 }
2549
2550 updated |= meet_speculation_with (ctx.speculative_outer_type,
2551 ctx.speculative_offset,
2552 ctx.speculative_maybe_derived_type,
2553 otr_type);
2554
2555 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2556 {
2557 fprintf (dump_file, "Updated as: ");
2558 dump (dump_file);
2559 fprintf (dump_file, "\n");
2560 }
2561 return updated;
2562 }