/* gcc/ipa-polymorphic-call.c — analysis of polymorphic call context (GCC).  */
1 /* Analysis of polymorphic call context.
2 Copyright (C) 2013-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "alias.h"
29 #include "fold-const.h"
30 #include "print-tree.h"
31 #include "calls.h"
32 #include "flags.h"
33 #include "insn-config.h"
34 #include "expmed.h"
35 #include "dojump.h"
36 #include "explow.h"
37 #include "emit-rtl.h"
38 #include "varasm.h"
39 #include "stmt.h"
40 #include "expr.h"
41 #include "tree-pass.h"
42 #include "target.h"
43 #include "tree-pretty-print.h"
44 #include "cgraph.h"
45 #include "ipa-utils.h"
46 #include "internal-fn.h"
47 #include "gimple-fold.h"
48 #include "alloc-pool.h"
49 #include "symbol-summary.h"
50 #include "ipa-prop.h"
51 #include "ipa-inline.h"
52 #include "diagnostic.h"
53 #include "tree-dfa.h"
54 #include "demangle.h"
55 #include "dbgcnt.h"
56 #include "gimple-pretty-print.h"
57 #include "stor-layout.h"
58 #include "intl.h"
59 #include "data-streamer.h"
60 #include "lto-streamer.h"
61 #include "streamer-hooks.h"
62 #include "tree-ssa-operands.h"
63 #include "tree-into-ssa.h"
64
65 /* Return true when TYPE contains an polymorphic type and thus is interesting
66 for devirtualization machinery. */
67
68 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
69 bool consider_placement_new = true,
70 bool consider_bases = true);
71
72 bool
73 contains_polymorphic_type_p (const_tree type)
74 {
75 type = TYPE_MAIN_VARIANT (type);
76
77 if (RECORD_OR_UNION_TYPE_P (type))
78 {
79 if (TYPE_BINFO (type)
80 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
81 return true;
82 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
83 if (TREE_CODE (fld) == FIELD_DECL
84 && !DECL_ARTIFICIAL (fld)
85 && contains_polymorphic_type_p (TREE_TYPE (fld)))
86 return true;
87 return false;
88 }
89 if (TREE_CODE (type) == ARRAY_TYPE)
90 return contains_polymorphic_type_p (TREE_TYPE (type));
91 return false;
92 }
93
94 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
95 at possition CUR_OFFSET within TYPE.
96
97 POD can be changed to an instance of a polymorphic type by
98 placement new. Here we play safe and assume that any
99 non-polymorphic type is POD. */
100 bool
101 possible_placement_new (tree type, tree expected_type,
102 HOST_WIDE_INT cur_offset)
103 {
104 return ((TREE_CODE (type) != RECORD_TYPE
105 || !TYPE_BINFO (type)
106 || cur_offset >= POINTER_SIZE
107 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
108 && (!TYPE_SIZE (type)
109 || !tree_fits_shwi_p (TYPE_SIZE (type))
110 || (cur_offset
111 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
112 : POINTER_SIZE)
113 <= tree_to_uhwi (TYPE_SIZE (type)))));
114 }
115
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that match
   OTR_TYPE or contain OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find outermost polymorphic type with
   virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we cannot find corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */

bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
  /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
     that the context is either invalid, or the instance type must be
     derived from OUTER_TYPE.

     Because the instance type may contain field whose type is of OUTER_TYPE,
     we cannot derive any effective information about it.

     TODO: In the case we know all derived types, we can definitely do better
     here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;

      /* If derived type is not allowed, we know that the context is invalid.
	 For dynamic types, we really do not have information about
	 size of the memory location.  It is possible that completely
	 different type is stored after outer_type.  */
      if (!maybe_derived_type && !dynamic)
	{
	  clear_speculation ();
	  invalid = true;
	  return false;
	}
    }

  /* Cache the bit size of OTR_TYPE (fall back to a pointer's worth,
     i.e. at least a vptr, when size is unknown).  */
  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
	 about accepting cases where we cannot find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_with_linkage_p (outer_type)
		  && type_known_to_have_no_derivations_p (outer_type))
		maybe_derived_type = false;

	      /* Type cannot contain itself on an non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
		  return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding on at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those cannot contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + POINTER_SIZE
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  /* Descend into the matched field and repeat the walk there.  */
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  /* Reduce the offset modulo element size: all array elements
	     look alike for this walk.  */
	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = TYPE_MAIN_VARIANT (subtype);
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		 type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
						    speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
464
465 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
466 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
467 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes
468 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
469 base of one of fields of OUTER_TYPE. */
470
471 static bool
472 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
473 tree otr_type,
474 bool consider_placement_new,
475 bool consider_bases)
476 {
477 ipa_polymorphic_call_context context;
478
479 /* Check that type is within range. */
480 if (offset < 0)
481 return false;
482 if (TYPE_SIZE (outer_type) && TYPE_SIZE (otr_type)
483 && TREE_CODE (outer_type) == INTEGER_CST
484 && TREE_CODE (otr_type) == INTEGER_CST
485 && wi::ltu_p (wi::to_offset (outer_type), (wi::to_offset (otr_type) + offset)))
486 return false;
487
488 context.offset = offset;
489 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
490 context.maybe_derived_type = false;
491 return context.restrict_to_inner_class (otr_type, consider_placement_new, consider_bases);
492 }
493
494
495 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
496 If CHECK_CLONES is true, also check for clones of ctor/dtors. */
497
498 tree
499 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
500 {
501 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
502 if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
503 return NULL_TREE;
504
505 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
506 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
507 {
508 if (!check_clones)
509 return NULL_TREE;
510
511 /* Watch for clones where we constant propagated the first
512 argument (pointer to the instance). */
513 fn = DECL_ABSTRACT_ORIGIN (fn);
514 if (!fn
515 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
516 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
517 return NULL_TREE;
518 }
519
520 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
521 return NULL_TREE;
522
523 return fn;
524 }
525
526
/* We know that the instance is stored in variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   BASE represents memory location where instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is known type of the instance or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in constructor of corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */

bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  /* Walk the inline stack of CALL: if any inlined block is a ctor/dtor
     of (a type ODR-equivalent to) OUTER_TYPE, the instance may be in
     construction.  Clone checking is only needed for globals, where
     the instance pointer may have been constant propagated.  */
  bool check_clones = !base || is_global_var (base);
  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
      {
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));

	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    /* Without a comparable outer type, conservatively assume any
	       polymorphic ctor/dtor may be constructing our instance.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& TYPE_BINFO (type)
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  /* For global objects, FUNCTION itself being a ctor/dtor (or a clone of
     one) of a matching type also means possible construction.  */
  if (!base || (TREE_CODE (base) == VAR_DECL && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
612
/* Dump human readable context to F.  If NEWLINE is true, it will be terminated
   by a newline.  */

void
ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
{
  fprintf (f, " ");
  if (invalid)
    fprintf (f, "Call is known to be undefined");
  else
    {
      if (useless_p ())
	fprintf (f, "nothing known");
      /* Non-speculative part of the context.  */
      if (outer_type || offset)
	{
	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
	  print_generic_expr (f, outer_type, TDF_SLIM);
	  if (maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  if (maybe_in_construction)
	    fprintf (f, " (maybe in construction)");
	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
		   offset);
	}
      /* Speculative part, if any.  */
      if (speculative_outer_type)
	{
	  if (outer_type || offset)
	    fprintf (f, " ");
	  fprintf (f, "Speculative outer type:");
	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
	  if (speculative_maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
		   speculative_offset);
	}
    }
  if (newline)
    fprintf(f, "\n");
}
652
/* Print context to stderr.  */

void
ipa_polymorphic_call_context::debug () const
{
  /* Delegate to dump; NEWLINE defaults to true.  */
  dump (stderr);
}
660
/* Stream out the context to OB.  */

void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* The order and width of these bits must stay in sync with
     stream_in below.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  /* Presence flags; the actual values follow the bitpack only when set.  */
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    gcc_assert (!speculative_offset);
}
690
/* Stream in the context from IB and DATA_IN.  */

void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  /* Unpack in exactly the order stream_out packed.  */
  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
727
728 /* Proudce polymorphic call context for call method of instance
729 that is located within BASE (that is assumed to be a decl) at offset OFF. */
730
731 void
732 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
733 {
734 gcc_assert (DECL_P (base));
735 clear_speculation ();
736
737 if (!contains_polymorphic_type_p (TREE_TYPE (base)))
738 {
739 clear_outer_type ();
740 offset = off;
741 return;
742 }
743 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
744 offset = off;
745 /* Make very conservative assumption that all objects
746 may be in construction.
747
748 It is up to caller to revisit this via
749 get_dynamic_type or decl_maybe_in_construction_p. */
750 maybe_in_construction = true;
751 maybe_derived_type = false;
752 dynamic = false;
753 }
754
/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   of instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  invalid = false;
  /* NOTE(review): the OFF parameter is clobbered here before it is ever
     read, so the context is always built for offset 0 and the extent
     OFFSET2 computed below is discarded as well.  This looks suspicious;
     confirm against callers whether OFF is intentionally ignored.  */
  off = 0;
  clear_outer_type (otr_type);

  /* Only addresses of decls are meaningful invariants here.  */
  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  /* Require a decl base with a known, exact extent.  */
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
788
/* See if OP is SSA name initialized as a copy or by single assignment.
   If so, walk the SSA graph up.  Because simple PHI conditional is considered
   copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
   graph.  */

static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 /* We might be called via fold_stmt during cfgcleanup where
	    SSA form need not be up-to-date.  */
	 && !name_registered_for_update_p (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Cycle detection: record each visited name, lazily allocating the
	 set (the caller-supplied one when GLOBAL_VISITED is given, a
	 local one otherwise), and stop on a repeat.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple phi = SSA_NAME_DEF_STMT (op);

	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* A load from memory is not a copy; stop the walk there.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
858
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
   is offset of call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  clear_speculation ();
  /* set_by_invariant resets the remaining fields itself.  */
  set_by_invariant (cst, otr_type, off);
}
871
/* Build context for pointer REF contained in FNDECL at statement STMT.
   if INSTANCE is non-NULL, return pointer to the object described by
   the context or DECL where context is contained in.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  /* For an OBJ_TYPE_REF we know both the class of the call and the
     object pointer; otherwise REF itself is the object pointer.  */
  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* Record the type of the address as a speculation even before
	     we know whether the walk terminates usefully.  */
	  if (max_size != -1 && max_size == size)
	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				      offset + offset2,
				      true,
				      NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  offset
		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		      = decl_maybe_in_construction_p (base,
						      outer_type,
						      stmt,
						      fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  /* Accumulate constant pointer adjustments into OFFSET (bits).  */
	  offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		    * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default definition that is neither a parameter nor a result is an
     uninitialized pointer; the call is undefined.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  /* As a last resort, speculate based on the static pointed-to type.  */
  if (base_type && POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
1088
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the call (OTR_TYPE); used to verify that discovered types
     actually contain the type of the virtual call.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Offset of KNOWN_CURRENT_TYPE within the outer object.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true if we possibly missed some dynamic type changes and we should
     consider the set to be speculative.  */
  bool speculative;
  /* Set to true if the walk saw a vtbl pointer store it could not analyze;
     the result must then be treated as speculative.  */
  bool seen_unanalyzed_store;
};
1118
1119 /* Return true if STMT is not call and can modify a virtual method table pointer.
1120 We take advantage of fact that vtable stores must appear within constructor
1121 and destructor functions. */
1122
1123 static bool
1124 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
1125 {
1126 if (is_gimple_assign (stmt))
1127 {
1128 tree lhs = gimple_assign_lhs (stmt);
1129
1130 if (gimple_clobber_p (stmt))
1131 return false;
1132 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1133 {
1134 if (flag_strict_aliasing
1135 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1136 return false;
1137
1138 if (TREE_CODE (lhs) == COMPONENT_REF
1139 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1140 return false;
1141 /* In the future we might want to use get_base_ref_and_offset to find
1142 if there is a field corresponding to the offset and if so, proceed
1143 almost like if it was a component ref. */
1144 }
1145 }
1146
1147 /* Code unification may mess with inline stacks. */
1148 if (cfun->after_inlining)
1149 return true;
1150
1151 /* Walk the inline stack and watch out for ctors/dtors.
1152 TODO: Maybe we can require the store to appear in toplevel
1153 block of CTOR/DTOR. */
1154 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1155 block = BLOCK_SUPERCONTEXT (block))
1156 if (BLOCK_ABSTRACT_ORIGIN (block)
1157 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1158 return inlined_polymorphic_ctor_dtor_block_p (block, false);
1159 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1160 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1161 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1162 }
1163
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE if type changes
   in unknown way or ERROR_MARK_NODE if type is unchanged.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
			       HOST_WIDE_INT *type_offset)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  /* Only plain single assignments can be vtbl pointer stores.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  /* The destination must be the virtual table pointer field.  */
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    {
      if (dump_file)
	fprintf (dump_file, "  LHS is not virtual table.\n");
      return NULL_TREE;
    }

  /* Fast path: the store writes through exactly the vtbl pointer reference
     we pattern-matched earlier; no base/offset comparison needed.  */
  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
    ;
  else
    {
      /* Otherwise verify that the store hits TCI->instance at
	 TCI->offset.  */
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      if (DECL_P (tci->instance))
	{
	  /* When the instance is a declaration, the store base must be
	     the very same declaration.  */
	  if (base != tci->instance)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	}
      else if (TREE_CODE (base) == MEM_REF)
	{
	  /* Instance is a pointer: the MEM_REF base pointer must match, and
	     any constant MEM_REF offset gets folded into OFFSET (in bits).  */
	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    base mem ref:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    {
	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "    base mem ref:");
		      print_generic_expr (dump_file, base, TDF_SLIM);
		      fprintf (dump_file, " has non-representable offset:");
		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }
		  return NULL_TREE;
		}
	      else
		offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
	    }
	}
      else if (!operand_equal_p (tci->instance, base, 0)
	       || tci->offset)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    base:");
	      print_generic_expr (dump_file, base, TDF_SLIM);
	      fprintf (dump_file, " does not match instance:");
	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
	    }
	  /* A mismatching store well past the vtbl pointer provably does not
	     change it (ERROR_MARK_NODE); otherwise we do not know.  */
	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
	}
      if (offset != tci->offset
	  || size != POINTER_SIZE
	  || max_size != POINTER_SIZE)
	{
	  if (dump_file)
	    fprintf (dump_file, "    wrong offset %i!=%i or size %i\n",
		     (int)offset, (int)tci->offset, (int)size);
	  /* Stores provably disjoint from the vtbl pointer leave the type
	     unchanged (ERROR_MARK_NODE); otherwise the change is unknown.  */
	  return offset + POINTER_SIZE <= tci->offset
		 || (max_size != -1
		     && tci->offset + POINTER_SIZE > offset + max_size)
		 ? error_mark_node : NULL;
	}
    }

  tree vtable;
  unsigned HOST_WIDE_INT offset2;

  /* Decode the stored value into the vtable declaration and the offset
     within it.  */
  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
    {
      if (dump_file)
	fprintf (dump_file, "    Failed to lookup binfo\n");
      return NULL;
    }

  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					       offset2, vtable);
  if (!binfo)
    {
      if (dump_file)
	fprintf (dump_file, "    Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
      return NULL;
    }

  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
  return DECL_CONTEXT (vtable);
}
1291
/* Record dynamic type change of TCI to TYPE at OFFSET.  TYPE may be NULL
   when the new type is unknown.  */

static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
	  fprintf (dump_file, "  Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
	  fprintf (dump_file, " at offset %i\n", (int)offset);
	}
      else
	fprintf (dump_file, "  Recording unknown type\n");
    }

  /* If we found a constructor of type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
	  || (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for case we reached a POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
	  type = context.outer_type;
	  offset = context.offset;
	}
    }
  /* A second, different (type, offset) pair means conflicting answers;
     mark that so KNOWN_CURRENT_TYPE is disregarded by the caller.  */
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
1350
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.
   NOTE(review): returning true appears to terminate the walk along this
   VDEF chain — confirm against walk_aliased_vdefs documentation.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Const/pure calls can not write memory, so they can not change
	 the dynamic type.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
	{
	  tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	  tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
	  HOST_WIDE_INT offset = 0, size, max_size;

	  if (dump_file)
	    {
	      fprintf (dump_file, "  Checking constructor call: ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  /* See if THIS parameter seems like instance pointer.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    {
	      /* Strip the ADDR_EXPR down to its base and accumulate any
		 constant offset (in bits).  Anything variable-sized makes
		 the result speculative.  */
	      op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
					    &offset, &size, &max_size);
	      if (size != max_size || max_size == -1)
		{
		  tci->speculative = true;
		  return false;
		}
	      if (op && TREE_CODE (op) == MEM_REF)
		{
		  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		    {
		      tci->speculative = true;
		      return false;
		    }
		  offset += tree_to_shwi (TREE_OPERAND (op, 1))
			    * BITS_PER_UNIT;
		  op = TREE_OPERAND (op, 0);
		}
	      else if (DECL_P (op))
		;
	      else
		{
		  tci->speculative = true;
		  return false;
		}
	      op = walk_ssa_copies (op);
	    }
	  /* The constructed type must be large enough to cover the vtbl
	     pointer we are tracking for this to be a relevant ctor.  */
	  if (operand_equal_p (op, tci->instance, 0)
	      && TYPE_SIZE (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && tree_fits_shwi_p (TYPE_SIZE (type))
	      && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
	    {
	      record_known_type (tci, type, tci->offset - offset);
	      return true;
	    }
	}
      /* Calls may possibly change dynamic type by placement new.  Assume
	 it will not happen, but make result speculative only.  */
      if (dump_file)
	{
	  fprintf (dump_file, "  Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      tci->speculative = true;
      return false;
    }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, "  Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      /* ERROR_MARK_NODE means the store provably left the type unchanged;
	 keep walking past it.  */
      if (type == error_mark_node)
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative = true;
	}
      else
	record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
1468
1469 /* THIS is polymorphic call context obtained from get_polymorphic_context.
1470 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
1471 INSTANCE is pointer to the outer instance as returned by
1472 get_polymorphic_context. To avoid creation of temporary expressions,
1473 INSTANCE may also be an declaration of get_polymorphic_context found the
1474 value to be in static storage.
1475
1476 If the type of instance is not fully determined
1477 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
1478 is set), try to walk memory writes and find the actual construction of the
1479 instance.
1480
1481 Return true if memory is unchanged from function entry.
1482
1483 We do not include this analysis in the context analysis itself, because
1484 it needs memory SSA to be fully built and the walk may be expensive.
1485 So it is not suitable for use withing fold_stmt and similar uses. */
1486
1487 bool
1488 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1489 tree otr_object,
1490 tree otr_type,
1491 gimple call)
1492 {
1493 struct type_change_info tci;
1494 ao_ref ao;
1495 bool function_entry_reached = false;
1496 tree instance_ref = NULL;
1497 gimple stmt = call;
1498 /* Remember OFFSET before it is modified by restrict_to_inner_class.
1499 This is because we do not update INSTANCE when walking inwards. */
1500 HOST_WIDE_INT instance_offset = offset;
1501
1502 if (otr_type)
1503 otr_type = TYPE_MAIN_VARIANT (otr_type);
1504
1505 /* Walk into inner type. This may clear maybe_derived_type and save us
1506 from useless work. It also makes later comparsions with static type
1507 easier. */
1508 if (outer_type && otr_type)
1509 {
1510 if (!restrict_to_inner_class (otr_type))
1511 return false;
1512 }
1513
1514 if (!maybe_in_construction && !maybe_derived_type)
1515 return false;
1516
1517 /* We need to obtain refernce to virtual table pointer. It is better
1518 to look it up in the code rather than build our own. This require bit
1519 of pattern matching, but we end up verifying that what we found is
1520 correct.
1521
1522 What we pattern match is:
1523
1524 tmp = instance->_vptr.A; // vtbl ptr load
1525 tmp2 = tmp[otr_token]; // vtable lookup
1526 OBJ_TYPE_REF(tmp2;instance->0) (instance);
1527
1528 We want to start alias oracle walk from vtbl pointer load,
1529 but we may not be able to identify it, for example, when PRE moved the
1530 load around. */
1531
1532 if (gimple_code (call) == GIMPLE_CALL)
1533 {
1534 tree ref = gimple_call_fn (call);
1535 HOST_WIDE_INT offset2, size, max_size;
1536
1537 if (TREE_CODE (ref) == OBJ_TYPE_REF)
1538 {
1539 ref = OBJ_TYPE_REF_EXPR (ref);
1540 ref = walk_ssa_copies (ref);
1541
1542 /* If call target is already known, no need to do the expensive
1543 memory walk. */
1544 if (is_gimple_min_invariant (ref))
1545 return false;
1546
1547 /* Check if definition looks like vtable lookup. */
1548 if (TREE_CODE (ref) == SSA_NAME
1549 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1550 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1551 && TREE_CODE (gimple_assign_rhs1
1552 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1553 {
1554 ref = get_base_address
1555 (TREE_OPERAND (gimple_assign_rhs1
1556 (SSA_NAME_DEF_STMT (ref)), 0));
1557 ref = walk_ssa_copies (ref);
1558 /* Find base address of the lookup and see if it looks like
1559 vptr load. */
1560 if (TREE_CODE (ref) == SSA_NAME
1561 && !SSA_NAME_IS_DEFAULT_DEF (ref)
1562 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1563 {
1564 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1565 tree base_ref = get_ref_base_and_extent
1566 (ref_exp, &offset2, &size, &max_size);
1567
1568 /* Finally verify that what we found looks like read from
1569 OTR_OBJECT or from INSTANCE with offset OFFSET. */
1570 if (base_ref
1571 && ((TREE_CODE (base_ref) == MEM_REF
1572 && ((offset2 == instance_offset
1573 && TREE_OPERAND (base_ref, 0) == instance)
1574 || (!offset2
1575 && TREE_OPERAND (base_ref, 0)
1576 == otr_object)))
1577 || (DECL_P (instance) && base_ref == instance
1578 && offset2 == instance_offset)))
1579 {
1580 stmt = SSA_NAME_DEF_STMT (ref);
1581 instance_ref = ref_exp;
1582 }
1583 }
1584 }
1585 }
1586 }
1587
1588 /* If we failed to look up the refernece in code, build our own. */
1589 if (!instance_ref)
1590 {
1591 /* If the statement in question does not use memory, we can't tell
1592 anything. */
1593 if (!gimple_vuse (stmt))
1594 return false;
1595 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1596 }
1597 else
1598 /* Otherwise use the real reference. */
1599 ao_ref_init (&ao, instance_ref);
1600
1601 /* We look for vtbl pointer read. */
1602 ao.size = POINTER_SIZE;
1603 ao.max_size = ao.size;
1604 /* We are looking for stores to vptr pointer within the instance of
1605 outer type.
1606 TODO: The vptr pointer type is globally known, we probably should
1607 keep it and do that even when otr_type is unknown. */
1608 if (otr_type)
1609 {
1610 ao.base_alias_set
1611 = get_alias_set (outer_type ? outer_type : otr_type);
1612 ao.ref_alias_set
1613 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1614 }
1615
1616 if (dump_file)
1617 {
1618 fprintf (dump_file, "Determining dynamic type for call: ");
1619 print_gimple_stmt (dump_file, call, 0, 0);
1620 fprintf (dump_file, " Starting walk at: ");
1621 print_gimple_stmt (dump_file, stmt, 0, 0);
1622 fprintf (dump_file, " instance pointer: ");
1623 print_generic_expr (dump_file, otr_object, TDF_SLIM);
1624 fprintf (dump_file, " Outer instance pointer: ");
1625 print_generic_expr (dump_file, instance, TDF_SLIM);
1626 fprintf (dump_file, " offset: %i (bits)", (int)offset);
1627 fprintf (dump_file, " vtbl reference: ");
1628 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1629 fprintf (dump_file, "\n");
1630 }
1631
1632 tci.offset = offset;
1633 tci.instance = instance;
1634 tci.vtbl_ptr_ref = instance_ref;
1635 gcc_assert (TREE_CODE (instance) != MEM_REF);
1636 tci.known_current_type = NULL_TREE;
1637 tci.known_current_offset = 0;
1638 tci.otr_type = otr_type;
1639 tci.type_maybe_changed = false;
1640 tci.multiple_types_encountered = false;
1641 tci.speculative = false;
1642 tci.seen_unanalyzed_store = false;
1643
1644 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1645 &tci, NULL, &function_entry_reached);
1646
1647 /* If we did not find any type changing statements, we may still drop
1648 maybe_in_construction flag if the context already have outer type.
1649
1650 Here we make special assumptions about both constructors and
1651 destructors which are all the functions that are allowed to alter the
1652 VMT pointers. It assumes that destructors begin with assignment into
1653 all VMT pointers and that constructors essentially look in the
1654 following way:
1655
1656 1) The very first thing they do is that they call constructors of
1657 ancestor sub-objects that have them.
1658
1659 2) Then VMT pointers of this and all its ancestors is set to new
1660 values corresponding to the type corresponding to the constructor.
1661
1662 3) Only afterwards, other stuff such as constructor of member
1663 sub-objects and the code written by the user is run. Only this may
1664 include calling virtual functions, directly or indirectly.
1665
1666 4) placement new can not be used to change type of non-POD statically
1667 allocated variables.
1668
1669 There is no way to call a constructor of an ancestor sub-object in any
1670 other way.
1671
1672 This means that we do not have to care whether constructors get the
1673 correct type information because they will always change it (in fact,
1674 if we define the type to be given by the VMT pointer, it is undefined).
1675
1676 The most important fact to derive from the above is that if, for some
1677 statement in the section 3, we try to detect whether the dynamic type
1678 has changed, we can safely ignore all calls as we examine the function
1679 body backwards until we reach statements in section 2 because these
1680 calls cannot be ancestor constructors or destructors (if the input is
1681 not bogus) and so do not change the dynamic type (this holds true only
1682 for automatically allocated objects but at the moment we devirtualize
1683 only these). We then must detect that statements in section 2 change
1684 the dynamic type and can try to derive the new type. That is enough
1685 and we can stop, we will never see the calls into constructors of
1686 sub-objects in this code.
1687
1688 Therefore if the static outer type was found (outer_type)
1689 we can safely ignore tci.speculative that is set on calls and give up
1690 only if there was dyanmic type store that may affect given variable
1691 (seen_unanalyzed_store) */
1692
1693 if (!tci.type_maybe_changed
1694 || (outer_type
1695 && !dynamic
1696 && !tci.seen_unanalyzed_store
1697 && !tci.multiple_types_encountered
1698 && offset == tci.offset
1699 && types_same_for_odr (tci.known_current_type,
1700 outer_type)))
1701 {
1702 if (!outer_type || tci.seen_unanalyzed_store)
1703 return false;
1704 if (maybe_in_construction)
1705 maybe_in_construction = false;
1706 if (dump_file)
1707 fprintf (dump_file, " No dynamic type change found.\n");
1708 return true;
1709 }
1710
1711 if (tci.known_current_type
1712 && !function_entry_reached
1713 && !tci.multiple_types_encountered)
1714 {
1715 if (!tci.speculative)
1716 {
1717 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1718 offset = tci.known_current_offset;
1719 dynamic = true;
1720 maybe_in_construction = false;
1721 maybe_derived_type = false;
1722 if (dump_file)
1723 fprintf (dump_file, " Determined dynamic type.\n");
1724 }
1725 else if (!speculative_outer_type
1726 || speculative_maybe_derived_type)
1727 {
1728 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1729 speculative_offset = tci.known_current_offset;
1730 speculative_maybe_derived_type = false;
1731 if (dump_file)
1732 fprintf (dump_file, " Determined speculative dynamic type.\n");
1733 }
1734 }
1735 else if (dump_file)
1736 {
1737 fprintf (dump_file, " Found multiple types%s%s\n",
1738 function_entry_reached ? " (function entry reached)" : "",
1739 function_entry_reached ? " (multiple types encountered)" : "");
1740 }
1741
1742 return false;
1743 }
1744
1745 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1746 seems consistent (and useful) with what we already have in the non-speculative context. */
1747
1748 bool
1749 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1750 HOST_WIDE_INT spec_offset,
1751 bool spec_maybe_derived_type,
1752 tree otr_type) const
1753 {
1754 if (!flag_devirtualize_speculatively)
1755 return false;
1756
1757 /* Non-polymorphic types are useless for deriving likely polymorphic
1758 call targets. */
1759 if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1760 return false;
1761
1762 /* If we know nothing, speculation is always good. */
1763 if (!outer_type)
1764 return true;
1765
1766 /* Speculation is only useful to avoid derived types.
1767 This is not 100% true for placement new, where the outer context may
1768 turn out to be useless, but ignore these for now. */
1769 if (!maybe_derived_type)
1770 return false;
1771
1772 /* If types agrees, speculation is consistent, but it makes sense only
1773 when it says something new. */
1774 if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1775 return maybe_derived_type && !spec_maybe_derived_type;
1776
1777 /* If speculation does not contain the type in question, ignore it. */
1778 if (otr_type
1779 && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1780 return false;
1781
1782 /* If outer type already contains speculation as a filed,
1783 it is useless. We already know from OUTER_TYPE
1784 SPEC_TYPE and that it is not in the construction. */
1785 if (contains_type_p (outer_type, offset - spec_offset,
1786 spec_outer_type, false, false))
1787 return false;
1788
1789 /* If speculative outer type is not more specified than outer
1790 type, just give up.
1791 We can only decide this safely if we can compare types with OUTER_TYPE.
1792 */
1793 if ((!in_lto_p || odr_type_p (outer_type))
1794 && !contains_type_p (spec_outer_type,
1795 spec_offset - offset,
1796 outer_type, false))
1797 return false;
1798 return true;
1799 }
1800
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if THIS was changed.  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Ignore incoming speculation that is inconsistent or useless.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seems valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type at the same offset: the only possible improvement is
	     dropping the derived-type bit.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      /* Report a change only when the recorded speculation actually
	 differs from what we had before.  */
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
1897
/* Make speculation less specific so
   NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if THIS was changed.  */

bool
ipa_polymorphic_call_context::meet_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* Meeting with an unknown speculation drops ours.  */
  if (!new_outer_type && speculative_outer_type)
    {
      clear_speculation ();
      return true;
    }

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Nothing to do when our own speculation is absent or invalid.  */
  if (!speculative_outer_type
      || !speculation_consistent_p (speculative_outer_type,
				    speculative_offset,
				    speculative_maybe_derived_type,
				    otr_type))
    return false;

  /* An inconsistent incoming speculation invalidates the meet.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    {
      clear_speculation ();
      return true;
    }

  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      /* Same types must agree on the offset; otherwise give up.  */
      if (speculative_offset != new_offset)
	{
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Weaken the derived-type bit when the other side allows
	     derivations.  */
	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = true;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
			    speculative_outer_type, false, false))
    return false;
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset,
			    new_outer_type, false, false))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (new_outer_type,
			    new_offset - speculative_offset,
			    speculative_outer_type, false, true))
    {
      if (!speculative_maybe_derived_type)
	{
	  speculative_maybe_derived_type = true;
	  return true;
	}
      return false;
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset, new_outer_type, false, true))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = true;
      return true;
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Giving up on speculative meet\n");
      clear_speculation ();
      return true;
    }
}
1993
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If the actual type the context is being used in is known, OTR_TYPE should
   be set accordingly.  This improves quality of the combined result.  */
1998
bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
					    tree otr_type)
{
  bool updated = false;

  /* A useless CTX adds no information; an already invalid THIS can not
     be strengthened further.  */
  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL) */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context: ");
      ctx.dump (dump_file);
      if (otr_type)
	{
	  fprintf (dump_file, "To be used with type: ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  /* If call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  /* CTX carries no outer type info; nothing to strengthen with.  */
  if (!ctx.outer_type)
    ;
  /* THIS carries no outer type info; adopt CTX's wholesale.  */
  else if (!outer_type)
    {
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      /* Same type at two different offsets can not both be true; only
	 give up when the type size is a known constant so the offsets
	 are actually comparable.  */
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
	  clear_speculation ();
	  clear_outer_type ();
	  invalid = true;
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Outer types match, merging flags\n");
      /* Strengthening can only ever clear the maybe_* flags: a flag
	 stays set only when both contexts have it set.  */
      if (maybe_in_construction && !ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = false;
	}
      if (maybe_derived_type && !ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = false;
	}
      if (dynamic && !ctx.dynamic)
	{
	  updated = true;
	  dynamic = false;
	}
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check if second context permits the first
	 and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.

	 If OTR_TYPE is known, we however can expect that
	 restrict_to_inner_class should have discovered the same base
	 type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Contextes disagree -> invalid\n");
	  goto invalidate;
	}
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      /* The wider type can only be adopted when derivation was still
	 possible; otherwise THIS already named the exact type.  */
      if (maybe_derived_type)
	{
	  outer_type = ctx.outer_type;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}

      /* If we do not know how the context is being used, we can
	 not clear MAYBE_IN_CONSTRUCTION because it may be offseted
	 to other component of OUTER_TYPE later and we know nothing
	 about it.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      /* Same MAYBE_IN_CONSTRUCTION reasoning as in the branch above.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  /* THIS names the exact type, so a strictly derived CTX is a
	     contradiction — unless the types are not ODR comparable, in
	     which case contains_type_p may have given a false positive.  */
	  if (!ctx.maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Second context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
      /* Pick variant deeper in the hierarchy.  */
      else
	{
	  outer_type = ctx.outer_type;
	  maybe_in_construction = ctx.maybe_in_construction;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
	{
	  /* Mirror image of the previous branch: CTX names the exact
	     base type, which THIS (a derived object) contradicts.  */
	  if (!maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "First context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	  /* Pick the base type.  */
	  else if (maybe_in_construction)
	    {
	      outer_type = ctx.outer_type;
	      maybe_in_construction = ctx.maybe_in_construction;
	      maybe_derived_type = ctx.maybe_derived_type;
	      offset = ctx.offset;
	      dynamic = ctx.dynamic;
	      updated = true;
	    }
	}
    }
  /* TODO handle merging using hierarchy.  */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  /* Speculative parts are merged independently of the sure parts.  */
  updated |= combine_speculation_with (ctx.speculative_outer_type,
				       ctx.speculative_offset,
				       ctx.speculative_maybe_derived_type,
				       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as: ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  invalid = true;
  clear_speculation ();
  clear_outer_type ();
  return true;
}
2221
2222 /* Take non-speculative info, merge it with speculative and clear speculation.
2223 Used when we no longer manage to keep track of actual outer type, but we
2224 think it is still there.
2225
2226 If OTR_TYPE is set, the transformation can be done more effectively assuming
2227 that context is going to be used only that way. */
2228
2229 void
2230 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2231 {
2232 tree spec_outer_type = outer_type;
2233 HOST_WIDE_INT spec_offset = offset;
2234 bool spec_maybe_derived_type = maybe_derived_type;
2235
2236 if (invalid)
2237 {
2238 invalid = false;
2239 clear_outer_type ();
2240 clear_speculation ();
2241 return;
2242 }
2243 if (!outer_type)
2244 return;
2245 clear_outer_type ();
2246 combine_speculation_with (spec_outer_type, spec_offset,
2247 spec_maybe_derived_type,
2248 otr_type);
2249 }
2250
/* Used when we cannot track a dynamic type change.  This speculatively
   assumes that no type change is happening.  */
2253
2254 void
2255 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2256 tree otr_type)
2257 {
2258 if (dynamic)
2259 make_speculative (otr_type);
2260 else if (in_poly_cdtor)
2261 maybe_in_construction = true;
2262 }
2263
2264 /* Return TRUE if this context conveys the same information as OTHER. */
2265
2266 bool
2267 ipa_polymorphic_call_context::equal_to
2268 (const ipa_polymorphic_call_context &x) const
2269 {
2270 if (useless_p ())
2271 return x.useless_p ();
2272 if (invalid)
2273 return x.invalid;
2274 if (x.useless_p () || x.invalid)
2275 return false;
2276
2277 if (outer_type)
2278 {
2279 if (!x.outer_type
2280 || !types_odr_comparable (outer_type, x.outer_type)
2281 || !types_same_for_odr (outer_type, x.outer_type)
2282 || offset != x.offset
2283 || maybe_in_construction != x.maybe_in_construction
2284 || maybe_derived_type != x.maybe_derived_type
2285 || dynamic != x.dynamic)
2286 return false;
2287 }
2288 else if (x.outer_type)
2289 return false;
2290
2291
2292 if (speculative_outer_type
2293 && speculation_consistent_p (speculative_outer_type, speculative_offset,
2294 speculative_maybe_derived_type, NULL_TREE))
2295 {
2296 if (!x.speculative_outer_type)
2297 return false;
2298
2299 if (!types_odr_comparable (speculative_outer_type,
2300 x.speculative_outer_type)
2301 || !types_same_for_odr (speculative_outer_type,
2302 x.speculative_outer_type)
2303 || speculative_offset != x.speculative_offset
2304 || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2305 return false;
2306 }
2307 else if (x.speculative_outer_type
2308 && x.speculation_consistent_p (x.speculative_outer_type,
2309 x.speculative_offset,
2310 x.speculative_maybe_derived_type,
2311 NULL))
2312 return false;
2313
2314 return true;
2315 }
2316
2317 /* Modify context to be strictly less restrictive than CTX. */
2318
2319 bool
2320 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2321 tree otr_type)
2322 {
2323 bool updated = false;
2324
2325 if (useless_p () || ctx.invalid)
2326 return false;
2327
2328 /* Restricting context to inner type makes merging easier, however do not
2329 do that unless we know how the context is used (OTR_TYPE is non-NULL) */
2330 if (otr_type && !useless_p () && !ctx.useless_p ())
2331 {
2332 restrict_to_inner_class (otr_type);
2333 ctx.restrict_to_inner_class (otr_type);
2334 if(invalid)
2335 return false;
2336 }
2337
2338 if (equal_to (ctx))
2339 return false;
2340
2341 if (ctx.useless_p () || invalid)
2342 {
2343 *this = ctx;
2344 return true;
2345 }
2346
2347 if (dump_file && (dump_flags & TDF_DETAILS))
2348 {
2349 fprintf (dump_file, "Polymorphic call context meet:");
2350 dump (dump_file);
2351 fprintf (dump_file, "With context: ");
2352 ctx.dump (dump_file);
2353 if (otr_type)
2354 {
2355 fprintf (dump_file, "To be used with type: ");
2356 print_generic_expr (dump_file, otr_type, TDF_SLIM);
2357 fprintf (dump_file, "\n");
2358 }
2359 }
2360
2361 if (!dynamic && ctx.dynamic)
2362 {
2363 dynamic = true;
2364 updated = true;
2365 }
2366
2367 /* If call is known to be invalid, we are done. */
2368 if (!outer_type)
2369 ;
2370 else if (!ctx.outer_type)
2371 {
2372 clear_outer_type ();
2373 updated = true;
2374 }
2375 /* If types are known to be same, merging is quite easy. */
2376 else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2377 {
2378 if (offset != ctx.offset
2379 && TYPE_SIZE (outer_type)
2380 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2381 {
2382 if (dump_file && (dump_flags & TDF_DETAILS))
2383 fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2384 clear_outer_type ();
2385 return true;
2386 }
2387 if (dump_file && (dump_flags & TDF_DETAILS))
2388 fprintf (dump_file, "Outer types match, merging flags\n");
2389 if (!maybe_in_construction && ctx.maybe_in_construction)
2390 {
2391 updated = true;
2392 maybe_in_construction = true;
2393 }
2394 if (!maybe_derived_type && ctx.maybe_derived_type)
2395 {
2396 updated = true;
2397 maybe_derived_type = true;
2398 }
2399 if (!dynamic && ctx.dynamic)
2400 {
2401 updated = true;
2402 dynamic = true;
2403 }
2404 }
2405 /* See if one type contains the other as a field (not base). */
2406 else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2407 outer_type, false, false))
2408 {
2409 if (dump_file && (dump_flags & TDF_DETAILS))
2410 fprintf (dump_file, "Second type contain the first as a field\n");
2411
2412 /* The second type is more specified, so we keep the first.
2413 We need to set DYNAMIC flag to avoid declaring context INVALID
2414 of OFFSET ends up being out of range. */
2415 if (!dynamic
2416 && (ctx.dynamic
2417 || (!otr_type
2418 && (!TYPE_SIZE (ctx.outer_type)
2419 || !TYPE_SIZE (outer_type)
2420 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2421 TYPE_SIZE (outer_type), 0)))))
2422 {
2423 dynamic = true;
2424 updated = true;
2425 }
2426 }
2427 else if (contains_type_p (outer_type, offset - ctx.offset,
2428 ctx.outer_type, false, false))
2429 {
2430 if (dump_file && (dump_flags & TDF_DETAILS))
2431 fprintf (dump_file, "First type contain the second as a field\n");
2432
2433 if (!dynamic
2434 && (ctx.dynamic
2435 || (!otr_type
2436 && (!TYPE_SIZE (ctx.outer_type)
2437 || !TYPE_SIZE (outer_type)
2438 || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2439 TYPE_SIZE (outer_type), 0)))))
2440 dynamic = true;
2441 outer_type = ctx.outer_type;
2442 offset = ctx.offset;
2443 dynamic = ctx.dynamic;
2444 maybe_in_construction = ctx.maybe_in_construction;
2445 maybe_derived_type = ctx.maybe_derived_type;
2446 updated = true;
2447 }
2448 /* See if OUTER_TYPE is base of CTX.OUTER_TYPE. */
2449 else if (contains_type_p (ctx.outer_type,
2450 ctx.offset - offset, outer_type, false, true))
2451 {
2452 if (dump_file && (dump_flags & TDF_DETAILS))
2453 fprintf (dump_file, "First type is base of second\n");
2454 if (!maybe_derived_type)
2455 {
2456 maybe_derived_type = true;
2457 updated = true;
2458 }
2459 if (!maybe_in_construction && ctx.maybe_in_construction)
2460 {
2461 maybe_in_construction = true;
2462 updated = true;
2463 }
2464 if (!dynamic && ctx.dynamic)
2465 {
2466 dynamic = true;
2467 updated = true;
2468 }
2469 }
2470 /* See if CTX.OUTER_TYPE is base of OUTER_TYPE. */
2471 else if (contains_type_p (outer_type,
2472 offset - ctx.offset, ctx.outer_type, false, true))
2473 {
2474 if (dump_file && (dump_flags & TDF_DETAILS))
2475 fprintf (dump_file, "Second type is base of first\n");
2476 outer_type = ctx.outer_type;
2477 offset = ctx.offset;
2478 updated = true;
2479 if (!maybe_derived_type)
2480 maybe_derived_type = true;
2481 if (!maybe_in_construction && ctx.maybe_in_construction)
2482 maybe_in_construction = true;
2483 if (!dynamic && ctx.dynamic)
2484 dynamic = true;
2485 }
2486 /* TODO handle merging using hiearchy. */
2487 else
2488 {
2489 if (dump_file && (dump_flags & TDF_DETAILS))
2490 fprintf (dump_file, "Giving up on meet\n");
2491 clear_outer_type ();
2492 updated = true;
2493 }
2494
2495 updated |= meet_speculation_with (ctx.speculative_outer_type,
2496 ctx.speculative_offset,
2497 ctx.speculative_maybe_derived_type,
2498 otr_type);
2499
2500 if (updated && dump_file && (dump_flags & TDF_DETAILS))
2501 {
2502 fprintf (dump_file, "Updated as: ");
2503 dump (dump_file);
2504 fprintf (dump_file, "\n");
2505 }
2506 return updated;
2507 }