1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55 #include "tree-cfgcleanup.h"
56
57 /* Function summary where the parameter infos are actually stored. */
58 ipa_node_params_t *ipa_node_params_sum = NULL;
59
60 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
61
62 /* Edge summary for IPA-CP edge information. */
63 ipa_edge_args_sum_t *ipa_edge_args_sum;
64
65 /* Traits for a hash table for reusing already existing ipa_bits. */
66
67 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
68 {
69 typedef ipa_bits *value_type;
70 typedef ipa_bits *compare_type;
71 static hashval_t
72 hash (const ipa_bits *p)
73 {
74 hashval_t t = (hashval_t) p->value.to_shwi ();
75 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
76 }
77 static bool
78 equal (const ipa_bits *a, const ipa_bits *b)
79 {
80 return a->value == b->value && a->mask == b->mask;
81 }
82 static void
83 mark_empty (ipa_bits *&p)
84 {
85 p = NULL;
86 }
87 static bool
88 is_empty (const ipa_bits *p)
89 {
90 return p == NULL;
91 }
92 static bool
93 is_deleted (const ipa_bits *p)
94 {
95 return p == reinterpret_cast<const ipa_bits *> (1);
96 }
97 static void
98 mark_deleted (ipa_bits *&p)
99 {
100 p = reinterpret_cast<ipa_bits *> (1);
101 }
102 };
103
 104 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
105 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
107 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
108 the equiv bitmap is not hashed and is expected to be NULL. */
109
110 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
111 {
112 typedef value_range_base *value_type;
113 typedef value_range_base *compare_type;
114 static hashval_t
115 hash (const value_range_base *p)
116 {
117 inchash::hash hstate (p->kind ());
118 inchash::add_expr (p->min (), hstate);
119 inchash::add_expr (p->max (), hstate);
120 return hstate.end ();
121 }
122 static bool
123 equal (const value_range_base *a, const value_range_base *b)
124 {
125 return a->equal_p (*b);
126 }
127 static void
128 mark_empty (value_range_base *&p)
129 {
130 p = NULL;
131 }
132 static bool
133 is_empty (const value_range_base *p)
134 {
135 return p == NULL;
136 }
137 static bool
138 is_deleted (const value_range_base *p)
139 {
140 return p == reinterpret_cast<const value_range_base *> (1);
141 }
142 static void
143 mark_deleted (value_range_base *&p)
144 {
145 p = reinterpret_cast<value_range_base *> (1);
146 }
147 };
148
 149 /* Hash table for avoiding repeated allocations of equal value_ranges. */
150 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
152 /* Holders of ipa cgraph hooks: */
153 static struct cgraph_node_hook_list *function_insertion_hook_holder;
154
155 /* Description of a reference to an IPA constant. */
156 struct ipa_cst_ref_desc
157 {
158 /* Edge that corresponds to the statement which took the reference. */
159 struct cgraph_edge *cs;
160 /* Linked list of duplicates created when call graph edges are cloned. */
161 struct ipa_cst_ref_desc *next_duplicate;
 162 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
 163 is out of control. */
164 int refcount;
165 };
166
167 /* Allocation pool for reference descriptions. */
168
169 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
170 ("IPA-PROP ref descriptions");
171
172 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175 static bool
176 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177 {
178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
179
180 if (!fs_opts)
181 return false;
182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
183 }
184
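/* Illustrative sketch (not from a particular testcase): a node whose decl
   carries function-specific optimization options that disable either
   optimization altogether or IPA-CP is skipped by the analysis.  For
   example, assuming a hypothetical source function

     __attribute__ ((optimize ("O0")))
     int no_ipa_here (int x) { return x + 1; }

   opt_for_fn (node->decl, optimize) evaluates to 0 for it, so the predicate
   above returns true and IPA-CP leaves the function alone.  */
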
 185 /* Return index of the formal whose tree is PTREE in the function whose
 186 parameter descriptors are DESCRIPTORS. */
187
188 static int
189 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
191 {
192 int i, count;
193
194 count = vec_safe_length (descriptors);
195 for (i = 0; i < count; i++)
196 if ((*descriptors)[i].decl_or_type == ptree)
197 return i;
198
199 return -1;
200 }
201
202 /* Return index of the formal whose tree is PTREE in function which corresponds
203 to INFO. */
204
205 int
206 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
207 {
208 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209 }
210
211 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
212 NODE. */
213
214 static void
215 ipa_populate_param_decls (struct cgraph_node *node,
216 vec<ipa_param_descriptor, va_gc> &descriptors)
217 {
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
222
223 fndecl = node->decl;
224 gcc_assert (gimple_has_body_p (fndecl));
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
228 {
229 descriptors[param_num].decl_or_type = parm;
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
232 param_num++;
233 }
234 }
235
236 /* Return how many formal parameters FNDECL has. */
237
238 int
239 count_formal_params (tree fndecl)
240 {
241 tree parm;
242 int count = 0;
243 gcc_assert (gimple_has_body_p (fndecl));
244
245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
246 count++;
247
248 return count;
249 }
250
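/* A small hedged example of how the two helpers above cooperate (the
   function name is made up): for

     void fn (int a, double b);

   DECL_ARGUMENTS yields the chain a -> b, so count_formal_params returns 2
   and ipa_populate_param_decls stores the PARM_DECLs of a and b into
   descriptors[0] and descriptors[1] together with their move costs.  */
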
 251 /* Dump information about the parameter with index I of the function
 252 corresponding to INFO into FILE, i.e. its index and, if known, its
 253 declaration. */
254
255 void
256 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
257 {
258 fprintf (file, "param #%i", i);
259 if ((*info->descriptors)[i].decl_or_type)
260 {
261 fprintf (file, " ");
262 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
263 }
264 }
265
266 /* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
268
269 static bool
270 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271 {
272 class ipa_node_params *info = IPA_NODE_REF (node);
273
274 if (!info->descriptors && param_count)
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
281 }
282
283 /* Initialize the ipa_node_params structure associated with NODE by counting
284 the function parameters, creating the descriptors and populating their
285 param_decls. */
286
287 void
288 ipa_initialize_node_params (struct cgraph_node *node)
289 {
290 class ipa_node_params *info = IPA_NODE_REF (node);
291
292 if (!info->descriptors
293 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
294 ipa_populate_param_decls (node, *info->descriptors);
295 }
296
297 /* Print the jump functions associated with call graph edge CS to file F. */
298
299 static void
300 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
301 {
302 int i, count;
303
304 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
305 for (i = 0; i < count; i++)
306 {
307 struct ipa_jump_func *jump_func;
308 enum jump_func_type type;
309
310 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
311 type = jump_func->type;
312
313 fprintf (f, " param %d: ", i);
314 if (type == IPA_JF_UNKNOWN)
315 fprintf (f, "UNKNOWN\n");
316 else if (type == IPA_JF_CONST)
317 {
318 tree val = jump_func->value.constant.value;
319 fprintf (f, "CONST: ");
320 print_generic_expr (f, val);
321 if (TREE_CODE (val) == ADDR_EXPR
322 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
323 {
324 fprintf (f, " -> ");
325 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
326 }
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_PASS_THROUGH)
330 {
331 fprintf (f, "PASS THROUGH: ");
332 fprintf (f, "%d, op %s",
333 jump_func->value.pass_through.formal_id,
334 get_tree_code_name(jump_func->value.pass_through.operation));
335 if (jump_func->value.pass_through.operation != NOP_EXPR)
336 {
337 fprintf (f, " ");
338 print_generic_expr (f, jump_func->value.pass_through.operand);
339 }
340 if (jump_func->value.pass_through.agg_preserved)
341 fprintf (f, ", agg_preserved");
342 fprintf (f, "\n");
343 }
344 else if (type == IPA_JF_ANCESTOR)
345 {
346 fprintf (f, "ANCESTOR: ");
347 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
348 jump_func->value.ancestor.formal_id,
349 jump_func->value.ancestor.offset);
350 if (jump_func->value.ancestor.agg_preserved)
351 fprintf (f, ", agg_preserved");
352 fprintf (f, "\n");
353 }
354
355 if (jump_func->agg.items)
356 {
357 struct ipa_agg_jf_item *item;
358 int j;
359
360 fprintf (f, " Aggregate passed by %s:\n",
361 jump_func->agg.by_ref ? "reference" : "value");
362 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
363 {
364 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
365 item->offset);
366 if (TYPE_P (item->value))
367 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
368 tree_to_uhwi (TYPE_SIZE (item->value)));
369 else
370 {
371 fprintf (f, "cst: ");
372 print_generic_expr (f, item->value);
373 }
374 fprintf (f, "\n");
375 }
376 }
377
378 class ipa_polymorphic_call_context *ctx
379 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
380 if (ctx && !ctx->useless_p ())
381 {
382 fprintf (f, " Context: ");
383 ctx->dump (dump_file);
384 }
385
386 if (jump_func->bits)
387 {
388 fprintf (f, " value: ");
389 print_hex (jump_func->bits->value, f);
390 fprintf (f, ", mask: ");
391 print_hex (jump_func->bits->mask, f);
392 fprintf (f, "\n");
393 }
394 else
395 fprintf (f, " Unknown bits\n");
396
397 if (jump_func->m_vr)
398 {
399 fprintf (f, " VR ");
400 fprintf (f, "%s[",
401 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
402 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
403 fprintf (f, ", ");
404 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
405 fprintf (f, "]\n");
406 }
407 else
408 fprintf (f, " Unknown VR\n");
409 }
410 }
411
412
413 /* Print the jump functions of all arguments on all call graph edges going from
414 NODE to file F. */
415
416 void
417 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
418 {
419 struct cgraph_edge *cs;
420
421 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
422 for (cs = node->callees; cs; cs = cs->next_callee)
423 {
424 if (!ipa_edge_args_info_available_for_edge_p (cs))
425 continue;
426
427 fprintf (f, " callsite %s -> %s : \n",
428 node->dump_name (),
429 cs->callee->dump_name ());
430 ipa_print_node_jump_functions_for_edge (f, cs);
431 }
432
433 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
434 {
435 class cgraph_indirect_call_info *ii;
436 if (!ipa_edge_args_info_available_for_edge_p (cs))
437 continue;
438
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
443 ii->member_ptr ? "member ptr" : "aggregate",
444 ii->param_index, ii->offset,
445 ii->by_ref ? "by reference" : "by_value");
446 else
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
451
452 if (cs->call_stmt)
453 {
454 fprintf (f, ", for stmt ");
455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
456 }
457 else
458 fprintf (f, "\n");
459 if (ii->polymorphic)
460 ii->context.dump (f);
461 ipa_print_node_jump_functions_for_edge (f, cs);
462 }
463 }
464
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
466
467 void
468 ipa_print_all_jump_functions (FILE *f)
469 {
470 struct cgraph_node *node;
471
472 fprintf (f, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node)
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477 }
478
 479 /* Set JFUNC to be a jump function that knows really nothing (IPA_JF_UNKNOWN). */
480
481 static void
482 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483 {
484 jfunc->type = IPA_JF_UNKNOWN;
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
487 }
488
 489 /* Set DST to be a copy of another jump function SRC (to be used by jump
 490 function combination code). The two functions will share their rdesc. */
491
492 static void
493 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496 {
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500 }
501
502 /* Set JFUNC to be a constant jmp function. */
503
504 static void
505 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
507 {
508 jfunc->type = IPA_JF_CONST;
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
515
516 rdesc = ipa_refdesc_pool.allocate ();
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
524 }
525
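/* Hedged illustration of the rdesc above: when the constant passed in a
   call is the address of a function, e.g. in a hypothetical snippet

     extern void callback (void);
     helper (&callback);

   the jump function records &callback and allocates a ref description with
   refcount 1, so that later IPA passes can account for (and possibly
   remove) the reference that the call statement took on callback.  */
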
526 /* Set JFUNC to be a simple pass-through jump function. */
527 static void
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
529 bool agg_preserved)
530 {
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
535 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 }
537
 538 /* Set JFUNC to be a unary pass-through jump function. */
539
540 static void
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543 {
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549 }
 550 /* Set JFUNC to be an arithmetic pass-through jump function. */
551
552 static void
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555 {
556 jfunc->type = IPA_JF_PASS_THROUGH;
557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
560 jfunc->value.pass_through.agg_preserved = false;
561 }
562
563 /* Set JFUNC to be an ancestor jump function. */
564
565 static void
566 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
567 int formal_id, bool agg_preserved)
568 {
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
572 jfunc->value.ancestor.agg_preserved = agg_preserved;
573 }
574
 575 /* Get IPA BB information about the given BB. FBI is the context of analysis
 576 of this function body. */
577
578 static struct ipa_bb_info *
579 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
580 {
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583 }
584
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
587
588 struct prop_type_change_info
589 {
590 /* Offset into the object where there is the virtual method pointer we are
591 looking for. */
592 HOST_WIDE_INT offset;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
594 type change. */
595 tree object;
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed;
598 };
599
600 /* Return true if STMT can modify a virtual method table pointer.
601
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
605 pointers and that constructors essentially look in the following way:
606
607 1) The very first thing they do is that they call constructors of ancestor
608 sub-objects that have them.
609
 610 2) Then VMT pointers of this and all its ancestors are set to new values
 611 corresponding to the type of the constructor.
 612
 613 3) Only afterwards, other stuff such as constructors of member sub-objects
 614 and the code written by the user is run. Only this may include calling
 615 virtual functions, directly or indirectly.
616
617 There is no way to call a constructor of an ancestor sub-object in any
618 other way.
619
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
623
624 The most important fact to derive from the above is that if, for some
625 statement in the section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop, we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
635 */
636
637 static bool
638 stmt_may_be_vtbl_ptr_store (gimple *stmt)
639 {
640 if (is_gimple_call (stmt))
641 return false;
642 if (gimple_clobber_p (stmt))
643 return false;
644 else if (is_gimple_assign (stmt))
645 {
646 tree lhs = gimple_assign_lhs (stmt);
647
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
649 {
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
652 return false;
653
654 if (TREE_CODE (lhs) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
656 return false;
657 /* In the future we might want to use get_ref_base_and_extent to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost like if it was a component ref. */
660 }
661 }
662 return true;
663 }
664
 665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
 666 to check whether a particular statement may modify the virtual table
 667 pointer. It stores its result into DATA, which points to a
 668 prop_type_change_info structure. */
669
670 static bool
671 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672 {
673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683 }
684
 685 /* See if ARG is a PARAM_DECL describing an instance passed by pointer or
 686 reference in FUNCTION. Return true if the dynamic type may change between
 687 the beginning of the function and the point where CALL is invoked; return
 688 false if it is known not to.
 689
 690 Generally functions are not allowed to change the type of such instances,
 691 but they may call destructors. We assume that methods cannot destroy the THIS
 692 pointer. Also, as a special case, constructors and destructors may change the type of the THIS pointer. */
693
694 static bool
695 param_type_may_change_p (tree function, tree arg, gimple *call)
696 {
 697 /* Pure functions cannot do any changes on the dynamic type;
 698 that requires writing to memory. */
 699 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
 700 return false;
 701 /* We need to check if we are within an inlined constructor
 702 or destructor (ideally we would have a way to check that the
 703 inline cdtor is actually working on ARG, but we don't have
 704 an easy tie on this, so punt on all non-pure cdtors.
 705 We may also record the types of cdtors and once we know the type
 706 of the instance match them.
707
708 Also code unification optimizations may merge calls from
709 different blocks making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
712 return true;
713 if (TREE_CODE (arg) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg)
715 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
716 {
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
719 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
 720 /* THIS pointer of a method - here we want to watch constructors
 721 and destructors as those definitely may change the dynamic
 722 type. */
723 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function)
725 && !DECL_CXX_DESTRUCTOR_P (function)
726 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
727 {
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
730 block = BLOCK_SUPERCONTEXT (block))
731 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
732 return true;
733 return false;
734 }
735 }
736 return true;
737 }
738
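/* Hedged example of the THIS-pointer special case above (the types are made
   up): within a constructor such as

     struct A { virtual void f (); };
     struct B : A { B () { } };

   the dynamic type of *this changes from A to B while B::B runs, so for the
   THIS argument of a cdtor, or when the call site is inlined from a cdtor
   block, the function conservatively returns true.  */
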
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
741 it is, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
745
 746 This is a helper function for detect_type_change and detect_type_change_ssa
 747 that does the heavy work which is usually unnecessary. */
748
749 static bool
750 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
751 tree base, tree comp_type, gcall *call,
752 struct ipa_jump_func *jfunc,
753 HOST_WIDE_INT offset)
754 {
755 struct prop_type_change_info tci;
756 ao_ref ao;
757
758 gcc_checking_assert (DECL_P (arg)
759 || TREE_CODE (arg) == MEM_REF
760 || handled_component_p (arg));
761
762 comp_type = TYPE_MAIN_VARIANT (comp_type);
763
764 /* Const calls cannot call virtual methods through VMT and so type changes do
765 not matter. */
766 if (!flag_devirtualize || !gimple_vuse (call)
767 /* Be sure expected_type is polymorphic. */
768 || !comp_type
769 || TREE_CODE (comp_type) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
772 return true;
773
774 ao_ref_init (&ao, arg);
775 ao.base = base;
776 ao.offset = offset;
777 ao.size = POINTER_SIZE;
778 ao.max_size = ao.size;
779
780 tci.offset = offset;
781 tci.object = get_base_address (arg);
782 tci.type_maybe_changed = false;
783
784 int walked
785 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
786 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
787
788 if (walked >= 0 && !tci.type_maybe_changed)
789 return false;
790
791 ipa_set_jf_unknown (jfunc);
792 return true;
793 }
794
795 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
796 If it is, return true and fill in the jump function JFUNC with relevant type
797 information or set it to unknown. ARG is the object itself (not a pointer
798 to it, unless dereferenced). BASE is the base of the memory access as
799 returned by get_ref_base_and_extent, as is the offset. */
800
801 static bool
802 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
803 tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
804 HOST_WIDE_INT offset)
805 {
806 if (!flag_devirtualize)
807 return false;
808
809 if (TREE_CODE (base) == MEM_REF
810 && !param_type_may_change_p (current_function_decl,
811 TREE_OPERAND (base, 0),
812 call))
813 return false;
814 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
815 call, jfunc, offset);
816 }
817
818 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
819 SSA name (its dereference will become the base and the offset is assumed to
820 be zero). */
821
822 static bool
823 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
824 gcall *call, struct ipa_jump_func *jfunc)
825 {
826 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
827 if (!flag_devirtualize
828 || !POINTER_TYPE_P (TREE_TYPE (arg)))
829 return false;
830
831 if (!param_type_may_change_p (current_function_decl, arg, call))
832 return false;
833
834 arg = build2 (MEM_REF, ptr_type_node, arg,
835 build_int_cst (ptr_type_node, 0));
836
837 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
838 call, jfunc, 0);
839 }
840
841 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
842 boolean variable pointed to by DATA. */
843
844 static bool
845 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
846 void *data)
847 {
848 bool *b = (bool *) data;
849 *b = true;
850 return true;
851 }
852
853 /* Find the nearest valid aa status for parameter specified by INDEX that
854 dominates BB. */
855
856 static struct ipa_param_aa_status *
857 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
858 int index)
859 {
860 while (true)
861 {
862 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
863 if (!bb)
864 return NULL;
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (!bi->param_aa_statuses.is_empty ()
867 && bi->param_aa_statuses[index].valid)
868 return &bi->param_aa_statuses[index];
869 }
870 }
871
872 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
 873 structures and/or initialize the result with a dominating description as
874 necessary. */
875
876 static struct ipa_param_aa_status *
877 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
878 int index)
879 {
880 gcc_checking_assert (fbi);
881 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
882 if (bi->param_aa_statuses.is_empty ())
883 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
884 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
885 if (!paa->valid)
886 {
887 gcc_checking_assert (!paa->parm_modified
888 && !paa->ref_modified
889 && !paa->pt_modified);
890 struct ipa_param_aa_status *dom_paa;
891 dom_paa = find_dominating_aa_status (fbi, bb, index);
892 if (dom_paa)
893 *paa = *dom_paa;
894 else
895 paa->valid = true;
896 }
897
898 return paa;
899 }
900
901 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
902 a value known not to be modified in this function before reaching the
 903 statement STMT. FBI holds information about the function that we have
 904 gathered so far but which does not survive the summary building stage. */
905
906 static bool
907 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
908 gimple *stmt, tree parm_load)
909 {
910 struct ipa_param_aa_status *paa;
911 bool modified = false;
912 ao_ref refd;
913
914 tree base = get_base_address (parm_load);
915 gcc_assert (TREE_CODE (base) == PARM_DECL);
916 if (TREE_READONLY (base))
917 return true;
918
919 gcc_checking_assert (fbi);
920 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
921 if (paa->parm_modified)
922 return false;
923
924 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
925 ao_ref_init (&refd, parm_load);
926 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
927 &modified, NULL, NULL,
928 fbi->aa_walk_budget + 1);
929 if (walked < 0)
930 {
931 modified = true;
932 if (fbi)
933 fbi->aa_walk_budget = 0;
934 }
935 else if (fbi)
936 fbi->aa_walk_budget -= walked;
937 if (paa && modified)
938 paa->parm_modified = true;
939 return !modified;
940 }
941
 942 /* If STMT is an assignment that loads a value from a parameter declaration,
943 return the index of the parameter in ipa_node_params which has not been
944 modified. Otherwise return -1. */
945
946 static int
947 load_from_unmodified_param (struct ipa_func_body_info *fbi,
948 vec<ipa_param_descriptor, va_gc> *descriptors,
949 gimple *stmt)
950 {
951 int index;
952 tree op1;
953
954 if (!gimple_assign_single_p (stmt))
955 return -1;
956
957 op1 = gimple_assign_rhs1 (stmt);
958 if (TREE_CODE (op1) != PARM_DECL)
959 return -1;
960
961 index = ipa_get_param_decl_index_1 (descriptors, op1);
962 if (index < 0
963 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
964 return -1;
965
966 return index;
967 }
968
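/* Illustrative sketch: for a parameter that is not a gimple register, the
   body may contain a load such as

     a.0_2 = a;

   where a is a PARM_DECL.  If nothing between the start of the function and
   this statement can modify a, load_from_unmodified_param returns the index
   of a in the parameter descriptors, otherwise -1.  */
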
969 /* Return true if memory reference REF (which must be a load through parameter
970 with INDEX) loads data that are known to be unmodified in this function
971 before reaching statement STMT. */
972
973 static bool
974 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
975 int index, gimple *stmt, tree ref)
976 {
977 struct ipa_param_aa_status *paa;
978 bool modified = false;
979 ao_ref refd;
980
981 gcc_checking_assert (fbi);
982 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
983 if (paa->ref_modified)
984 return false;
985
986 gcc_checking_assert (gimple_vuse (stmt));
987 ao_ref_init (&refd, ref);
988 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
989 &modified, NULL, NULL,
990 fbi->aa_walk_budget + 1);
991 if (walked < 0)
992 {
993 modified = true;
994 fbi->aa_walk_budget = 0;
995 }
996 else
997 fbi->aa_walk_budget -= walked;
998 if (modified)
999 paa->ref_modified = true;
1000 return !modified;
1001 }
1002
1003 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1004 is known to be unmodified in this function before reaching call statement
1005 CALL into which it is passed. FBI describes the function body. */
1006
1007 static bool
1008 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1009 gimple *call, tree parm)
1010 {
1011 bool modified = false;
1012 ao_ref refd;
1013
 1014 /* It's unnecessary to calculate anything about memory contents for a const
 1015 function because it is not going to use it. But do not cache the result
1016 either. Also, no such calculations for non-pointers. */
1017 if (!gimple_vuse (call)
1018 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1019 return false;
1020
1021 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1022 gimple_bb (call),
1023 index);
1024 if (paa->pt_modified)
1025 return false;
1026
1027 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1028 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1029 &modified, NULL, NULL,
1030 fbi->aa_walk_budget + 1);
1031 if (walked < 0)
1032 {
1033 fbi->aa_walk_budget = 0;
1034 modified = true;
1035 }
1036 else
1037 fbi->aa_walk_budget -= walked;
1038 if (modified)
1039 paa->pt_modified = true;
1040 return !modified;
1041 }
1042
1043 /* Return true if we can prove that OP is a memory reference loading
1044 data from an aggregate passed as a parameter.
1045
 1046 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1047 false if it cannot prove that the value has not been modified before the
1048 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1049 if it cannot prove the value has not been modified, in that case it will
1050 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1051
1052 INFO and PARMS_AINFO describe parameters of the current function (but the
1053 latter can be NULL), STMT is the load statement. If function returns true,
1054 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1055 within the aggregate and whether it is a load from a value passed by
1056 reference respectively. */
1057
1058 bool
1059 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1060 vec<ipa_param_descriptor, va_gc> *descriptors,
1061 gimple *stmt, tree op, int *index_p,
1062 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1063 bool *by_ref_p, bool *guaranteed_unmodified)
1064 {
1065 int index;
1066 HOST_WIDE_INT size;
1067 bool reverse;
1068 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1069
1070 if (!base)
1071 return false;
1072
1073 if (DECL_P (base))
1074 {
1075 int index = ipa_get_param_decl_index_1 (descriptors, base);
1076 if (index >= 0
1077 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1078 {
1079 *index_p = index;
1080 *by_ref_p = false;
1081 if (size_p)
1082 *size_p = size;
1083 if (guaranteed_unmodified)
1084 *guaranteed_unmodified = true;
1085 return true;
1086 }
1087 return false;
1088 }
1089
1090 if (TREE_CODE (base) != MEM_REF
1091 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1092 || !integer_zerop (TREE_OPERAND (base, 1)))
1093 return false;
1094
1095 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1096 {
1097 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1098 index = ipa_get_param_decl_index_1 (descriptors, parm);
1099 }
1100 else
1101 {
1102 /* This branch catches situations where a pointer parameter is not a
1103 gimple register, for example:
1104
1105 void hip7(S*) (struct S * p)
1106 {
1107 void (*<T2e4>) (struct S *) D.1867;
1108 struct S * p.1;
1109
1110 <bb 2>:
1111 p.1_1 = p;
1112 D.1867_2 = p.1_1->f;
1113 D.1867_2 ();
1114 gdp = &p;
1115 */
1116
1117 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1118 index = load_from_unmodified_param (fbi, descriptors, def);
1119 }
1120
1121 if (index >= 0)
1122 {
1123 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1124 if (!data_preserved && !guaranteed_unmodified)
1125 return false;
1126
1127 *index_p = index;
1128 *by_ref_p = true;
1129 if (size_p)
1130 *size_p = size;
1131 if (guaranteed_unmodified)
1132 *guaranteed_unmodified = data_preserved;
1133 return true;
1134 }
1135 return false;
1136 }
1137
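/* Hedged example of a by-reference hit in ipa_load_from_parm_agg: for a
   load like

     x_1 = p_2(D)->fld;

   where p_2(D) is the default definition of pointer parameter p, the
   function would set *INDEX_P to the index of p, *OFFSET_P to the bit
   offset of fld within the pointed-to record and *BY_REF_P to true,
   provided the pointed-to data is not modified before the load.  */
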
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1142
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1147
1148 foo (int a)
1149 {
1150 int a.0;
1151
1152 a.0_2 = a;
1153 bar (a.0_2);
1154
1155 2) The passed value can be described by a simple arithmetic pass-through
1156 jump function. E.g.
1157
1158 foo (int a)
1159 {
1160 int D.2064;
1161
1162 D.2064_4 = a.1(D) + 4;
1163 bar (D.2064_4);
1164
1165 This case can also occur in combination of the previous one, e.g.:
1166
1167 foo (int a, int z)
1168 {
1169 int a.0;
1170 int D.2064;
1171
1172 a.0_3 = a;
1173 D.2064_4 = a.0_3 + 4;
1174 foo (D.2064_4);
1175
1176 3) The passed value is an address of an object within another one (which
1177 also passed by reference). Such situations are described by an ancestor
1178 jump function and describe situations such as:
1179
1180 B::foo() (struct B * const this)
1181 {
1182 struct A * D.1845;
1183
1184 D.1845_2 = &this_1(D)->D.1748;
1185 A::bar (D.1845_2);
1186
 1187 INFO is the structure describing individual parameters that is accessed in
 1188 different stages of IPA optimizations. PARMS_AINFO contains the information
 1189 that is only needed for intraprocedural analysis. */
1190
1191 static void
1192 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1193 class ipa_node_params *info,
1194 struct ipa_jump_func *jfunc,
1195 gcall *call, gimple *stmt, tree name,
1196 tree param_type)
1197 {
1198 HOST_WIDE_INT offset, size;
1199 tree op1, tc_ssa, base, ssa;
1200 bool reverse;
1201 int index;
1202
1203 op1 = gimple_assign_rhs1 (stmt);
1204
1205 if (TREE_CODE (op1) == SSA_NAME)
1206 {
1207 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1208 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1209 else
1210 index = load_from_unmodified_param (fbi, info->descriptors,
1211 SSA_NAME_DEF_STMT (op1));
1212 tc_ssa = op1;
1213 }
1214 else
1215 {
1216 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1217 tc_ssa = gimple_assign_lhs (stmt);
1218 }
1219
1220 if (index >= 0)
1221 {
1222 switch (gimple_assign_rhs_class (stmt))
1223 {
1224 case GIMPLE_BINARY_RHS:
1225 {
1226 tree op2 = gimple_assign_rhs2 (stmt);
1227 if (!is_gimple_ip_invariant (op2)
1228 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1229 != tcc_comparison)
1230 && !useless_type_conversion_p (TREE_TYPE (name),
1231 TREE_TYPE (op1))))
1232 return;
1233
1234 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1235 gimple_assign_rhs_code (stmt));
1236 break;
1237 }
1238 case GIMPLE_SINGLE_RHS:
1239 {
1240 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1241 tc_ssa);
1242 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1243 break;
1244 }
1245 case GIMPLE_UNARY_RHS:
1246 if (is_gimple_assign (stmt)
1247 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1248 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1249 ipa_set_jf_unary_pass_through (jfunc, index,
1250 gimple_assign_rhs_code (stmt));
1251 default:;
1252 }
1253 return;
1254 }
1255
1256 if (TREE_CODE (op1) != ADDR_EXPR)
1257 return;
1258 op1 = TREE_OPERAND (op1, 0);
1259 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1260 return;
1261 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1262 offset_int mem_offset;
1263 if (!base
1264 || TREE_CODE (base) != MEM_REF
1265 || !mem_ref_offset (base).is_constant (&mem_offset))
1266 return;
1267 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1268 ssa = TREE_OPERAND (base, 0);
1269 if (TREE_CODE (ssa) != SSA_NAME
1270 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1271 || offset < 0)
1272 return;
1273
1274 /* Dynamic types are changed in constructors and destructors. */
1275 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1276 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1277 ipa_set_ancestor_jf (jfunc, offset, index,
1278 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1279 }
1280
1281 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1282 it looks like:
1283
1284 iftmp.1_3 = &obj_2(D)->D.1762;
1285
1286 The base of the MEM_REF must be a default definition SSA NAME of a
 1287 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1288 whole MEM_REF expression is returned and the offset calculated from any
1289 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1290 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1291
1292 static tree
1293 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1294 {
1295 HOST_WIDE_INT size;
1296 tree expr, parm, obj;
1297 bool reverse;
1298
1299 if (!gimple_assign_single_p (assign))
1300 return NULL_TREE;
1301 expr = gimple_assign_rhs1 (assign);
1302
1303 if (TREE_CODE (expr) != ADDR_EXPR)
1304 return NULL_TREE;
1305 expr = TREE_OPERAND (expr, 0);
1306 obj = expr;
1307 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1308
1309 offset_int mem_offset;
1310 if (!expr
1311 || TREE_CODE (expr) != MEM_REF
1312 || !mem_ref_offset (expr).is_constant (&mem_offset))
1313 return NULL_TREE;
1314 parm = TREE_OPERAND (expr, 0);
1315 if (TREE_CODE (parm) != SSA_NAME
1316 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1317 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1318 return NULL_TREE;
1319
1320 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1321 *obj_p = obj;
1322 return expr;
1323 }
1324
1325
1326 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1327 statement PHI, try to find out whether NAME is in fact a
1328 multiple-inheritance typecast from a descendant into an ancestor of a formal
1329 parameter and thus can be described by an ancestor jump function and if so,
1330 write the appropriate function into JFUNC.
1331
1332 Essentially we want to match the following pattern:
1333
1334 if (obj_2(D) != 0B)
1335 goto <bb 3>;
1336 else
1337 goto <bb 4>;
1338
1339 <bb 3>:
1340 iftmp.1_3 = &obj_2(D)->D.1762;
1341
1342 <bb 4>:
1343 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1344 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1345 return D.1879_6; */
1346
1347 static void
1348 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1349 class ipa_node_params *info,
1350 struct ipa_jump_func *jfunc,
1351 gcall *call, gphi *phi)
1352 {
1353 HOST_WIDE_INT offset;
1354 gimple *assign, *cond;
1355 basic_block phi_bb, assign_bb, cond_bb;
1356 tree tmp, parm, expr, obj;
1357 int index, i;
1358
1359 if (gimple_phi_num_args (phi) != 2)
1360 return;
1361
1362 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1363 tmp = PHI_ARG_DEF (phi, 0);
1364 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1365 tmp = PHI_ARG_DEF (phi, 1);
1366 else
1367 return;
1368 if (TREE_CODE (tmp) != SSA_NAME
1369 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1370 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1371 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1372 return;
1373
1374 assign = SSA_NAME_DEF_STMT (tmp);
1375 assign_bb = gimple_bb (assign);
1376 if (!single_pred_p (assign_bb))
1377 return;
1378 expr = get_ancestor_addr_info (assign, &obj, &offset);
1379 if (!expr)
1380 return;
1381 parm = TREE_OPERAND (expr, 0);
1382 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1383 if (index < 0)
1384 return;
1385
1386 cond_bb = single_pred (assign_bb);
1387 cond = last_stmt (cond_bb);
1388 if (!cond
1389 || gimple_code (cond) != GIMPLE_COND
1390 || gimple_cond_code (cond) != NE_EXPR
1391 || gimple_cond_lhs (cond) != parm
1392 || !integer_zerop (gimple_cond_rhs (cond)))
1393 return;
1394
1395 phi_bb = gimple_bb (phi);
1396 for (i = 0; i < 2; i++)
1397 {
1398 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1399 if (pred != assign_bb && pred != cond_bb)
1400 return;
1401 }
1402
1403 ipa_set_ancestor_jf (jfunc, offset, index,
1404 parm_ref_data_pass_through_p (fbi, index, call, parm));
1405 }
1406
1407 /* Inspect the given TYPE and return true iff it has the same structure (the
1408 same number of fields of the same types) as a C++ member pointer. If
1409 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1410 corresponding fields there. */
1411
1412 static bool
1413 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1414 {
1415 tree fld;
1416
1417 if (TREE_CODE (type) != RECORD_TYPE)
1418 return false;
1419
1420 fld = TYPE_FIELDS (type);
1421 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1422 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1423 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1424 return false;
1425
1426 if (method_ptr)
1427 *method_ptr = fld;
1428
1429 fld = DECL_CHAIN (fld);
1430 if (!fld || INTEGRAL_TYPE_P (fld)
1431 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1432 return false;
1433 if (delta)
1434 *delta = fld;
1435
1436 if (DECL_CHAIN (fld))
1437 return false;
1438
1439 return true;
1440 }
1441
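/* Hedged illustration of the layout matched above (the exact field names
   depend on the ABI and are only indicative): a C++ pointer-to-member
   function is lowered to a record along the lines of

     struct
     {
       void (*__pfn) (struct S *);
       long __delta;
     };

   i.e. a field whose type is a pointer to a METHOD_TYPE followed by a
   single integral field and nothing else, which is what the checks above
   look for.  */
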
1442 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1443 return the rhs of its defining statement. Otherwise return RHS as it
1444 is. */
1445
1446 static inline tree
1447 get_ssa_def_if_simple_copy (tree rhs)
1448 {
1449 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1450 {
1451 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1452
1453 if (gimple_assign_single_p (def_stmt))
1454 rhs = gimple_assign_rhs1 (def_stmt);
1455 else
1456 break;
1457 }
1458 return rhs;
1459 }
1460
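/* A short made-up example of the copy chasing above: given

     b_2 = 5;
     a_1 = b_2;

   calling get_ssa_def_if_simple_copy on a_1 follows the single-rhs
   assignments and returns the constant 5; for a default-definition SSA name
   (an incoming parameter value) the loop stops and the name itself is
   returned.  */
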
1461 /* Simple linked list, describing known contents of an aggregate before
1462 call. */
1463
1464 struct ipa_known_agg_contents_list
1465 {
1466 /* Offset and size of the described part of the aggregate. */
1467 HOST_WIDE_INT offset, size;
 1468 /* Known constant value or NULL if the contents are known to be unknown. */
1469 tree constant;
1470 /* Pointer to the next structure in the list. */
1471 struct ipa_known_agg_contents_list *next;
1472 };
1473
1474 /* Add a known content item into a linked list of ipa_known_agg_contents_list
1475 structure, in which all elements are sorted ascendingly by offset. */
1476
1477 static inline void
1478 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1479 struct ipa_known_agg_contents_list *item)
1480 {
1481 struct ipa_known_agg_contents_list *list = *plist;
1482
1483 for (; list; list = list->next)
1484 {
1485 if (list->offset >= item->offset)
1486 break;
1487
1488 plist = &list->next;
1489 }
1490
1491 item->next = list;
1492 *plist = item;
1493 }
1494
1495 /* Check whether a given known content is clobbered by certain element in
1496 a linked list of ipa_known_agg_contents_list. */
1497
1498 static inline bool
1499 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1500 struct ipa_known_agg_contents_list *item)
1501 {
1502 for (; list; list = list->next)
1503 {
1504 if (list->offset >= item->offset)
1505 return list->offset < item->offset + item->size;
1506
1507 if (list->offset + list->size > item->offset)
1508 return true;
1509 }
1510
1511 return false;
1512 }
1513
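/* A hedged sketch of how the two list helpers above interact, with made-up
   bit offsets: if the list already describes 32-bit stores at offsets 0 and
   64, then a new 32-bit item at offset 64 is considered clobbered (the
   existing entry at 64 starts within it), whereas a new 32-bit item at
   offset 32 is not, and add_to_agg_contents_list would insert it between
   the two existing entries so that offsets stay sorted in ascending
   order.  */
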
1514 /* Build aggregate jump function from LIST, assuming there are exactly
1515 CONST_COUNT constant entries there and that offset of the passed argument
1516 is ARG_OFFSET and store it into JFUNC. */
1517
1518 static void
1519 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1520 int const_count, HOST_WIDE_INT arg_offset,
1521 struct ipa_jump_func *jfunc)
1522 {
1523 vec_alloc (jfunc->agg.items, const_count);
1524 while (list)
1525 {
1526 if (list->constant)
1527 {
1528 struct ipa_agg_jf_item item;
1529 item.offset = list->offset - arg_offset;
1530 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1531 item.value = unshare_expr_without_location (list->constant);
1532 jfunc->agg.items->quick_push (item);
1533 }
1534 list = list->next;
1535 }
1536 }
1537
1538 /* If STMT is a memory store to the object whose address is BASE, extract
1539 information (offset, size, and value) into CONTENT, and return true,
1540 otherwise we conservatively assume the whole object is modified with
1541 unknown content, and return false. CHECK_REF means that access to object
1542 is expected to be in form of MEM_REF expression. */
1543
1544 static bool
1545 extract_mem_content (gimple *stmt, tree base, bool check_ref,
1546 struct ipa_known_agg_contents_list *content)
1547 {
1548 HOST_WIDE_INT lhs_offset, lhs_size;
1549 tree lhs, rhs, lhs_base;
1550 bool reverse;
1551
1552 if (!gimple_assign_single_p (stmt))
1553 return false;
1554
1555 lhs = gimple_assign_lhs (stmt);
1556 rhs = gimple_assign_rhs1 (stmt);
1557
1558 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1559 || TREE_CODE (lhs) == BIT_FIELD_REF
1560 || contains_bitfld_component_ref_p (lhs))
1561 return false;
1562
1563 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1564 &lhs_size, &reverse);
1565 if (!lhs_base)
1566 return false;
1567
1568 if (check_ref)
1569 {
1570 if (TREE_CODE (lhs_base) != MEM_REF
1571 || TREE_OPERAND (lhs_base, 0) != base
1572 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1573 return false;
1574 }
1575 else if (lhs_base != base)
1576 return false;
1577
1578 rhs = get_ssa_def_if_simple_copy (rhs);
1579
1580 content->size = lhs_size;
1581 content->offset = lhs_offset;
1582 content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
1583 content->next = NULL;
1584
1585 return true;
1586 }
1587
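/* Hedged example of what extract_mem_content recognizes (names invented):
   with CHECK_REF true and BASE being the SSA name p_1(D), a store such as

     MEM[(struct S *)p_1(D)].f = 7;

   yields the bit offset and size of field f in *CONTENT together with the
   constant 7, whereas a store through an unrelated pointer makes the
   function return false, and a store whose right-hand side is not an
   interprocedural invariant records a NULL constant.  */
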
1588 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1589 in ARG is filled in with constant values. ARG can either be an aggregate
1590 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1591 aggregate. JFUNC is the jump function into which the constants are
1592 subsequently stored. AA_WALK_BUDGET_P points to limit on number of
1593 statements we allow get_continuation_for_phi to examine. */
1594
1595 static void
1596 determine_known_aggregate_parts (gcall *call, tree arg,
1597 tree arg_type,
1598 struct ipa_jump_func *jfunc,
1599 unsigned *aa_walk_budget_p)
1600 {
1601 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1602 bitmap visited = NULL;
1603 int item_count = 0, const_count = 0;
1604 int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
1605 HOST_WIDE_INT arg_offset, arg_size;
1606 tree arg_base;
1607 bool check_ref, by_ref;
1608 ao_ref r;
1609
1610 if (ipa_max_agg_items == 0)
1611 return;
1612
1613 /* The function operates in three stages. First, we prepare check_ref, r,
1614 arg_base and arg_offset based on what is actually passed as an actual
1615 argument. */
1616
1617 if (POINTER_TYPE_P (arg_type))
1618 {
1619 by_ref = true;
1620 if (TREE_CODE (arg) == SSA_NAME)
1621 {
1622 tree type_size;
1623 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1624 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1625 return;
1626 check_ref = true;
1627 arg_base = arg;
1628 arg_offset = 0;
1629 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1630 arg_size = tree_to_uhwi (type_size);
1631 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1632 }
1633 else if (TREE_CODE (arg) == ADDR_EXPR)
1634 {
1635 bool reverse;
1636
1637 arg = TREE_OPERAND (arg, 0);
1638 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1639 &arg_size, &reverse);
1640 if (!arg_base)
1641 return;
1642 if (DECL_P (arg_base))
1643 {
1644 check_ref = false;
1645 ao_ref_init (&r, arg_base);
1646 }
1647 else
1648 return;
1649 }
1650 else
1651 return;
1652 }
1653 else
1654 {
1655 bool reverse;
1656
1657 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1658
1659 by_ref = false;
1660 check_ref = false;
1661 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1662 &arg_size, &reverse);
1663 if (!arg_base)
1664 return;
1665
1666 ao_ref_init (&r, arg);
1667 }
1668
 1669 /* The second stage traverses the virtual SSA web backwards starting from the
 1670 call statement, looking only at individual dominating virtual operands (whose
 1671 definitions dominate the call). As long as it is confident that the contents
 1672 of the aggregate are affected by the definition of the virtual operand, it
 1673 builds a sorted linked list of ipa_known_agg_contents_list describing that. */
1674
1675 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1676 {
1677 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1678
1679 if (gimple_code (stmt) == GIMPLE_PHI)
1680 {
1681 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1682 *aa_walk_budget_p,
1683 &visited, false, NULL, NULL);
1684 continue;
1685 }
1686
1687 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1688 {
1689 struct ipa_known_agg_contents_list *content
1690 = XALLOCA (struct ipa_known_agg_contents_list);
1691
1692 if (!extract_mem_content (stmt, arg_base, check_ref, content))
1693 break;
1694
 1695 /* Now we get a dominating virtual operand, and need to check
 1696 whether its value is clobbered by any other dominating one. */
1697 if (content->constant
1698 && !clobber_by_agg_contents_list_p (all_list, content))
1699 {
1700 struct ipa_known_agg_contents_list *copy
1701 = XALLOCA (struct ipa_known_agg_contents_list);
1702
1703 /* Add to the list consisting of only dominating virtual
1704 operands, whose definitions can finally reach the call. */
1705 add_to_agg_contents_list (&list, (*copy = *content, copy));
1706
1707 if (++const_count == ipa_max_agg_items)
1708 break;
1709 }
1710
1711 /* Add to the list consisting of all dominating virtual operands. */
1712 add_to_agg_contents_list (&all_list, content);
1713
1714 if (++item_count == 2 * ipa_max_agg_items)
1715 break;
1716 }
1717 dom_vuse = gimple_vuse (stmt);
1718 }
1719
1720 if (visited)
1721 BITMAP_FREE (visited);
1722
1723 /* Third stage just goes over the list and creates an appropriate vector of
1724 ipa_agg_jf_item structures out of it, of course only if there are
1725 any known constants to begin with. */
1726
1727 if (const_count)
1728 {
1729 jfunc->agg.by_ref = by_ref;
1730 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1731 }
1732 }
1733
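/* A hedged end-to-end example for the routine above (all names invented):
   if a caller does

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   and no other statement between those stores and the call can clobber s,
   the jump function for the first argument of foo receives agg.by_ref set
   to true and two ipa_agg_jf_item entries holding the constants 1 and 2 at
   the bit offsets of a and b.  */
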
1734
1735 /* Return the Ith param type of callee associated with call graph
1736 edge E. */
1737
1738 tree
1739 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1740 {
1741 int n;
1742 tree type = (e->callee
1743 ? TREE_TYPE (e->callee->decl)
1744 : gimple_call_fntype (e->call_stmt));
1745 tree t = TYPE_ARG_TYPES (type);
1746
1747 for (n = 0; n < i; n++)
1748 {
1749 if (!t)
1750 break;
1751 t = TREE_CHAIN (t);
1752 }
1753 if (t)
1754 return TREE_VALUE (t);
1755 if (!e->callee)
1756 return NULL;
1757 t = DECL_ARGUMENTS (e->callee->decl);
1758 for (n = 0; n < i; n++)
1759 {
1760 if (!t)
1761 return NULL;
1762 t = TREE_CHAIN (t);
1763 }
1764 if (t)
1765 return TREE_TYPE (t);
1766 return NULL;
1767 }
1768
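/* Hedged example for the lookup above: for a callee declared as

     void callee (int, double);

   ipa_get_callee_param_type (e, 1) walks TYPE_ARG_TYPES and returns the
   double type node.  If the prototype is unavailable or too short (for
   instance an unprototyped or variadic declaration), the function falls
   back to the callee's DECL_ARGUMENTS chain and may return NULL.  */
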
1769 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1770 allocated structure or a previously existing one shared with other jump
1771 functions and/or transformation summaries. */
1772
1773 ipa_bits *
1774 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1775 {
1776 ipa_bits tmp;
1777 tmp.value = value;
1778 tmp.mask = mask;
1779
1780 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1781 if (*slot)
1782 return *slot;
1783
1784 ipa_bits *res = ggc_alloc<ipa_bits> ();
1785 res->value = value;
1786 res->mask = mask;
1787 *slot = res;
1788
1789 return res;
1790 }
1791
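/* Minimal usage sketch of the hash-consing above (the values are arbitrary):

     ipa_bits *a = ipa_get_ipa_bits_for_value (4, -8);
     ipa_bits *b = ipa_get_ipa_bits_for_value (4, -8);

   Both calls return the same GC-allocated structure, i.e. a == b, because
   the second lookup finds the entry created by the first one in
   ipa_bits_hash_table instead of allocating a new ipa_bits.  */
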
1792 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
 1793 table in order to avoid creating multiple identical ipa_bits structures. */
1794
1795 static void
1796 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1797 const widest_int &mask)
1798 {
1799 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1800 }
1801
1802 /* Return a pointer to a value_range just like *TMP, but either find it in
1803 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1804
1805 static value_range_base *
1806 ipa_get_value_range (value_range_base *tmp)
1807 {
1808 value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1809 if (*slot)
1810 return *slot;
1811
1812 value_range_base *vr = ggc_alloc<value_range_base> ();
1813 *vr = *tmp;
1814 *slot = vr;
1815
1816 return vr;
1817 }
1818
1819 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
 1820 equiv set. Use the hash table in order to avoid creating multiple identical
 1821 copies of value_ranges. */
1822
1823 static value_range_base *
1824 ipa_get_value_range (enum value_range_kind type, tree min, tree max)
1825 {
1826 value_range_base tmp (type, min, max);
1827 return ipa_get_value_range (&tmp);
1828 }
1829
1830 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
 1831 a NULL equiv bitmap. Use the hash table in order to avoid creating multiple
 1832 identical value_range structures. */
1833
1834 static void
1835 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
1836 tree min, tree max)
1837 {
1838 jf->m_vr = ipa_get_value_range (type, min, max);
1839 }
1840
1841 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
1842 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1843
1844 static void
1845 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
1846 {
1847 jf->m_vr = ipa_get_value_range (tmp);
1848 }
1849
1850 /* Compute jump functions for all arguments of callsite CS and insert the
1851 information in the jump_functions array in the ipa_edge_args corresponding
1852 to this callsite. */
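/* For each actual argument this records, where determinable, known bits, a
   value range and a polymorphic context, plus a jump function that is a
   constant, a (possibly arithmetic) pass-through, an ancestor, or unknown,
   optionally accompanied by known aggregate contents. */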
1853
1854 static void
1855 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1856 struct cgraph_edge *cs)
1857 {
1858 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
1859 class ipa_edge_args *args = IPA_EDGE_REF (cs);
1860 gcall *call = cs->call_stmt;
1861 int n, arg_num = gimple_call_num_args (call);
1862 bool useful_context = false;
1863
1864 if (arg_num == 0 || args->jump_functions)
1865 return;
1866 vec_safe_grow_cleared (args->jump_functions, arg_num);
1867 if (flag_devirtualize)
1868 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1869
1870 if (gimple_call_internal_p (call))
1871 return;
1872 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1873 return;
1874
1875 for (n = 0; n < arg_num; n++)
1876 {
1877 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1878 tree arg = gimple_call_arg (call, n);
1879 tree param_type = ipa_get_callee_param_type (cs, n);
1880 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1881 {
1882 tree instance;
1883 class ipa_polymorphic_call_context context (cs->caller->decl,
1884 arg, cs->call_stmt,
1885 &instance);
1886 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
1887 &fbi->aa_walk_budget);
1888 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1889 if (!context.useless_p ())
1890 useful_context = true;
1891 }
1892
1893 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1894 {
1895 bool addr_nonzero = false;
1896 bool strict_overflow = false;
1897
1898 if (TREE_CODE (arg) == SSA_NAME
1899 && param_type
1900 && get_ptr_nonnull (arg))
1901 addr_nonzero = true;
1902 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1903 addr_nonzero = true;
1904
1905 if (addr_nonzero)
1906 {
1907 tree z = build_int_cst (TREE_TYPE (arg), 0);
1908 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1909 }
1910 else
1911 gcc_assert (!jfunc->m_vr);
1912 }
1913 else
1914 {
1915 wide_int min, max;
1916 value_range_kind type;
1917 if (TREE_CODE (arg) == SSA_NAME
1918 && param_type
1919 && (type = get_range_info (arg, &min, &max))
1920 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1921 {
1922 value_range_base resvr;
1923 value_range_base tmpvr (type,
1924 wide_int_to_tree (TREE_TYPE (arg), min),
1925 wide_int_to_tree (TREE_TYPE (arg), max));
1926 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1927 &tmpvr, TREE_TYPE (arg));
1928 if (!resvr.undefined_p () && !resvr.varying_p ())
1929 ipa_set_jfunc_vr (jfunc, &resvr);
1930 else
1931 gcc_assert (!jfunc->m_vr);
1932 }
1933 else
1934 gcc_assert (!jfunc->m_vr);
1935 }
1936
1937 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1938 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1939 {
1940 if (TREE_CODE (arg) == SSA_NAME)
1941 ipa_set_jfunc_bits (jfunc, 0,
1942 widest_int::from (get_nonzero_bits (arg),
1943 TYPE_SIGN (TREE_TYPE (arg))));
1944 else
1945 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1946 }
1947 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1948 {
1949 unsigned HOST_WIDE_INT bitpos;
1950 unsigned align;
1951
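/* Derive known bits from the pointer's alignment: the low bits covered by the
   alignment are known to equal the byte misalignment BITPOS / BITS_PER_UNIT,
   while all remaining bits stay set in the mask, i.e. unknown. */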
1952 get_pointer_alignment_1 (arg, &align, &bitpos);
1953 widest_int mask = wi::bit_and_not
1954 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1955 align / BITS_PER_UNIT - 1);
1956 widest_int value = bitpos / BITS_PER_UNIT;
1957 ipa_set_jfunc_bits (jfunc, value, mask);
1958 }
1959 else
1960 gcc_assert (!jfunc->bits);
1961
1962 if (is_gimple_ip_invariant (arg)
1963 || (VAR_P (arg)
1964 && is_global_var (arg)
1965 && TREE_READONLY (arg)))
1966 ipa_set_jf_constant (jfunc, arg, cs);
1967 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1968 && TREE_CODE (arg) == PARM_DECL)
1969 {
1970 int index = ipa_get_param_decl_index (info, arg);
1971
1972 gcc_assert (index >= 0);
1973 /* Aggregate passed by value, check for pass-through, otherwise we
1974 will attempt to fill in aggregate contents later in this
1975 loop. */
1976 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1977 {
1978 ipa_set_jf_simple_pass_through (jfunc, index, false);
1979 continue;
1980 }
1981 }
1982 else if (TREE_CODE (arg) == SSA_NAME)
1983 {
1984 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1985 {
1986 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1987 if (index >= 0)
1988 {
1989 bool agg_p;
1990 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1991 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1992 }
1993 }
1994 else
1995 {
1996 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1997 if (is_gimple_assign (stmt))
1998 compute_complex_assign_jump_func (fbi, info, jfunc,
1999 call, stmt, arg, param_type);
2000 else if (gimple_code (stmt) == GIMPLE_PHI)
2001 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2002 call,
2003 as_a <gphi *> (stmt));
2004 }
2005 }
2006
2007 /* If ARG is a pointer, we cannot use its type to determine the type of the
2008 aggregate passed (because type conversions are ignored in gimple). Usually we
2009 can safely get the type from the function declaration, but in the case of K&R
2010 prototypes or variadic functions we can try our luck with the type of the
2011 pointer passed. TODO: Since we look for actual initialization of the memory
2012 object, we may be better off working out the type based on the stores we find. */
2013 if (!param_type)
2014 param_type = TREE_TYPE (arg);
2015
2016 if ((jfunc->type != IPA_JF_PASS_THROUGH
2017 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2018 && (jfunc->type != IPA_JF_ANCESTOR
2019 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2020 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2021 || POINTER_TYPE_P (param_type)))
2022 determine_known_aggregate_parts (call, arg, param_type, jfunc,
2023 &fbi->aa_walk_budget);
2024 }
2025 if (!useful_context)
2026 vec_free (args->polymorphic_call_contexts);
2027 }
2028
2029 /* Compute jump functions for all edges - both direct and indirect - outgoing
2030 from BB. */
2031
2032 static void
2033 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2034 {
2035 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2036 int i;
2037 struct cgraph_edge *cs;
2038
2039 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2040 {
2041 struct cgraph_node *callee = cs->callee;
2042
2043 if (callee)
2044 {
2045 callee->ultimate_alias_target ();
2046 /* We do not need to bother analyzing calls to unknown functions
2047 unless they may become known during lto/whopr. */
2048 if (!callee->definition && !flag_lto)
2049 continue;
2050 }
2051 ipa_compute_jump_functions_for_edge (fbi, cs);
2052 }
2053 }
2054
2055 /* If STMT looks like a statement loading a value from a member pointer formal
2056 parameter, return that parameter and store the offset of the field to
2057 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2058 might be clobbered). If USE_DELTA, then we look for a use of the delta
2059 field rather than the pfn. */
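/* For example, loads of the form
     f$__pfn_24 = MEM[(struct *)&f];
   or
     f$__pfn_24 = MEM[(struct *)&f].__pfn;
   match when f is a PARM_DECL with a member function pointer type. */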
2060
2061 static tree
2062 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2063 HOST_WIDE_INT *offset_p)
2064 {
2065 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2066
2067 if (!gimple_assign_single_p (stmt))
2068 return NULL_TREE;
2069
2070 rhs = gimple_assign_rhs1 (stmt);
2071 if (TREE_CODE (rhs) == COMPONENT_REF)
2072 {
2073 ref_field = TREE_OPERAND (rhs, 1);
2074 rhs = TREE_OPERAND (rhs, 0);
2075 }
2076 else
2077 ref_field = NULL_TREE;
2078 if (TREE_CODE (rhs) != MEM_REF)
2079 return NULL_TREE;
2080 rec = TREE_OPERAND (rhs, 0);
2081 if (TREE_CODE (rec) != ADDR_EXPR)
2082 return NULL_TREE;
2083 rec = TREE_OPERAND (rec, 0);
2084 if (TREE_CODE (rec) != PARM_DECL
2085 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2086 return NULL_TREE;
2087 ref_offset = TREE_OPERAND (rhs, 1);
2088
2089 if (use_delta)
2090 fld = delta_field;
2091 else
2092 fld = ptr_field;
2093 if (offset_p)
2094 *offset_p = int_bit_position (fld);
2095
2096 if (ref_field)
2097 {
2098 if (integer_nonzerop (ref_offset))
2099 return NULL_TREE;
2100 return ref_field == fld ? rec : NULL_TREE;
2101 }
2102 else
2103 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2104 : NULL_TREE;
2105 }
2106
2107 /* Returns true iff T is an SSA_NAME defined by a statement. */
2108
2109 static bool
2110 ipa_is_ssa_with_stmt_def (tree t)
2111 {
2112 if (TREE_CODE (t) == SSA_NAME
2113 && !SSA_NAME_IS_DEFAULT_DEF (t))
2114 return true;
2115 else
2116 return false;
2117 }
2118
2119 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2120 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2121 indirect call graph edge. */
2122
2123 static struct cgraph_edge *
2124 ipa_note_param_call (struct cgraph_node *node, int param_index,
2125 gcall *stmt)
2126 {
2127 struct cgraph_edge *cs;
2128
2129 cs = node->get_edge (stmt);
2130 cs->indirect_info->param_index = param_index;
2131 cs->indirect_info->agg_contents = 0;
2132 cs->indirect_info->member_ptr = 0;
2133 cs->indirect_info->guaranteed_unmodified = 0;
2134 return cs;
2135 }
2136
2137 /* Analyze the indirect CALL, whose called value is TARGET, and examine uses
2138 of formal parameters of the caller FBI->node (described by FBI->info).
2139 Currently it checks
2140 whether the call calls a pointer that is a formal parameter and if so, the
2141 parameter is marked with the called flag and an indirect call graph edge
2142 describing the call is created. This is very simple for ordinary pointers
2143 represented in SSA but not-so-nice when it comes to member pointers. The
2144 ugly part of this function does nothing more than trying to match the
2145 pattern of such a call. An example of such a pattern is the gimple dump
2146 below, the call is on the last line:
2147
2148 <bb 2>:
2149 f$__delta_5 = f.__delta;
2150 f$__pfn_24 = f.__pfn;
2151
2152 or
2153 <bb 2>:
2154 f$__delta_5 = MEM[(struct *)&f];
2155 f$__pfn_24 = MEM[(struct *)&f + 4B];
2156
2157 and a few lines below:
2158
2159 <bb 5>
2160 D.2496_3 = (int) f$__pfn_24;
2161 D.2497_4 = D.2496_3 & 1;
2162 if (D.2497_4 != 0)
2163 goto <bb 3>;
2164 else
2165 goto <bb 4>;
2166
2167 <bb 6>:
2168 D.2500_7 = (unsigned int) f$__delta_5;
2169 D.2501_8 = &S + D.2500_7;
2170 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2171 D.2503_10 = *D.2502_9;
2172 D.2504_12 = f$__pfn_24 + -1;
2173 D.2505_13 = (unsigned int) D.2504_12;
2174 D.2506_14 = D.2503_10 + D.2505_13;
2175 D.2507_15 = *D.2506_14;
2176 iftmp.11_16 = (String:: *) D.2507_15;
2177
2178 <bb 7>:
2179 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2180 D.2500_19 = (unsigned int) f$__delta_5;
2181 D.2508_20 = &S + D.2500_19;
2182 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2183
2184 Such patterns are results of simple calls to a member pointer:
2185
2186 int doprinting (int (MyString::* f)(int) const)
2187 {
2188 MyString S ("somestring");
2189
2190 return (S.*f)(4);
2191 }
2192
2193 Moreover, the function also looks for called pointers loaded from aggregates
2194 passed by value or reference. */
2195
2196 static void
2197 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2198 tree target)
2199 {
2200 class ipa_node_params *info = fbi->info;
2201 HOST_WIDE_INT offset;
2202 bool by_ref;
2203
2204 if (SSA_NAME_IS_DEFAULT_DEF (target))
2205 {
2206 tree var = SSA_NAME_VAR (target);
2207 int index = ipa_get_param_decl_index (info, var);
2208 if (index >= 0)
2209 ipa_note_param_call (fbi->node, index, call);
2210 return;
2211 }
2212
2213 int index;
2214 gimple *def = SSA_NAME_DEF_STMT (target);
2215 bool guaranteed_unmodified;
2216 if (gimple_assign_single_p (def)
2217 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2218 gimple_assign_rhs1 (def), &index, &offset,
2219 NULL, &by_ref, &guaranteed_unmodified))
2220 {
2221 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2222 cs->indirect_info->offset = offset;
2223 cs->indirect_info->agg_contents = 1;
2224 cs->indirect_info->by_ref = by_ref;
2225 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2226 return;
2227 }
2228
2229 /* Now we need to try to match the complex pattern of calling a member
2230 pointer. */
2231 if (gimple_code (def) != GIMPLE_PHI
2232 || gimple_phi_num_args (def) != 2
2233 || !POINTER_TYPE_P (TREE_TYPE (target))
2234 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2235 return;
2236
2237 /* First, we need to check whether one of these is a load from a member
2238 pointer that is a parameter to this function. */
2239 tree n1 = PHI_ARG_DEF (def, 0);
2240 tree n2 = PHI_ARG_DEF (def, 1);
2241 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2242 return;
2243 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2244 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2245
2246 tree rec;
2247 basic_block bb, virt_bb;
2248 basic_block join = gimple_bb (def);
2249 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2250 {
2251 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2252 return;
2253
2254 bb = EDGE_PRED (join, 0)->src;
2255 virt_bb = gimple_bb (d2);
2256 }
2257 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2258 {
2259 bb = EDGE_PRED (join, 1)->src;
2260 virt_bb = gimple_bb (d1);
2261 }
2262 else
2263 return;
2264
2265 /* Second, we need to check that the basic blocks are laid out in the way
2266 corresponding to the pattern. */
2267
2268 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2269 || single_pred (virt_bb) != bb
2270 || single_succ (virt_bb) != join)
2271 return;
2272
2273 /* Third, let's see that the branching is done depending on the least
2274 significant bit of the pfn. */
2275
2276 gimple *branch = last_stmt (bb);
2277 if (!branch || gimple_code (branch) != GIMPLE_COND)
2278 return;
2279
2280 if ((gimple_cond_code (branch) != NE_EXPR
2281 && gimple_cond_code (branch) != EQ_EXPR)
2282 || !integer_zerop (gimple_cond_rhs (branch)))
2283 return;
2284
2285 tree cond = gimple_cond_lhs (branch);
2286 if (!ipa_is_ssa_with_stmt_def (cond))
2287 return;
2288
2289 def = SSA_NAME_DEF_STMT (cond);
2290 if (!is_gimple_assign (def)
2291 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2292 || !integer_onep (gimple_assign_rhs2 (def)))
2293 return;
2294
2295 cond = gimple_assign_rhs1 (def);
2296 if (!ipa_is_ssa_with_stmt_def (cond))
2297 return;
2298
2299 def = SSA_NAME_DEF_STMT (cond);
2300
2301 if (is_gimple_assign (def)
2302 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2303 {
2304 cond = gimple_assign_rhs1 (def);
2305 if (!ipa_is_ssa_with_stmt_def (cond))
2306 return;
2307 def = SSA_NAME_DEF_STMT (cond);
2308 }
2309
2310 tree rec2;
2311 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2312 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2313 == ptrmemfunc_vbit_in_delta),
2314 NULL);
2315 if (rec != rec2)
2316 return;
2317
2318 index = ipa_get_param_decl_index (info, rec);
2319 if (index >= 0
2320 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2321 {
2322 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2323 cs->indirect_info->offset = offset;
2324 cs->indirect_info->agg_contents = 1;
2325 cs->indirect_info->member_ptr = 1;
2326 cs->indirect_info->guaranteed_unmodified = 1;
2327 }
2328
2329 return;
2330 }
2331
2332 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2333 object referenced in the expression is a formal parameter of the caller
2334 FBI->node (described by FBI->info), create a call note for the
2335 statement. */
2336
2337 static void
2338 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2339 gcall *call, tree target)
2340 {
2341 tree obj = OBJ_TYPE_REF_OBJECT (target);
2342 int index;
2343 HOST_WIDE_INT anc_offset;
2344
2345 if (!flag_devirtualize)
2346 return;
2347
2348 if (TREE_CODE (obj) != SSA_NAME)
2349 return;
2350
2351 class ipa_node_params *info = fbi->info;
2352 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2353 {
2354 struct ipa_jump_func jfunc;
2355 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2356 return;
2357
2358 anc_offset = 0;
2359 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2360 gcc_assert (index >= 0);
2361 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2362 call, &jfunc))
2363 return;
2364 }
2365 else
2366 {
2367 struct ipa_jump_func jfunc;
2368 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2369 tree expr;
2370
2371 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2372 if (!expr)
2373 return;
2374 index = ipa_get_param_decl_index (info,
2375 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2376 gcc_assert (index >= 0);
2377 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2378 call, &jfunc, anc_offset))
2379 return;
2380 }
2381
2382 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2383 class cgraph_indirect_call_info *ii = cs->indirect_info;
2384 ii->offset = anc_offset;
2385 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2386 ii->otr_type = obj_type_ref_class (target);
2387 ii->polymorphic = 1;
2388 }
2389
2390 /* Analyze call statement CALL to determine whether and how it utilizes the
2391 formal parameters of the caller FBI->node (described by FBI->info), and
2392 note indirect edges that call a formal parameter. */
2393
2394 static void
2395 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2396 {
2397 tree target = gimple_call_fn (call);
2398
2399 if (!target
2400 || (TREE_CODE (target) != SSA_NAME
2401 && !virtual_method_call_p (target)))
2402 return;
2403
2404 struct cgraph_edge *cs = fbi->node->get_edge (call);
2405 /* If we previously turned the call into a direct call, there is
2406 no need to analyze. */
2407 if (cs && !cs->indirect_unknown_callee)
2408 return;
2409
2410 if (cs->indirect_info->polymorphic && flag_devirtualize)
2411 {
2412 tree instance;
2413 tree target = gimple_call_fn (call);
2414 ipa_polymorphic_call_context context (current_function_decl,
2415 target, call, &instance);
2416
2417 gcc_checking_assert (cs->indirect_info->otr_type
2418 == obj_type_ref_class (target));
2419 gcc_checking_assert (cs->indirect_info->otr_token
2420 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2421
2422 cs->indirect_info->vptr_changed
2423 = !context.get_dynamic_type (instance,
2424 OBJ_TYPE_REF_OBJECT (target),
2425 obj_type_ref_class (target), call,
2426 &fbi->aa_walk_budget);
2427 cs->indirect_info->context = context;
2428 }
2429
2430 if (TREE_CODE (target) == SSA_NAME)
2431 ipa_analyze_indirect_call_uses (fbi, call, target);
2432 else if (virtual_method_call_p (target))
2433 ipa_analyze_virtual_call_uses (fbi, call, target);
2434 }
2435
2436
2437 /* Analyze the call statement STMT with respect to the formal parameters
2438 (described in FBI->info) of the caller given by FBI->node. Currently it
2439 only checks whether formal parameters are called. */
2440
2441 static void
2442 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2443 {
2444 if (is_gimple_call (stmt))
2445 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2446 }
2447
2448 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2449 address operands alike. If OP is a parameter declaration, mark it as used
2450 in the info structure passed in DATA. */
2451
2452 static bool
2453 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2454 {
2455 class ipa_node_params *info = (class ipa_node_params *) data;
2456
2457 op = get_base_address (op);
2458 if (op
2459 && TREE_CODE (op) == PARM_DECL)
2460 {
2461 int index = ipa_get_param_decl_index (info, op);
2462 gcc_assert (index >= 0);
2463 ipa_set_param_used (info, index, true);
2464 }
2465
2466 return false;
2467 }
2468
2469 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2470 the findings in various structures of the associated ipa_node_params
2471 structure, such as parameter flags, notes etc. FBI holds various data about
2472 the function being analyzed. */
2473
2474 static void
2475 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2476 {
2477 gimple_stmt_iterator gsi;
2478 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2479 {
2480 gimple *stmt = gsi_stmt (gsi);
2481
2482 if (is_gimple_debug (stmt))
2483 continue;
2484
2485 ipa_analyze_stmt_uses (fbi, stmt);
2486 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2487 visit_ref_for_mod_analysis,
2488 visit_ref_for_mod_analysis,
2489 visit_ref_for_mod_analysis);
2490 }
2491 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2492 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2493 visit_ref_for_mod_analysis,
2494 visit_ref_for_mod_analysis,
2495 visit_ref_for_mod_analysis);
2496 }
2497
2498 /* Calculate controlled uses of parameters of NODE. */
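/* A use is counted as controlled only when it occurs in a call statement
   (and is therefore described by a jump function); any other non-debug use
   of the parameter makes the count IPA_UNDESCRIBED_USE, as do parameters
   not kept in SSA form. */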
2499
2500 static void
2501 ipa_analyze_controlled_uses (struct cgraph_node *node)
2502 {
2503 class ipa_node_params *info = IPA_NODE_REF (node);
2504
2505 for (int i = 0; i < ipa_get_param_count (info); i++)
2506 {
2507 tree parm = ipa_get_param (info, i);
2508 int controlled_uses = 0;
2509
2510 /* For SSA regs see if parameter is used. For non-SSA we compute
2511 the flag during modification analysis. */
2512 if (is_gimple_reg (parm))
2513 {
2514 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2515 parm);
2516 if (ddef && !has_zero_uses (ddef))
2517 {
2518 imm_use_iterator imm_iter;
2519 use_operand_p use_p;
2520
2521 ipa_set_param_used (info, i, true);
2522 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2523 if (!is_gimple_call (USE_STMT (use_p)))
2524 {
2525 if (!is_gimple_debug (USE_STMT (use_p)))
2526 {
2527 controlled_uses = IPA_UNDESCRIBED_USE;
2528 break;
2529 }
2530 }
2531 else
2532 controlled_uses++;
2533 }
2534 else
2535 controlled_uses = 0;
2536 }
2537 else
2538 controlled_uses = IPA_UNDESCRIBED_USE;
2539 ipa_set_controlled_uses (info, i, controlled_uses);
2540 }
2541 }
2542
2543 /* Free stuff in BI. */
2544
2545 static void
2546 free_ipa_bb_info (struct ipa_bb_info *bi)
2547 {
2548 bi->cg_edges.release ();
2549 bi->param_aa_statuses.release ();
2550 }
2551
2552 /* Dominator walker driving the analysis. */
2553
2554 class analysis_dom_walker : public dom_walker
2555 {
2556 public:
2557 analysis_dom_walker (struct ipa_func_body_info *fbi)
2558 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2559
2560 virtual edge before_dom_children (basic_block);
2561
2562 private:
2563 struct ipa_func_body_info *m_fbi;
2564 };
2565
2566 edge
2567 analysis_dom_walker::before_dom_children (basic_block bb)
2568 {
2569 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2570 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2571 return NULL;
2572 }
2573
2574 /* Release body info FBI. */
2575
2576 void
2577 ipa_release_body_info (struct ipa_func_body_info *fbi)
2578 {
2579 int i;
2580 struct ipa_bb_info *bi;
2581
2582 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2583 free_ipa_bb_info (bi);
2584 fbi->bb_infos.release ();
2585 }
2586
2587 /* Initialize the array describing properties of formal parameters
2588 of NODE, analyze their uses and compute jump functions associated
2589 with actual arguments of calls from within NODE. */
2590
2591 void
2592 ipa_analyze_node (struct cgraph_node *node)
2593 {
2594 struct ipa_func_body_info fbi;
2595 class ipa_node_params *info;
2596
2597 ipa_check_create_node_params ();
2598 ipa_check_create_edge_args ();
2599 info = IPA_NODE_REF (node);
2600
2601 if (info->analysis_done)
2602 return;
2603 info->analysis_done = 1;
2604
2605 if (ipa_func_spec_opts_forbid_analysis_p (node))
2606 {
2607 for (int i = 0; i < ipa_get_param_count (info); i++)
2608 {
2609 ipa_set_param_used (info, i, true);
2610 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2611 }
2612 return;
2613 }
2614
2615 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2616 push_cfun (func);
2617 calculate_dominance_info (CDI_DOMINATORS);
2618 ipa_initialize_node_params (node);
2619 ipa_analyze_controlled_uses (node);
2620
2621 fbi.node = node;
2622 fbi.info = IPA_NODE_REF (node);
2623 fbi.bb_infos = vNULL;
2624 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2625 fbi.param_count = ipa_get_param_count (info);
2626 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2627
2628 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2629 {
2630 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2631 bi->cg_edges.safe_push (cs);
2632 }
2633
2634 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2635 {
2636 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2637 bi->cg_edges.safe_push (cs);
2638 }
2639
2640 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2641
2642 ipa_release_body_info (&fbi);
2643 free_dominance_info (CDI_DOMINATORS);
2644 pop_cfun ();
2645 }
2646
2647 /* Update the jump functions associated with call graph edge E when the call
2648 graph edge CS is being inlined, assuming that E->caller is already (possibly
2649 indirectly) inlined into CS->callee and that E has not been inlined. */
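/* In effect, E's pass-through and ancestor jump functions, which refer to the
   formal parameters of CS->callee, are re-expressed in terms of the formal
   parameters of the function CS resides in by composing them with the
   corresponding jump functions recorded on CS, degrading to IPA_JF_UNKNOWN
   whenever such a composition is not possible. */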
2650
2651 static void
2652 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2653 struct cgraph_edge *e)
2654 {
2655 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2656 class ipa_edge_args *args = IPA_EDGE_REF (e);
2657 int count = ipa_get_cs_argument_count (args);
2658 int i;
2659
2660 for (i = 0; i < count; i++)
2661 {
2662 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2663 class ipa_polymorphic_call_context *dst_ctx
2664 = ipa_get_ith_polymorhic_call_context (args, i);
2665
2666 if (dst->type == IPA_JF_ANCESTOR)
2667 {
2668 struct ipa_jump_func *src;
2669 int dst_fid = dst->value.ancestor.formal_id;
2670 class ipa_polymorphic_call_context *src_ctx
2671 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2672
2673 /* Variable number of arguments can cause havoc if we try to access
2674 one that does not exist in the inlined edge. So make sure we
2675 don't. */
2676 if (dst_fid >= ipa_get_cs_argument_count (top))
2677 {
2678 ipa_set_jf_unknown (dst);
2679 continue;
2680 }
2681
2682 src = ipa_get_ith_jump_func (top, dst_fid);
2683
2684 if (src_ctx && !src_ctx->useless_p ())
2685 {
2686 class ipa_polymorphic_call_context ctx = *src_ctx;
2687
2688 /* TODO: Make type preserved safe WRT contexts. */
2689 if (!ipa_get_jf_ancestor_type_preserved (dst))
2690 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2691 ctx.offset_by (dst->value.ancestor.offset);
2692 if (!ctx.useless_p ())
2693 {
2694 if (!dst_ctx)
2695 {
2696 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2697 count);
2698 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2699 }
2700
2701 dst_ctx->combine_with (ctx);
2702 }
2703 }
2704
2705 if (src->agg.items
2706 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2707 {
2708 struct ipa_agg_jf_item *item;
2709 int j;
2710
2711 /* Currently we do not produce clobber aggregate jump functions,
2712 replace with merging when we do. */
2713 gcc_assert (!dst->agg.items);
2714
2715 dst->agg.items = vec_safe_copy (src->agg.items);
2716 dst->agg.by_ref = src->agg.by_ref;
2717 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2718 item->offset -= dst->value.ancestor.offset;
2719 }
2720
2721 if (src->type == IPA_JF_PASS_THROUGH
2722 && src->value.pass_through.operation == NOP_EXPR)
2723 {
2724 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2725 dst->value.ancestor.agg_preserved &=
2726 src->value.pass_through.agg_preserved;
2727 }
2728 else if (src->type == IPA_JF_PASS_THROUGH
2729 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2730 {
2731 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2732 dst->value.ancestor.agg_preserved = false;
2733 }
2734 else if (src->type == IPA_JF_ANCESTOR)
2735 {
2736 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2737 dst->value.ancestor.offset += src->value.ancestor.offset;
2738 dst->value.ancestor.agg_preserved &=
2739 src->value.ancestor.agg_preserved;
2740 }
2741 else
2742 ipa_set_jf_unknown (dst);
2743 }
2744 else if (dst->type == IPA_JF_PASS_THROUGH)
2745 {
2746 struct ipa_jump_func *src;
2747 /* We must check the range because of calls with a variable number of
2748 arguments, and we cannot combine jump functions with operations. */
2749 if (dst->value.pass_through.operation == NOP_EXPR
2750 && (dst->value.pass_through.formal_id
2751 < ipa_get_cs_argument_count (top)))
2752 {
2753 int dst_fid = dst->value.pass_through.formal_id;
2754 src = ipa_get_ith_jump_func (top, dst_fid);
2755 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2756 class ipa_polymorphic_call_context *src_ctx
2757 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2758
2759 if (src_ctx && !src_ctx->useless_p ())
2760 {
2761 class ipa_polymorphic_call_context ctx = *src_ctx;
2762
2763 /* TODO: Make type preserved safe WRT contexts. */
2764 if (!ipa_get_jf_pass_through_type_preserved (dst))
2765 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2766 if (!ctx.useless_p ())
2767 {
2768 if (!dst_ctx)
2769 {
2770 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2771 count);
2772 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2773 }
2774 dst_ctx->combine_with (ctx);
2775 }
2776 }
2777 switch (src->type)
2778 {
2779 case IPA_JF_UNKNOWN:
2780 ipa_set_jf_unknown (dst);
2781 break;
2782 case IPA_JF_CONST:
2783 ipa_set_jf_cst_copy (dst, src);
2784 break;
2785
2786 case IPA_JF_PASS_THROUGH:
2787 {
2788 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2789 enum tree_code operation;
2790 operation = ipa_get_jf_pass_through_operation (src);
2791
2792 if (operation == NOP_EXPR)
2793 {
2794 bool agg_p;
2795 agg_p = dst_agg_p
2796 && ipa_get_jf_pass_through_agg_preserved (src);
2797 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2798 }
2799 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2800 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2801 else
2802 {
2803 tree operand = ipa_get_jf_pass_through_operand (src);
2804 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2805 operation);
2806 }
2807 break;
2808 }
2809 case IPA_JF_ANCESTOR:
2810 {
2811 bool agg_p;
2812 agg_p = dst_agg_p
2813 && ipa_get_jf_ancestor_agg_preserved (src);
2814 ipa_set_ancestor_jf (dst,
2815 ipa_get_jf_ancestor_offset (src),
2816 ipa_get_jf_ancestor_formal_id (src),
2817 agg_p);
2818 break;
2819 }
2820 default:
2821 gcc_unreachable ();
2822 }
2823
2824 if (src->agg.items
2825 && (dst_agg_p || !src->agg.by_ref))
2826 {
2827 /* Currently we do not produce clobber aggregate jump
2828 functions, replace with merging when we do. */
2829 gcc_assert (!dst->agg.items);
2830
2831 dst->agg.by_ref = src->agg.by_ref;
2832 dst->agg.items = vec_safe_copy (src->agg.items);
2833 }
2834 }
2835 else
2836 ipa_set_jf_unknown (dst);
2837 }
2838 }
2839 }
2840
2841 /* If TARGET is an addr_expr of a function declaration, make it the
2842 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2843 Otherwise, return NULL. */
2844
2845 struct cgraph_edge *
2846 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2847 bool speculative)
2848 {
2849 struct cgraph_node *callee;
2850 bool unreachable = false;
2851
2852 if (TREE_CODE (target) == ADDR_EXPR)
2853 target = TREE_OPERAND (target, 0);
2854 if (TREE_CODE (target) != FUNCTION_DECL)
2855 {
2856 target = canonicalize_constructor_val (target, NULL);
2857 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2858 {
2859 /* Member pointer call that goes through a VMT lookup. */
2860 if (ie->indirect_info->member_ptr
2861 /* Or if target is not an invariant expression and we do not
2862 know if it will evaluate to a function at runtime.
2863 This can happen when folding through &VAR, where &VAR
2864 is IP invariant, but VAR itself is not.
2865
2866 TODO: Revisit this when GCC 5 is branched. It seems that
2867 member_ptr check is not needed and that we may try to fold
2868 the expression and see if VAR is readonly. */
2869 || !is_gimple_ip_invariant (target))
2870 {
2871 if (dump_enabled_p ())
2872 {
2873 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2874 "discovered direct call non-invariant %s\n",
2875 ie->caller->dump_name ());
2876 }
2877 return NULL;
2878 }
2879
2880
2881 if (dump_enabled_p ())
2882 {
2883 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2884 "discovered direct call to non-function in %s, "
2885 "making it __builtin_unreachable\n",
2886 ie->caller->dump_name ());
2887 }
2888
2889 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2890 callee = cgraph_node::get_create (target);
2891 unreachable = true;
2892 }
2893 else
2894 callee = cgraph_node::get (target);
2895 }
2896 else
2897 callee = cgraph_node::get (target);
2898
2899 /* Because may-edges are not explicitly represented and the vtable may be
2900 external, we may create the first reference to the object in the unit. */
2901 if (!callee || callee->global.inlined_to)
2902 {
2903
2904 /* We had better ensure we can refer to it.
2905 In the case of static functions we are out of luck, since we already
2906 removed its body. In the case of public functions we may or may
2907 not introduce the reference. */
2908 if (!canonicalize_constructor_val (target, NULL)
2909 || !TREE_PUBLIC (target))
2910 {
2911 if (dump_file)
2912 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2913 "(%s -> %s) but cannot refer to it. Giving up.\n",
2914 ie->caller->dump_name (),
2915 ie->callee->dump_name ());
2916 return NULL;
2917 }
2918 callee = cgraph_node::get_create (target);
2919 }
2920
2921 /* If the edge is already speculated, just check whether the new target agrees. */
2922 if (speculative && ie->speculative)
2923 {
2924 struct cgraph_edge *e2;
2925 struct ipa_ref *ref;
2926 ie->speculative_call_info (e2, ie, ref);
2927 if (e2->callee->ultimate_alias_target ()
2928 != callee->ultimate_alias_target ())
2929 {
2930 if (dump_file)
2931 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2932 "target (%s -> %s) but the call is already "
2933 "speculated to %s. Giving up.\n",
2934 ie->caller->dump_name (), callee->dump_name (),
2935 e2->callee->dump_name ());
2936 }
2937 else
2938 {
2939 if (dump_file)
2940 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2941 "(%s -> %s) this agree with previous speculation.\n",
2942 ie->caller->dump_name (), callee->dump_name ());
2943 }
2944 return NULL;
2945 }
2946
2947 if (!dbg_cnt (devirt))
2948 return NULL;
2949
2950 ipa_check_create_node_params ();
2951
2952 /* We cannot make edges to inline clones. It is a bug that someone removed
2953 the cgraph node too early. */
2954 gcc_assert (!callee->global.inlined_to);
2955
2956 if (dump_file && !unreachable)
2957 {
2958 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2959 "(%s -> %s), for stmt ",
2960 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2961 speculative ? "speculative" : "known",
2962 ie->caller->dump_name (),
2963 callee->dump_name ());
2964 if (ie->call_stmt)
2965 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2966 else
2967 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2968 }
2969 if (dump_enabled_p ())
2970 {
2971 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2972 "converting indirect call in %s to direct call to %s\n",
2973 ie->caller->name (), callee->name ());
2974 }
2975 if (!speculative)
2976 {
2977 struct cgraph_edge *orig = ie;
2978 ie = ie->make_direct (callee);
2979 /* If we resolved a speculative edge, the cost is already up to date
2980 for the direct call (adjusted by inline_edge_duplication_hook). */
2981 if (ie == orig)
2982 {
2983 ipa_call_summary *es = ipa_call_summaries->get (ie);
2984 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2985 - eni_size_weights.call_cost);
2986 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2987 - eni_time_weights.call_cost);
2988 }
2989 }
2990 else
2991 {
2992 if (!callee->can_be_discarded_p ())
2993 {
2994 cgraph_node *alias;
2995 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2996 if (alias)
2997 callee = alias;
2998 }
2999 /* make_speculative will update ie's cost to direct call cost. */
3000 ie = ie->make_speculative
3001 (callee, ie->count.apply_scale (8, 10));
3002 }
3003
3004 return ie;
3005 }
3006
3007 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3008 CONSTRUCTOR and return it. Return NULL if the search fails for some
3009 reason. */
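/* REQ_OFFSET is expressed in bits from the start of CONSTRUCTOR. */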
3010
3011 static tree
3012 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3013 {
3014 tree type = TREE_TYPE (constructor);
3015 if (TREE_CODE (type) != ARRAY_TYPE
3016 && TREE_CODE (type) != RECORD_TYPE)
3017 return NULL;
3018
3019 unsigned ix;
3020 tree index, val;
3021 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3022 {
3023 HOST_WIDE_INT elt_offset;
3024 if (TREE_CODE (type) == ARRAY_TYPE)
3025 {
3026 offset_int off;
3027 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3028 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3029
3030 if (index)
3031 {
3032 if (TREE_CODE (index) == RANGE_EXPR)
3033 off = wi::to_offset (TREE_OPERAND (index, 0));
3034 else
3035 off = wi::to_offset (index);
3036 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3037 {
3038 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3039 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3040 off = wi::sext (off - wi::to_offset (low_bound),
3041 TYPE_PRECISION (TREE_TYPE (index)));
3042 }
3043 off *= wi::to_offset (unit_size);
3044 /* ??? Handle more than just the first index of a
3045 RANGE_EXPR. */
3046 }
3047 else
3048 off = wi::to_offset (unit_size) * ix;
3049
3050 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3051 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3052 continue;
3053 elt_offset = off.to_shwi ();
3054 }
3055 else if (TREE_CODE (type) == RECORD_TYPE)
3056 {
3057 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3058 if (DECL_BIT_FIELD (index))
3059 continue;
3060 elt_offset = int_bit_position (index);
3061 }
3062 else
3063 gcc_unreachable ();
3064
3065 if (elt_offset > req_offset)
3066 return NULL;
3067
3068 if (TREE_CODE (val) == CONSTRUCTOR)
3069 return find_constructor_constant_at_offset (val,
3070 req_offset - elt_offset);
3071
3072 if (elt_offset == req_offset
3073 && is_gimple_reg_type (TREE_TYPE (val))
3074 && is_gimple_ip_invariant (val))
3075 return val;
3076 }
3077 return NULL;
3078 }
3079
3080 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3081 invariant from a static constructor and if so, return it. Otherwise return
3082 NULL. */
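/* For example, when BY_REF is true and SCALAR is the address of a read-only
   global variable whose initializer is a CONSTRUCTOR, the constant found at
   OFFSET (in bits) within that initializer is returned. */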
3083
3084 static tree
3085 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3086 {
3087 if (by_ref)
3088 {
3089 if (TREE_CODE (scalar) != ADDR_EXPR)
3090 return NULL;
3091 scalar = TREE_OPERAND (scalar, 0);
3092 }
3093
3094 if (!VAR_P (scalar)
3095 || !is_global_var (scalar)
3096 || !TREE_READONLY (scalar)
3097 || !DECL_INITIAL (scalar)
3098 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3099 return NULL;
3100
3101 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3102 }
3103
3104 /* Retrieve value from aggregate jump function AGG or static initializer of
3105 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3106 none. BY_REF specifies whether the value has to be passed by reference or
3107 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3108 to is set to true if the value comes from an initializer of a constant. */
3109
3110 tree
3111 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3112 HOST_WIDE_INT offset, bool by_ref,
3113 bool *from_global_constant)
3114 {
3115 struct ipa_agg_jf_item *item;
3116 int i;
3117
3118 if (scalar)
3119 {
3120 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3121 if (res)
3122 {
3123 if (from_global_constant)
3124 *from_global_constant = true;
3125 return res;
3126 }
3127 }
3128
3129 if (!agg
3130 || by_ref != agg->by_ref)
3131 return NULL;
3132
3133 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3134 if (item->offset == offset)
3135 {
3136 /* Currently we do not have clobber values, return NULL for them once
3137 we do. */
3138 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3139 if (from_global_constant)
3140 *from_global_constant = false;
3141 return item->value;
3142 }
3143 return NULL;
3144 }
3145
3146 /* Remove a reference to SYMBOL from the list of references of a node given by
3147 reference description RDESC. Return true if the reference has been
3148 successfully found and removed. */
3149
3150 static bool
3151 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3152 {
3153 struct ipa_ref *to_del;
3154 struct cgraph_edge *origin;
3155
3156 origin = rdesc->cs;
3157 if (!origin)
3158 return false;
3159 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3160 origin->lto_stmt_uid);
3161 if (!to_del)
3162 return false;
3163
3164 to_del->remove_reference ();
3165 if (dump_file)
3166 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3167 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3168 return true;
3169 }
3170
3171 /* If JFUNC has a reference description with refcount different from
3172 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3173 NULL. JFUNC must be a constant jump function. */
3174
3175 static struct ipa_cst_ref_desc *
3176 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3177 {
3178 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3179 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3180 return rdesc;
3181 else
3182 return NULL;
3183 }
3184
3185 /* If the value of constant jump function JFUNC is an address of a function
3186 declaration, return the associated call graph node. Otherwise return
3187 NULL. */
3188
3189 static cgraph_node *
3190 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3191 {
3192 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3193 tree cst = ipa_get_jf_constant (jfunc);
3194 if (TREE_CODE (cst) != ADDR_EXPR
3195 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3196 return NULL;
3197
3198 return cgraph_node::get (TREE_OPERAND (cst, 0));
3199 }
3200
3201
3202 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3203 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3204 the edge specified in the rdesc. Return false if either the symbol or the
3205 reference could not be found, otherwise return true. */
3206
3207 static bool
3208 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3209 {
3210 struct ipa_cst_ref_desc *rdesc;
3211 if (jfunc->type == IPA_JF_CONST
3212 && (rdesc = jfunc_rdesc_usable (jfunc))
3213 && --rdesc->refcount == 0)
3214 {
3215 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3216 if (!symbol)
3217 return false;
3218
3219 return remove_described_reference (symbol, rdesc);
3220 }
3221 return true;
3222 }
3223
3224 /* Try to find a destination for indirect edge IE that corresponds to a simple
3225 call or a call of a member function pointer and where the destination is a
3226 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3227 the type of the parameter to which the result of JFUNC is passed. If it can
3228 be determined, return the newly direct edge, otherwise return NULL.
3229 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3230
3231 static struct cgraph_edge *
3232 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3233 struct ipa_jump_func *jfunc, tree target_type,
3234 class ipa_node_params *new_root_info)
3235 {
3236 struct cgraph_edge *cs;
3237 tree target;
3238 bool agg_contents = ie->indirect_info->agg_contents;
3239 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3240 if (agg_contents)
3241 {
3242 bool from_global_constant;
3243 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3244 ie->indirect_info->offset,
3245 ie->indirect_info->by_ref,
3246 &from_global_constant);
3247 if (target
3248 && !from_global_constant
3249 && !ie->indirect_info->guaranteed_unmodified)
3250 return NULL;
3251 }
3252 else
3253 target = scalar;
3254 if (!target)
3255 return NULL;
3256 cs = ipa_make_edge_direct_to_target (ie, target);
3257
3258 if (cs && !agg_contents)
3259 {
3260 bool ok;
3261 gcc_checking_assert (cs->callee
3262 && (cs != ie
3263 || jfunc->type != IPA_JF_CONST
3264 || !cgraph_node_for_jfunc (jfunc)
3265 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3266 ok = try_decrement_rdesc_refcount (jfunc);
3267 gcc_checking_assert (ok);
3268 }
3269
3270 return cs;
3271 }
3272
3273 /* Return the target to be used in cases of impossible devirtualization. IE
3274 and target (the latter can be NULL) are dumped when dumping is enabled. */
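/* The returned target is the declaration of __builtin_unreachable, since such
   a call site can never be executed in a well-defined program. */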
3275
3276 tree
3277 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3278 {
3279 if (dump_file)
3280 {
3281 if (target)
3282 fprintf (dump_file,
3283 "Type inconsistent devirtualization: %s->%s\n",
3284 ie->caller->dump_name (),
3285 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3286 else
3287 fprintf (dump_file,
3288 "No devirtualization target in %s\n",
3289 ie->caller->dump_name ());
3290 }
3291 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3292 cgraph_node::get_create (new_target);
3293 return new_target;
3294 }
3295
3296 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3297 call based on a formal parameter which is described by jump function JFUNC
3298 and if it can be determined, make it direct and return the direct edge.
3299 Otherwise, return NULL. CTX describes the polymorphic context that the
3300 parameter the call is based on brings along with it. */
3301
3302 static struct cgraph_edge *
3303 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3304 struct ipa_jump_func *jfunc,
3305 class ipa_polymorphic_call_context ctx)
3306 {
3307 tree target = NULL;
3308 bool speculative = false;
3309
3310 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3311 return NULL;
3312
3313 gcc_assert (!ie->indirect_info->by_ref);
3314
3315 /* Try to do lookup via known virtual table pointer value. */
3316 if (!ie->indirect_info->vptr_changed
3317 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3318 {
3319 tree vtable;
3320 unsigned HOST_WIDE_INT offset;
3321 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3322 : NULL;
3323 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3324 ie->indirect_info->offset,
3325 true);
3326 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3327 {
3328 bool can_refer;
3329 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3330 vtable, offset, &can_refer);
3331 if (can_refer)
3332 {
3333 if (!t
3334 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3335 || !possible_polymorphic_call_target_p
3336 (ie, cgraph_node::get (t)))
3337 {
3338 /* Do not speculate builtin_unreachable, it is stupid! */
3339 if (!ie->indirect_info->vptr_changed)
3340 target = ipa_impossible_devirt_target (ie, target);
3341 else
3342 target = NULL;
3343 }
3344 else
3345 {
3346 target = t;
3347 speculative = ie->indirect_info->vptr_changed;
3348 }
3349 }
3350 }
3351 }
3352
3353 ipa_polymorphic_call_context ie_context (ie);
3354 vec <cgraph_node *>targets;
3355 bool final;
3356
3357 ctx.offset_by (ie->indirect_info->offset);
3358 if (ie->indirect_info->vptr_changed)
3359 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3360 ie->indirect_info->otr_type);
3361 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3362 targets = possible_polymorphic_call_targets
3363 (ie->indirect_info->otr_type,
3364 ie->indirect_info->otr_token,
3365 ctx, &final);
3366 if (final && targets.length () <= 1)
3367 {
3368 speculative = false;
3369 if (targets.length () == 1)
3370 target = targets[0]->decl;
3371 else
3372 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3373 }
3374 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3375 && !ie->speculative && ie->maybe_hot_p ())
3376 {
3377 cgraph_node *n;
3378 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3379 ie->indirect_info->otr_token,
3380 ie->indirect_info->context);
3381 if (n)
3382 {
3383 target = n->decl;
3384 speculative = true;
3385 }
3386 }
3387
3388 if (target)
3389 {
3390 if (!possible_polymorphic_call_target_p
3391 (ie, cgraph_node::get_create (target)))
3392 {
3393 if (speculative)
3394 return NULL;
3395 target = ipa_impossible_devirt_target (ie, target);
3396 }
3397 return ipa_make_edge_direct_to_target (ie, target, speculative);
3398 }
3399 else
3400 return NULL;
3401 }
3402
3403 /* Update the param called notes associated with NODE when CS is being inlined,
3404 assuming NODE is (potentially indirectly) inlined into CS->callee.
3405 Moreover, if the callee is discovered to be constant, create a new cgraph
3406 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3407 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3408
3409 static bool
3410 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3411 struct cgraph_node *node,
3412 vec<cgraph_edge *> *new_edges)
3413 {
3414 class ipa_edge_args *top;
3415 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3416 class ipa_node_params *new_root_info, *inlined_node_info;
3417 bool res = false;
3418
3419 ipa_check_create_edge_args ();
3420 top = IPA_EDGE_REF (cs);
3421 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3422 ? cs->caller->global.inlined_to
3423 : cs->caller);
3424 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3425
3426 for (ie = node->indirect_calls; ie; ie = next_ie)
3427 {
3428 class cgraph_indirect_call_info *ici = ie->indirect_info;
3429 struct ipa_jump_func *jfunc;
3430 int param_index;
3431 cgraph_node *spec_target = NULL;
3432
3433 next_ie = ie->next_callee;
3434
3435 if (ici->param_index == -1)
3436 continue;
3437
3438 /* We must check range due to calls with variable number of arguments: */
3439 if (ici->param_index >= ipa_get_cs_argument_count (top))
3440 {
3441 ici->param_index = -1;
3442 continue;
3443 }
3444
3445 param_index = ici->param_index;
3446 jfunc = ipa_get_ith_jump_func (top, param_index);
3447
3448 if (ie->speculative)
3449 {
3450 struct cgraph_edge *de;
3451 struct ipa_ref *ref;
3452 ie->speculative_call_info (de, ie, ref);
3453 spec_target = de->callee;
3454 }
3455
3456 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3457 new_direct_edge = NULL;
3458 else if (ici->polymorphic)
3459 {
3460 ipa_polymorphic_call_context ctx;
3461 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3462 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3463 }
3464 else
3465 {
3466 tree target_type = ipa_get_type (inlined_node_info, param_index);
3467 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3468 target_type,
3469 new_root_info);
3470 }
3471
3472 /* If speculation was removed, then we need to do nothing. */
3473 if (new_direct_edge && new_direct_edge != ie
3474 && new_direct_edge->callee == spec_target)
3475 {
3476 new_direct_edge->indirect_inlining_edge = 1;
3477 top = IPA_EDGE_REF (cs);
3478 res = true;
3479 if (!new_direct_edge->speculative)
3480 continue;
3481 }
3482 else if (new_direct_edge)
3483 {
3484 new_direct_edge->indirect_inlining_edge = 1;
3485 if (new_direct_edge->call_stmt)
3486 new_direct_edge->call_stmt_cannot_inline_p
3487 = !gimple_check_call_matching_types (
3488 new_direct_edge->call_stmt,
3489 new_direct_edge->callee->decl, false);
3490 if (new_edges)
3491 {
3492 new_edges->safe_push (new_direct_edge);
3493 res = true;
3494 }
3495 top = IPA_EDGE_REF (cs);
3496 /* If speculative edge was introduced we still need to update
3497 call info of the indirect edge. */
3498 if (!new_direct_edge->speculative)
3499 continue;
3500 }
3501 if (jfunc->type == IPA_JF_PASS_THROUGH
3502 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3503 {
3504 if (ici->agg_contents
3505 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3506 && !ici->polymorphic)
3507 ici->param_index = -1;
3508 else
3509 {
3510 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3511 if (ici->polymorphic
3512 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3513 ici->vptr_changed = true;
3514 }
3515 }
3516 else if (jfunc->type == IPA_JF_ANCESTOR)
3517 {
3518 if (ici->agg_contents
3519 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3520 && !ici->polymorphic)
3521 ici->param_index = -1;
3522 else
3523 {
3524 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3525 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3526 if (ici->polymorphic
3527 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3528 ici->vptr_changed = true;
3529 }
3530 }
3531 else
3532 /* Either we can find a destination for this edge now or never. */
3533 ici->param_index = -1;
3534 }
3535
3536 return res;
3537 }
3538
3539 /* Recursively traverse subtree of NODE (including node) made of inlined
3540 cgraph_edges when CS has been inlined and invoke
3541 update_indirect_edges_after_inlining on all nodes and
3542 update_jump_functions_after_inlining on all non-inlined edges that lead out
3543 of this subtree. Newly discovered indirect edges will be added to
3544 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3545 created. */
3546
3547 static bool
3548 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3549 struct cgraph_node *node,
3550 vec<cgraph_edge *> *new_edges)
3551 {
3552 struct cgraph_edge *e;
3553 bool res;
3554
3555 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3556
3557 for (e = node->callees; e; e = e->next_callee)
3558 if (!e->inline_failed)
3559 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3560 else
3561 update_jump_functions_after_inlining (cs, e);
3562 for (e = node->indirect_calls; e; e = e->next_callee)
3563 update_jump_functions_after_inlining (cs, e);
3564
3565 return res;
3566 }
3567
3568 /* Combine two controlled uses counts as done during inlining. */
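/* The combined count is C + D - 1: the single use that was the argument of
   the now-inlined call is replaced by the D uses inside the inlined body. */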
3569
3570 static int
3571 combine_controlled_uses_counters (int c, int d)
3572 {
3573 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3574 return IPA_UNDESCRIBED_USE;
3575 else
3576 return c + d - 1;
3577 }
3578
3579 /* Propagate the number of controlled uses from CS->callee to the new root of
3580 the tree of inlined nodes. */
3581
3582 static void
3583 propagate_controlled_uses (struct cgraph_edge *cs)
3584 {
3585 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3586 struct cgraph_node *new_root = cs->caller->global.inlined_to
3587 ? cs->caller->global.inlined_to : cs->caller;
3588 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3589 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3590 int count, i;
3591
3592 count = MIN (ipa_get_cs_argument_count (args),
3593 ipa_get_param_count (old_root_info));
3594 for (i = 0; i < count; i++)
3595 {
3596 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3597 struct ipa_cst_ref_desc *rdesc;
3598
3599 if (jf->type == IPA_JF_PASS_THROUGH)
3600 {
3601 int src_idx, c, d;
3602 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3603 c = ipa_get_controlled_uses (new_root_info, src_idx);
3604 d = ipa_get_controlled_uses (old_root_info, i);
3605
3606 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3607 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3608 c = combine_controlled_uses_counters (c, d);
3609 ipa_set_controlled_uses (new_root_info, src_idx, c);
3610 if (c == 0 && new_root_info->ipcp_orig_node)
3611 {
3612 struct cgraph_node *n;
3613 struct ipa_ref *ref;
3614 tree t = new_root_info->known_csts[src_idx];
3615
3616 if (t && TREE_CODE (t) == ADDR_EXPR
3617 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3618 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3619 && (ref = new_root->find_reference (n, NULL, 0)))
3620 {
3621 if (dump_file)
3622 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3623 "reference from %s to %s.\n",
3624 new_root->dump_name (),
3625 n->dump_name ());
3626 ref->remove_reference ();
3627 }
3628 }
3629 }
3630 else if (jf->type == IPA_JF_CONST
3631 && (rdesc = jfunc_rdesc_usable (jf)))
3632 {
3633 int d = ipa_get_controlled_uses (old_root_info, i);
3634 int c = rdesc->refcount;
3635 rdesc->refcount = combine_controlled_uses_counters (c, d);
3636 if (rdesc->refcount == 0)
3637 {
3638 tree cst = ipa_get_jf_constant (jf);
3639 struct cgraph_node *n;
3640 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3641 && TREE_CODE (TREE_OPERAND (cst, 0))
3642 == FUNCTION_DECL);
3643 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3644 if (n)
3645 {
3646 struct cgraph_node *clone;
3647 bool ok;
3648 ok = remove_described_reference (n, rdesc);
3649 gcc_checking_assert (ok);
3650
3651 clone = cs->caller;
3652 while (clone->global.inlined_to
3653 && clone != rdesc->cs->caller
3654 && IPA_NODE_REF (clone)->ipcp_orig_node)
3655 {
3656 struct ipa_ref *ref;
3657 ref = clone->find_reference (n, NULL, 0);
3658 if (ref)
3659 {
3660 if (dump_file)
3661 fprintf (dump_file, "ipa-prop: Removing "
3662 "cloning-created reference "
3663 "from %s to %s.\n",
3664 clone->dump_name (),
3665 n->dump_name ());
3666 ref->remove_reference ();
3667 }
3668 clone = clone->callers->caller;
3669 }
3670 }
3671 }
3672 }
3673 }
3674
3675 for (i = ipa_get_param_count (old_root_info);
3676 i < ipa_get_cs_argument_count (args);
3677 i++)
3678 {
3679 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3680
3681 if (jf->type == IPA_JF_CONST)
3682 {
3683 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3684 if (rdesc)
3685 rdesc->refcount = IPA_UNDESCRIBED_USE;
3686 }
3687 else if (jf->type == IPA_JF_PASS_THROUGH)
3688 ipa_set_controlled_uses (new_root_info,
3689 jf->value.pass_through.formal_id,
3690 IPA_UNDESCRIBED_USE);
3691 }
3692 }
3693
3694 /* Update jump functions and call note functions on inlining the call site CS.
3695 CS is expected to lead to a node already cloned by
3696 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3697 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3698 created. */
3699
3700 bool
3701 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3702 vec<cgraph_edge *> *new_edges)
3703 {
3704 bool changed;
3705 /* Do nothing if the preparation phase has not been carried out yet
3706 (i.e. during early inlining). */
3707 if (!ipa_node_params_sum)
3708 return false;
3709 gcc_assert (ipa_edge_args_sum);
3710
3711 propagate_controlled_uses (cs);
3712 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3713
3714 return changed;
3715 }
3716
3717 /* Ensure that the array of edge argument infos is big enough to accommodate a
3718 structure for all edges, reallocating it if not. Also allocate the
3719 associated hash tables if they do not already exist. */
3720
3721 void
3722 ipa_check_create_edge_args (void)
3723 {
3724 if (!ipa_edge_args_sum)
3725 ipa_edge_args_sum
3726 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3727 ipa_edge_args_sum_t (symtab, true));
3728 if (!ipa_bits_hash_table)
3729 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3730 if (!ipa_vr_hash_table)
3731 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3732 }
3733
3734 /* Free all ipa_edge structures. */
3735
3736 void
3737 ipa_free_all_edge_args (void)
3738 {
3739 if (!ipa_edge_args_sum)
3740 return;
3741
3742 ipa_edge_args_sum->release ();
3743 ipa_edge_args_sum = NULL;
3744 }
3745
3746 /* Free all ipa_node_params structures. */
3747
3748 void
3749 ipa_free_all_node_params (void)
3750 {
3751 ipa_node_params_sum->release ();
3752 ipa_node_params_sum = NULL;
3753 }
3754
3755 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3756 tables if they do not already exist. */
3757
3758 void
3759 ipcp_transformation_initialize (void)
3760 {
3761 if (!ipa_bits_hash_table)
3762 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3763 if (!ipa_vr_hash_table)
3764 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3765 if (ipcp_transformation_sum == NULL)
3766 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3767 }
3768
3769 /* Set the aggregate replacements of NODE to be AGGVALS. */
3770
3771 void
3772 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3773 struct ipa_agg_replacement_value *aggvals)
3774 {
3775 ipcp_transformation_initialize ();
3776 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3777 s->agg_values = aggvals;
3778 }
3779
3780 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3781 count data structures accordingly. */
3782
3783 void
3784 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3785 {
3786 if (args->jump_functions)
3787 {
3788 struct ipa_jump_func *jf;
3789 int i;
3790 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3791 {
3792 struct ipa_cst_ref_desc *rdesc;
3793 try_decrement_rdesc_refcount (jf);
3794 if (jf->type == IPA_JF_CONST
3795 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3796 && rdesc->cs == cs)
3797 rdesc->cs = NULL;
3798 }
3799 }
3800 }
3801
3802 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3803 reference count data structures accordingly. */
3804
3805 void
3806 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3807 ipa_edge_args *old_args, ipa_edge_args *new_args)
3808 {
3809 unsigned int i;
3810
3811 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3812 if (old_args->polymorphic_call_contexts)
3813 new_args->polymorphic_call_contexts
3814 = vec_safe_copy (old_args->polymorphic_call_contexts);
3815
3816 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3817 {
3818 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3819 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3820
3821 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3822
3823 if (src_jf->type == IPA_JF_CONST)
3824 {
3825 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3826
3827 if (!src_rdesc)
3828 dst_jf->value.constant.rdesc = NULL;
3829 else if (src->caller == dst->caller)
3830 {
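/* Both edges have the same caller, so the duplicated edge gets its own
cloned IPA reference and a fresh reference descriptor pointing at the new
edge (presumably the case of duplicating a call statement within a single
body, e.g. for speculation). */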
3831 struct ipa_ref *ref;
3832 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3833 gcc_checking_assert (n);
3834 ref = src->caller->find_reference (n, src->call_stmt,
3835 src->lto_stmt_uid);
3836 gcc_checking_assert (ref);
3837 dst->caller->clone_reference (ref, ref->stmt);
3838
3839 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3840 dst_rdesc->cs = dst;
3841 dst_rdesc->refcount = src_rdesc->refcount;
3842 dst_rdesc->next_duplicate = NULL;
3843 dst_jf->value.constant.rdesc = dst_rdesc;
3844 }
3845 else if (src_rdesc->cs == src)
3846 {
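/* SRC owned the descriptor: create a separate descriptor for DST and chain
it onto the next_duplicate list headed by SRC_RDESC. */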
3847 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3848 dst_rdesc->cs = dst;
3849 dst_rdesc->refcount = src_rdesc->refcount;
3850 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3851 src_rdesc->next_duplicate = dst_rdesc;
3852 dst_jf->value.constant.rdesc = dst_rdesc;
3853 }
3854 else
3855 {
3856 struct ipa_cst_ref_desc *dst_rdesc;
3857 /* This can happen during inlining, when a JFUNC can refer to a
3858 reference taken in a function up in the tree of inline clones.
3859 We need to find the duplicate that refers to our tree of
3860 inline clones. */
3861
3862 gcc_assert (dst->caller->global.inlined_to);
3863 for (dst_rdesc = src_rdesc->next_duplicate;
3864 dst_rdesc;
3865 dst_rdesc = dst_rdesc->next_duplicate)
3866 {
3867 struct cgraph_node *top;
3868 top = dst_rdesc->cs->caller->global.inlined_to
3869 ? dst_rdesc->cs->caller->global.inlined_to
3870 : dst_rdesc->cs->caller;
3871 if (dst->caller->global.inlined_to == top)
3872 break;
3873 }
3874 gcc_assert (dst_rdesc);
3875 dst_jf->value.constant.rdesc = dst_rdesc;
3876 }
3877 }
3878 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3879 && src->caller == dst->caller)
3880 {
3881 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3882 ? dst->caller->global.inlined_to : dst->caller;
3883 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3884 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3885
3886 int c = ipa_get_controlled_uses (root_info, idx);
3887 if (c != IPA_UNDESCRIBED_USE)
3888 {
3889 c++;
3890 ipa_set_controlled_uses (root_info, idx, c);
3891 }
3892 }
3893 }
3894 }
3895
3896 /* Analyze a function newly added to the callgraph. */
3897
3898 static void
3899 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3900 {
3901 if (node->has_gimple_body_p ())
3902 ipa_analyze_node (node);
3903 }
3904
3905 /* Hook that is called by summary when a node is duplicated. */
3906
3907 void
3908 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3909 ipa_node_params *old_info,
3910 ipa_node_params *new_info)
3911 {
3912 ipa_agg_replacement_value *old_av, *new_av;
3913
3914 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3915 new_info->lattices = NULL;
3916 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3917 new_info->known_csts = old_info->known_csts.copy ();
3918 new_info->known_contexts = old_info->known_contexts.copy ();
3919
3920 new_info->analysis_done = old_info->analysis_done;
3921 new_info->node_enqueued = old_info->node_enqueued;
3922 new_info->versionable = old_info->versionable;
3923
3924 old_av = ipa_get_agg_replacements_for_node (src);
3925 if (old_av)
3926 {
3927 new_av = NULL;
3928 while (old_av)
3929 {
3930 struct ipa_agg_replacement_value *v;
3931
3932 v = ggc_alloc<ipa_agg_replacement_value> ();
3933 memcpy (v, old_av, sizeof (*v));
3934 v->next = new_av;
3935 new_av = v;
3936 old_av = old_av->next;
3937 }
3938 ipa_set_node_agg_value_chain (dst, new_av);
3939 }
3940
3941 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3942
3943 if (src_trans)
3944 {
3945 ipcp_transformation_initialize ();
3946 src_trans = ipcp_transformation_sum->get_create (src);
3947 ipcp_transformation *dst_trans
3948 = ipcp_transformation_sum->get_create (dst);
3949
3950 dst_trans->bits = vec_safe_copy (src_trans->bits);
3951
3952 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3953 vec<ipa_vr, va_gc> *&dst_vr
3954 = ipcp_get_transformation_summary (dst)->m_vr;
3955 if (vec_safe_length (src_trans->m_vr) > 0)
3956 {
3957 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3958 for (unsigned i = 0; i < src_vr->length (); ++i)
3959 dst_vr->quick_push ((*src_vr)[i]);
3960 }
3961 }
3962 }
3963
3964 /* Register our cgraph hooks if they are not already there. */
3965
3966 void
3967 ipa_register_cgraph_hooks (void)
3968 {
3969 ipa_check_create_node_params ();
3970 ipa_check_create_edge_args ();
3971
3972 function_insertion_hook_holder =
3973 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3974 }
3975
3976 /* Unregister our cgraph hooks. */
3977
3978 static void
3979 ipa_unregister_cgraph_hooks (void)
3980 {
3981 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3982 function_insertion_hook_holder = NULL;
3983 }
3984
3985 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3986 longer needed after ipa-cp. */
3987
3988 void
3989 ipa_free_all_structures_after_ipa_cp (void)
3990 {
3991 if (!optimize && !in_lto_p)
3992 {
3993 ipa_free_all_edge_args ();
3994 ipa_free_all_node_params ();
3995 ipcp_sources_pool.release ();
3996 ipcp_cst_values_pool.release ();
3997 ipcp_poly_ctx_values_pool.release ();
3998 ipcp_agg_lattice_pool.release ();
3999 ipa_unregister_cgraph_hooks ();
4000 ipa_refdesc_pool.release ();
4001 }
4002 }
4003
4004 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4005 longer needed after indirect inlining. */
4006
4007 void
4008 ipa_free_all_structures_after_iinln (void)
4009 {
4010 ipa_free_all_edge_args ();
4011 ipa_free_all_node_params ();
4012 ipa_unregister_cgraph_hooks ();
4013 ipcp_sources_pool.release ();
4014 ipcp_cst_values_pool.release ();
4015 ipcp_poly_ctx_values_pool.release ();
4016 ipcp_agg_lattice_pool.release ();
4017 ipa_refdesc_pool.release ();
4018 }
4019
4020 /* Print ipa_tree_map data structures (parameter descriptors) of a single
4021 function NODE to F. */
4022
4023 void
4024 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4025 {
4026 int i, count;
4027 class ipa_node_params *info;
4028
4029 if (!node->definition)
4030 return;
4031 info = IPA_NODE_REF (node);
4032 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4033 count = ipa_get_param_count (info);
4034 for (i = 0; i < count; i++)
4035 {
4036 int c;
4037
4038 fprintf (f, " ");
4039 ipa_dump_param (f, info, i);
4040 if (ipa_is_param_used (info, i))
4041 fprintf (f, " used");
4042 c = ipa_get_controlled_uses (info, i);
4043 if (c == IPA_UNDESCRIBED_USE)
4044 fprintf (f, " undescribed_use");
4045 else
4046 fprintf (f, " controlled_uses=%i", c);
4047 fprintf (f, "\n");
4048 }
4049 }
4050
4051 /* Print ipa_tree_map data structures of all functions in the
4052 callgraph to F. */
4053
4054 void
4055 ipa_print_all_params (FILE * f)
4056 {
4057 struct cgraph_node *node;
4058
4059 fprintf (f, "\nFunction parameters:\n");
4060 FOR_EACH_FUNCTION (node)
4061 ipa_print_node_params (f, node);
4062 }
4063
4064 /* Dump the AV linked list. */
4065
4066 void
4067 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4068 {
4069 bool comma = false;
4070 fprintf (f, " Aggregate replacements:");
4071 for (; av; av = av->next)
4072 {
4073 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4074 av->index, av->offset);
4075 print_generic_expr (f, av->value);
4076 comma = true;
4077 }
4078 fprintf (f, "\n");
4079 }
4080
4081 /* Stream out jump function JUMP_FUNC to OB. */
4082
4083 static void
4084 ipa_write_jump_function (struct output_block *ob,
4085 struct ipa_jump_func *jump_func)
4086 {
4087 struct ipa_agg_jf_item *item;
4088 struct bitpack_d bp;
4089 int i, count;
4090 int flag = 0;
4091
4092 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4093 as well as WPA memory by handling them specially. */
4094 if (jump_func->type == IPA_JF_CONST
4095 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4096 flag = 1;
4097
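/* The jump function type and the ADDR_EXPR flag are packed into a single
unsigned HWI: the type in the upper bits and the flag in the least
significant bit; ipa_read_jump_function decodes it accordingly. */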
4098 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4099 switch (jump_func->type)
4100 {
4101 case IPA_JF_UNKNOWN:
4102 break;
4103 case IPA_JF_CONST:
4104 gcc_assert (
4105 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4106 stream_write_tree (ob,
4107 flag
4108 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4109 : jump_func->value.constant.value, true);
4110 break;
4111 case IPA_JF_PASS_THROUGH:
4112 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4113 if (jump_func->value.pass_through.operation == NOP_EXPR)
4114 {
4115 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4116 bp = bitpack_create (ob->main_stream);
4117 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4118 streamer_write_bitpack (&bp);
4119 }
4120 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4121 == tcc_unary)
4122 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4123 else
4124 {
4125 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4126 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4127 }
4128 break;
4129 case IPA_JF_ANCESTOR:
4130 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4131 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4132 bp = bitpack_create (ob->main_stream);
4133 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4134 streamer_write_bitpack (&bp);
4135 break;
4136 }
4137
4138 count = vec_safe_length (jump_func->agg.items);
4139 streamer_write_uhwi (ob, count);
4140 if (count)
4141 {
4142 bp = bitpack_create (ob->main_stream);
4143 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4144 streamer_write_bitpack (&bp);
4145 }
4146
4147 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4148 {
4149 streamer_write_uhwi (ob, item->offset);
4150 stream_write_tree (ob, item->value, true);
4151 }
4152
4153 bp = bitpack_create (ob->main_stream);
4154 bp_pack_value (&bp, !!jump_func->bits, 1);
4155 streamer_write_bitpack (&bp);
4156 if (jump_func->bits)
4157 {
4158 streamer_write_widest_int (ob, jump_func->bits->value);
4159 streamer_write_widest_int (ob, jump_func->bits->mask);
4160 }
4161 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4162 streamer_write_bitpack (&bp);
4163 if (jump_func->m_vr)
4164 {
4165 streamer_write_enum (ob->main_stream, value_range_kind,
4166 VR_LAST, jump_func->m_vr->kind ());
4167 stream_write_tree (ob, jump_func->m_vr->min (), true);
4168 stream_write_tree (ob, jump_func->m_vr->max (), true);
4169 }
4170 }
4171
4172 /* Read in jump function JUMP_FUNC from IB. */
4173
4174 static void
4175 ipa_read_jump_function (class lto_input_block *ib,
4176 struct ipa_jump_func *jump_func,
4177 struct cgraph_edge *cs,
4178 class data_in *data_in,
4179 bool prevails)
4180 {
4181 enum jump_func_type jftype;
4182 enum tree_code operation;
4183 int i, count;
4184 int val = streamer_read_uhwi (ib);
4185 bool flag = val & 1;
4186
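/* This mirrors the encoding used by ipa_write_jump_function: the low bit
says whether an IPA_JF_CONST value was streamed as the operand of an
ADDR_EXPR, the remaining bits hold the jump function type. */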
4187 jftype = (enum jump_func_type) (val / 2);
4188 switch (jftype)
4189 {
4190 case IPA_JF_UNKNOWN:
4191 ipa_set_jf_unknown (jump_func);
4192 break;
4193 case IPA_JF_CONST:
4194 {
4195 tree t = stream_read_tree (ib, data_in);
4196 if (flag && prevails)
4197 t = build_fold_addr_expr (t);
4198 ipa_set_jf_constant (jump_func, t, cs);
4199 }
4200 break;
4201 case IPA_JF_PASS_THROUGH:
4202 operation = (enum tree_code) streamer_read_uhwi (ib);
4203 if (operation == NOP_EXPR)
4204 {
4205 int formal_id = streamer_read_uhwi (ib);
4206 struct bitpack_d bp = streamer_read_bitpack (ib);
4207 bool agg_preserved = bp_unpack_value (&bp, 1);
4208 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4209 }
4210 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4211 {
4212 int formal_id = streamer_read_uhwi (ib);
4213 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4214 }
4215 else
4216 {
4217 tree operand = stream_read_tree (ib, data_in);
4218 int formal_id = streamer_read_uhwi (ib);
4219 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4220 operation);
4221 }
4222 break;
4223 case IPA_JF_ANCESTOR:
4224 {
4225 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4226 int formal_id = streamer_read_uhwi (ib);
4227 struct bitpack_d bp = streamer_read_bitpack (ib);
4228 bool agg_preserved = bp_unpack_value (&bp, 1);
4229 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4230 break;
4231 }
4232 default:
4233 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4234 }
4235
4236 count = streamer_read_uhwi (ib);
4237 if (prevails)
4238 vec_alloc (jump_func->agg.items, count);
4239 if (count)
4240 {
4241 struct bitpack_d bp = streamer_read_bitpack (ib);
4242 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4243 }
4244 for (i = 0; i < count; i++)
4245 {
4246 struct ipa_agg_jf_item item;
4247 item.offset = streamer_read_uhwi (ib);
4248 item.value = stream_read_tree (ib, data_in);
4249 if (prevails)
4250 jump_func->agg.items->quick_push (item);
4251 }
4252
4253 struct bitpack_d bp = streamer_read_bitpack (ib);
4254 bool bits_known = bp_unpack_value (&bp, 1);
4255 if (bits_known)
4256 {
4257 widest_int value = streamer_read_widest_int (ib);
4258 widest_int mask = streamer_read_widest_int (ib);
4259 if (prevails)
4260 ipa_set_jfunc_bits (jump_func, value, mask);
4261 }
4262 else
4263 jump_func->bits = NULL;
4264
4265 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4266 bool vr_known = bp_unpack_value (&vr_bp, 1);
4267 if (vr_known)
4268 {
4269 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4270 VR_LAST);
4271 tree min = stream_read_tree (ib, data_in);
4272 tree max = stream_read_tree (ib, data_in);
4273 if (prevails)
4274 ipa_set_jfunc_vr (jump_func, type, min, max);
4275 }
4276 else
4277 jump_func->m_vr = NULL;
4278 }
4279
4280 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4281 relevant to indirect inlining to OB. */
4282
4283 static void
4284 ipa_write_indirect_edge_info (struct output_block *ob,
4285 struct cgraph_edge *cs)
4286 {
4287 class cgraph_indirect_call_info *ii = cs->indirect_info;
4288 struct bitpack_d bp;
4289
4290 streamer_write_hwi (ob, ii->param_index);
4291 bp = bitpack_create (ob->main_stream);
4292 bp_pack_value (&bp, ii->polymorphic, 1);
4293 bp_pack_value (&bp, ii->agg_contents, 1);
4294 bp_pack_value (&bp, ii->member_ptr, 1);
4295 bp_pack_value (&bp, ii->by_ref, 1);
4296 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4297 bp_pack_value (&bp, ii->vptr_changed, 1);
4298 streamer_write_bitpack (&bp);
4299 if (ii->agg_contents || ii->polymorphic)
4300 streamer_write_hwi (ob, ii->offset);
4301 else
4302 gcc_assert (ii->offset == 0);
4303
4304 if (ii->polymorphic)
4305 {
4306 streamer_write_hwi (ob, ii->otr_token);
4307 stream_write_tree (ob, ii->otr_type, true);
4308 ii->context.stream_out (ob);
4309 }
4310 }
4311
4312 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4313 relevant to indirect inlining from IB. */
4314
4315 static void
4316 ipa_read_indirect_edge_info (class lto_input_block *ib,
4317 class data_in *data_in,
4318 struct cgraph_edge *cs)
4319 {
4320 class cgraph_indirect_call_info *ii = cs->indirect_info;
4321 struct bitpack_d bp;
4322
4323 ii->param_index = (int) streamer_read_hwi (ib);
4324 bp = streamer_read_bitpack (ib);
4325 ii->polymorphic = bp_unpack_value (&bp, 1);
4326 ii->agg_contents = bp_unpack_value (&bp, 1);
4327 ii->member_ptr = bp_unpack_value (&bp, 1);
4328 ii->by_ref = bp_unpack_value (&bp, 1);
4329 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4330 ii->vptr_changed = bp_unpack_value (&bp, 1);
4331 if (ii->agg_contents || ii->polymorphic)
4332 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4333 else
4334 ii->offset = 0;
4335 if (ii->polymorphic)
4336 {
4337 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4338 ii->otr_type = stream_read_tree (ib, data_in);
4339 ii->context.stream_in (ib, data_in);
4340 }
4341 }
4342
4343 /* Stream out NODE info to OB. */
4344
4345 static void
4346 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4347 {
4348 int node_ref;
4349 lto_symtab_encoder_t encoder;
4350 class ipa_node_params *info = IPA_NODE_REF (node);
4351 int j;
4352 struct cgraph_edge *e;
4353 struct bitpack_d bp;
4354
4355 encoder = ob->decl_state->symtab_node_encoder;
4356 node_ref = lto_symtab_encoder_encode (encoder, node);
4357 streamer_write_uhwi (ob, node_ref);
4358
4359 streamer_write_uhwi (ob, ipa_get_param_count (info));
4360 for (j = 0; j < ipa_get_param_count (info); j++)
4361 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4362 bp = bitpack_create (ob->main_stream);
4363 gcc_assert (info->analysis_done
4364 || ipa_get_param_count (info) == 0);
4365 gcc_assert (!info->node_enqueued);
4366 gcc_assert (!info->ipcp_orig_node);
4367 for (j = 0; j < ipa_get_param_count (info); j++)
4368 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4369 streamer_write_bitpack (&bp);
4370 for (j = 0; j < ipa_get_param_count (info); j++)
4371 {
4372 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4373 stream_write_tree (ob, ipa_get_type (info, j), true);
4374 }
4375 for (e = node->callees; e; e = e->next_callee)
4376 {
4377 class ipa_edge_args *args = IPA_EDGE_REF (e);
4378
4379 streamer_write_uhwi (ob,
4380 ipa_get_cs_argument_count (args) * 2
4381 + (args->polymorphic_call_contexts != NULL));
4382 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4383 {
4384 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4385 if (args->polymorphic_call_contexts != NULL)
4386 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4387 }
4388 }
4389 for (e = node->indirect_calls; e; e = e->next_callee)
4390 {
4391 class ipa_edge_args *args = IPA_EDGE_REF (e);
4392
4393 streamer_write_uhwi (ob,
4394 ipa_get_cs_argument_count (args) * 2
4395 + (args->polymorphic_call_contexts != NULL));
4396 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4397 {
4398 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4399 if (args->polymorphic_call_contexts != NULL)
4400 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4401 }
4402 ipa_write_indirect_edge_info (ob, e);
4403 }
4404 }
4405
4406 /* Stream in edge E from IB. */
4407
4408 static void
4409 ipa_read_edge_info (class lto_input_block *ib,
4410 class data_in *data_in,
4411 struct cgraph_edge *e, bool prevails)
4412 {
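/* The streamed count encodes both the number of arguments and, in its low
bit, whether polymorphic call contexts follow each jump function; see the
matching computation in ipa_write_node_info. */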
4413 int count = streamer_read_uhwi (ib);
4414 bool contexts_computed = count & 1;
4415
4416 count /= 2;
4417 if (!count)
4418 return;
4419 if (prevails && e->possibly_call_in_translation_unit_p ())
4420 {
4421 class ipa_edge_args *args = IPA_EDGE_REF (e);
4422 vec_safe_grow_cleared (args->jump_functions, count);
4423 if (contexts_computed)
4424 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4425 for (int k = 0; k < count; k++)
4426 {
4427 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4428 data_in, prevails);
4429 if (contexts_computed)
4430 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4431 (ib, data_in);
4432 }
4433 }
4434 else
4435 {
4436 for (int k = 0; k < count; k++)
4437 {
4438 struct ipa_jump_func dummy;
4439 ipa_read_jump_function (ib, &dummy, e,
4440 data_in, prevails);
4441 if (contexts_computed)
4442 {
4443 class ipa_polymorphic_call_context ctx;
4444 ctx.stream_in (ib, data_in);
4445 }
4446 }
4447 }
4448 }
4449
4450 /* Stream in NODE info from IB. */
4451
4452 static void
4453 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4454 class data_in *data_in)
4455 {
4456 int k;
4457 struct cgraph_edge *e;
4458 struct bitpack_d bp;
4459 bool prevails = node->prevailing_p ();
4460 class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
4461
4462 int param_count = streamer_read_uhwi (ib);
4463 if (prevails)
4464 {
4465 ipa_alloc_node_params (node, param_count);
4466 for (k = 0; k < param_count; k++)
4467 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4468 if (ipa_get_param_count (info) != 0)
4469 info->analysis_done = true;
4470 info->node_enqueued = false;
4471 }
4472 else
4473 for (k = 0; k < param_count; k++)
4474 streamer_read_uhwi (ib);
4475
4476 bp = streamer_read_bitpack (ib);
4477 for (k = 0; k < param_count; k++)
4478 {
4479 bool used = bp_unpack_value (&bp, 1);
4480
4481 if (prevails)
4482 ipa_set_param_used (info, k, used);
4483 }
4484 for (k = 0; k < param_count; k++)
4485 {
4486 int nuses = streamer_read_hwi (ib);
4487 tree type = stream_read_tree (ib, data_in);
4488
4489 if (prevails)
4490 {
4491 ipa_set_controlled_uses (info, k, nuses);
4492 (*info->descriptors)[k].decl_or_type = type;
4493 }
4494 }
4495 for (e = node->callees; e; e = e->next_callee)
4496 ipa_read_edge_info (ib, data_in, e, prevails);
4497 for (e = node->indirect_calls; e; e = e->next_callee)
4498 {
4499 ipa_read_edge_info (ib, data_in, e, prevails);
4500 ipa_read_indirect_edge_info (ib, data_in, e);
4501 }
4502 }
4503
4504 /* Write jump functions for all analyzed nodes in the current LTO partition. */
4505
4506 void
4507 ipa_prop_write_jump_functions (void)
4508 {
4509 struct cgraph_node *node;
4510 struct output_block *ob;
4511 unsigned int count = 0;
4512 lto_symtab_encoder_iterator lsei;
4513 lto_symtab_encoder_t encoder;
4514
4515 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4516 return;
4517
4518 ob = create_output_block (LTO_section_jump_functions);
4519 encoder = ob->decl_state->symtab_node_encoder;
4520 ob->symbol = NULL;
4521 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4522 lsei_next_function_in_partition (&lsei))
4523 {
4524 node = lsei_cgraph_node (lsei);
4525 if (node->has_gimple_body_p ()
4526 && IPA_NODE_REF (node) != NULL)
4527 count++;
4528 }
4529
4530 streamer_write_uhwi (ob, count);
4531
4532 /* Process all of the functions. */
4533 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4534 lsei_next_function_in_partition (&lsei))
4535 {
4536 node = lsei_cgraph_node (lsei);
4537 if (node->has_gimple_body_p ()
4538 && IPA_NODE_REF (node) != NULL)
4539 ipa_write_node_info (ob, node);
4540 }
4541 streamer_write_char_stream (ob->main_stream, 0);
4542 produce_asm (ob, NULL);
4543 destroy_output_block (ob);
4544 }
4545
4546 /* Read section in file FILE_DATA of length LEN with data DATA. */
4547
4548 static void
4549 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4550 size_t len)
4551 {
4552 const struct lto_function_header *header =
4553 (const struct lto_function_header *) data;
4554 const int cfg_offset = sizeof (struct lto_function_header);
4555 const int main_offset = cfg_offset + header->cfg_size;
4556 const int string_offset = main_offset + header->main_size;
4557 class data_in *data_in;
4558 unsigned int i;
4559 unsigned int count;
4560
4561 lto_input_block ib_main ((const char *) data + main_offset,
4562 header->main_size, file_data->mode_table);
4563
4564 data_in =
4565 lto_data_in_create (file_data, (const char *) data + string_offset,
4566 header->string_size, vNULL);
4567 count = streamer_read_uhwi (&ib_main);
4568
4569 for (i = 0; i < count; i++)
4570 {
4571 unsigned int index;
4572 struct cgraph_node *node;
4573 lto_symtab_encoder_t encoder;
4574
4575 index = streamer_read_uhwi (&ib_main);
4576 encoder = file_data->symtab_node_encoder;
4577 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4578 index));
4579 gcc_assert (node->definition);
4580 ipa_read_node_info (&ib_main, node, data_in);
4581 }
4582 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4583 len);
4584 lto_data_in_delete (data_in);
4585 }
4586
4587 /* Read ipcp jump functions. */
4588
4589 void
4590 ipa_prop_read_jump_functions (void)
4591 {
4592 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4593 struct lto_file_decl_data *file_data;
4594 unsigned int j = 0;
4595
4596 ipa_check_create_node_params ();
4597 ipa_check_create_edge_args ();
4598 ipa_register_cgraph_hooks ();
4599
4600 while ((file_data = file_data_vec[j++]))
4601 {
4602 size_t len;
4603 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4604
4605 if (data)
4606 ipa_prop_read_section (file_data, data, len);
4607 }
4608 }
4609
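/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
replacement values, known value ranges and known bits of parameters, to
OB. */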
4610 void
4611 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4612 {
4613 int node_ref;
4614 unsigned int count = 0;
4615 lto_symtab_encoder_t encoder;
4616 struct ipa_agg_replacement_value *aggvals, *av;
4617
4618 aggvals = ipa_get_agg_replacements_for_node (node);
4619 encoder = ob->decl_state->symtab_node_encoder;
4620 node_ref = lto_symtab_encoder_encode (encoder, node);
4621 streamer_write_uhwi (ob, node_ref);
4622
4623 for (av = aggvals; av; av = av->next)
4624 count++;
4625 streamer_write_uhwi (ob, count);
4626
4627 for (av = aggvals; av; av = av->next)
4628 {
4629 struct bitpack_d bp;
4630
4631 streamer_write_uhwi (ob, av->offset);
4632 streamer_write_uhwi (ob, av->index);
4633 stream_write_tree (ob, av->value, true);
4634
4635 bp = bitpack_create (ob->main_stream);
4636 bp_pack_value (&bp, av->by_ref, 1);
4637 streamer_write_bitpack (&bp);
4638 }
4639
4640 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4641 if (ts && vec_safe_length (ts->m_vr) > 0)
4642 {
4643 count = ts->m_vr->length ();
4644 streamer_write_uhwi (ob, count);
4645 for (unsigned i = 0; i < count; ++i)
4646 {
4647 struct bitpack_d bp;
4648 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4649 bp = bitpack_create (ob->main_stream);
4650 bp_pack_value (&bp, parm_vr->known, 1);
4651 streamer_write_bitpack (&bp);
4652 if (parm_vr->known)
4653 {
4654 streamer_write_enum (ob->main_stream, value_range_kind,
4655 VR_LAST, parm_vr->type);
4656 streamer_write_wide_int (ob, parm_vr->min);
4657 streamer_write_wide_int (ob, parm_vr->max);
4658 }
4659 }
4660 }
4661 else
4662 streamer_write_uhwi (ob, 0);
4663
4664 if (ts && vec_safe_length (ts->bits) > 0)
4665 {
4666 count = ts->bits->length ();
4667 streamer_write_uhwi (ob, count);
4668
4669 for (unsigned i = 0; i < count; ++i)
4670 {
4671 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4672 struct bitpack_d bp = bitpack_create (ob->main_stream);
4673 bp_pack_value (&bp, !!bits_jfunc, 1);
4674 streamer_write_bitpack (&bp);
4675 if (bits_jfunc)
4676 {
4677 streamer_write_widest_int (ob, bits_jfunc->value);
4678 streamer_write_widest_int (ob, bits_jfunc->mask);
4679 }
4680 }
4681 }
4682 else
4683 streamer_write_uhwi (ob, 0);
4684 }
4685
4686 /* Stream in the aggregate value replacement chain and the rest of the IPA-CP transformation summary for NODE from IB. */
4687
4688 static void
4689 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4690 data_in *data_in)
4691 {
4692 struct ipa_agg_replacement_value *aggvals = NULL;
4693 unsigned int count, i;
4694
4695 count = streamer_read_uhwi (ib);
4696 for (i = 0; i < count; i++)
4697 {
4698 struct ipa_agg_replacement_value *av;
4699 struct bitpack_d bp;
4700
4701 av = ggc_alloc<ipa_agg_replacement_value> ();
4702 av->offset = streamer_read_uhwi (ib);
4703 av->index = streamer_read_uhwi (ib);
4704 av->value = stream_read_tree (ib, data_in);
4705 bp = streamer_read_bitpack (ib);
4706 av->by_ref = bp_unpack_value (&bp, 1);
4707 av->next = aggvals;
4708 aggvals = av;
4709 }
4710 ipa_set_node_agg_value_chain (node, aggvals);
4711
4712 count = streamer_read_uhwi (ib);
4713 if (count > 0)
4714 {
4715 ipcp_transformation_initialize ();
4716 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4717 vec_safe_grow_cleared (ts->m_vr, count);
4718 for (i = 0; i < count; i++)
4719 {
4720 ipa_vr *parm_vr;
4721 parm_vr = &(*ts->m_vr)[i];
4722 struct bitpack_d bp;
4723 bp = streamer_read_bitpack (ib);
4724 parm_vr->known = bp_unpack_value (&bp, 1);
4725 if (parm_vr->known)
4726 {
4727 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4728 VR_LAST);
4729 parm_vr->min = streamer_read_wide_int (ib);
4730 parm_vr->max = streamer_read_wide_int (ib);
4731 }
4732 }
4733 }
4734 count = streamer_read_uhwi (ib);
4735 if (count > 0)
4736 {
4737 ipcp_transformation_initialize ();
4738 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4739 vec_safe_grow_cleared (ts->bits, count);
4740
4741 for (i = 0; i < count; i++)
4742 {
4743 struct bitpack_d bp = streamer_read_bitpack (ib);
4744 bool known = bp_unpack_value (&bp, 1);
4745 if (known)
4746 {
4747 ipa_bits *bits
4748 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4749 streamer_read_widest_int (ib));
4750 (*ts->bits)[i] = bits;
4751 }
4752 }
4753 }
4754 }
4755
4756 /* Write the IPA-CP transformation summaries, including aggregate replacements, for all nodes in the current partition. */
4757
4758 void
4759 ipcp_write_transformation_summaries (void)
4760 {
4761 struct cgraph_node *node;
4762 struct output_block *ob;
4763 unsigned int count = 0;
4764 lto_symtab_encoder_iterator lsei;
4765 lto_symtab_encoder_t encoder;
4766
4767 ob = create_output_block (LTO_section_ipcp_transform);
4768 encoder = ob->decl_state->symtab_node_encoder;
4769 ob->symbol = NULL;
4770 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4771 lsei_next_function_in_partition (&lsei))
4772 {
4773 node = lsei_cgraph_node (lsei);
4774 if (node->has_gimple_body_p ())
4775 count++;
4776 }
4777
4778 streamer_write_uhwi (ob, count);
4779
4780 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4781 lsei_next_function_in_partition (&lsei))
4782 {
4783 node = lsei_cgraph_node (lsei);
4784 if (node->has_gimple_body_p ())
4785 write_ipcp_transformation_info (ob, node);
4786 }
4787 streamer_write_char_stream (ob->main_stream, 0);
4788 produce_asm (ob, NULL);
4789 destroy_output_block (ob);
4790 }
4791
4792 /* Read replacements section in file FILE_DATA of length LEN with data
4793 DATA. */
4794
4795 static void
4796 read_replacements_section (struct lto_file_decl_data *file_data,
4797 const char *data,
4798 size_t len)
4799 {
4800 const struct lto_function_header *header =
4801 (const struct lto_function_header *) data;
4802 const int cfg_offset = sizeof (struct lto_function_header);
4803 const int main_offset = cfg_offset + header->cfg_size;
4804 const int string_offset = main_offset + header->main_size;
4805 class data_in *data_in;
4806 unsigned int i;
4807 unsigned int count;
4808
4809 lto_input_block ib_main ((const char *) data + main_offset,
4810 header->main_size, file_data->mode_table);
4811
4812 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4813 header->string_size, vNULL);
4814 count = streamer_read_uhwi (&ib_main);
4815
4816 for (i = 0; i < count; i++)
4817 {
4818 unsigned int index;
4819 struct cgraph_node *node;
4820 lto_symtab_encoder_t encoder;
4821
4822 index = streamer_read_uhwi (&ib_main);
4823 encoder = file_data->symtab_node_encoder;
4824 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4825 index));
4826 gcc_assert (node->definition);
4827 read_ipcp_transformation_info (&ib_main, node, data_in);
4828 }
4829 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4830 len);
4831 lto_data_in_delete (data_in);
4832 }
4833
4834 /* Read IPA-CP transformation summaries, including aggregate replacements. */
4835
4836 void
4837 ipcp_read_transformation_summaries (void)
4838 {
4839 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4840 struct lto_file_decl_data *file_data;
4841 unsigned int j = 0;
4842
4843 while ((file_data = file_data_vec[j++]))
4844 {
4845 size_t len;
4846 const char *data = lto_get_section_data (file_data,
4847 LTO_section_ipcp_transform,
4848 NULL, &len);
4849 if (data)
4850 read_replacements_section (file_data, data, len);
4851 }
4852 }
4853
4854 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4855 NODE. */
4856
4857 static void
4858 adjust_agg_replacement_values (struct cgraph_node *node,
4859 struct ipa_agg_replacement_value *aggval)
4860 {
4861 struct ipa_agg_replacement_value *v;
4862 int i, c = 0, d = 0, *adj;
4863
4864 if (!node->clone.combined_args_to_skip)
4865 return;
4866
4867 for (v = aggval; v; v = v->next)
4868 {
4869 gcc_assert (v->index >= 0);
4870 if (c < v->index)
4871 c = v->index;
4872 }
4873 c++;
4874
4875 adj = XALLOCAVEC (int, c);
4876 for (i = 0; i < c; i++)
4877 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4878 {
4879 adj[i] = -1;
4880 d++;
4881 }
4882 else
4883 adj[i] = i - d;
4884
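/* ADJ now maps pre-clone parameter indices to post-clone ones, with -1 for
skipped parameters. For example, if the second of three parameters was
skipped, indices 0, 1 and 2 map to 0, -1 and 1 respectively. */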
4885 for (v = aggval; v; v = v->next)
4886 v->index = adj[v->index];
4887 }
4888
4889 /* Dominator walker driving the ipcp modification phase. */
4890
4891 class ipcp_modif_dom_walker : public dom_walker
4892 {
4893 public:
4894 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4895 vec<ipa_param_descriptor, va_gc> *descs,
4896 struct ipa_agg_replacement_value *av,
4897 bool *sc, bool *cc)
4898 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4899 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4900
4901 virtual edge before_dom_children (basic_block);
4902
4903 private:
4904 struct ipa_func_body_info *m_fbi;
4905 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4906 struct ipa_agg_replacement_value *m_aggval;
4907 bool *m_something_changed, *m_cfg_changed;
4908 };
4909
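/* Scan BB for loads from aggregates passed in parameters and, when such a
load matches one of the recorded aggregate replacement values, rewrite it
into a use of the known constant. */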
4910 edge
4911 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4912 {
4913 gimple_stmt_iterator gsi;
4914 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4915 {
4916 struct ipa_agg_replacement_value *v;
4917 gimple *stmt = gsi_stmt (gsi);
4918 tree rhs, val, t;
4919 HOST_WIDE_INT offset;
4920 poly_int64 size;
4921 int index;
4922 bool by_ref, vce;
4923
4924 if (!gimple_assign_load_p (stmt))
4925 continue;
4926 rhs = gimple_assign_rhs1 (stmt);
4927 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4928 continue;
4929
4930 vce = false;
4931 t = rhs;
4932 while (handled_component_p (t))
4933 {
4934 /* V_C_E can do things like convert an array of integers to one
4935 bigger integer and similar things we do not handle below. */
4936 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4937 {
4938 vce = true;
4939 break;
4940 }
4941 t = TREE_OPERAND (t, 0);
4942 }
4943 if (vce)
4944 continue;
4945
4946 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4947 &offset, &size, &by_ref))
4948 continue;
4949 for (v = m_aggval; v; v = v->next)
4950 if (v->index == index
4951 && v->offset == offset)
4952 break;
4953 if (!v
4954 || v->by_ref != by_ref
4955 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
4956 size))
4957 continue;
4958
4959 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4960 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4961 {
4962 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4963 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4964 else if (TYPE_SIZE (TREE_TYPE (rhs))
4965 == TYPE_SIZE (TREE_TYPE (v->value)))
4966 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4967 else
4968 {
4969 if (dump_file)
4970 {
4971 fprintf (dump_file, " const ");
4972 print_generic_expr (dump_file, v->value);
4973 fprintf (dump_file, " can't be converted to type of ");
4974 print_generic_expr (dump_file, rhs);
4975 fprintf (dump_file, "\n");
4976 }
4977 continue;
4978 }
4979 }
4980 else
4981 val = v->value;
4982
4983 if (dump_file && (dump_flags & TDF_DETAILS))
4984 {
4985 fprintf (dump_file, "Modifying stmt:\n ");
4986 print_gimple_stmt (dump_file, stmt, 0);
4987 }
4988 gimple_assign_set_rhs_from_tree (&gsi, val);
4989 update_stmt (stmt);
4990
4991 if (dump_file && (dump_flags & TDF_DETAILS))
4992 {
4993 fprintf (dump_file, "into:\n ");
4994 print_gimple_stmt (dump_file, stmt, 0);
4995 fprintf (dump_file, "\n");
4996 }
4997
4998 *m_something_changed = true;
4999 if (maybe_clean_eh_stmt (stmt)
5000 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5001 *m_cfg_changed = true;
5002 }
5003 return NULL;
5004 }
5005
5006 /* Update bits info of formal parameters as described in
5007 ipcp_transformation. */
5008
5009 static void
5010 ipcp_update_bits (struct cgraph_node *node)
5011 {
5012 tree parm = DECL_ARGUMENTS (node->decl);
5013 tree next_parm = parm;
5014 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5015
5016 if (!ts || vec_safe_length (ts->bits) == 0)
5017 return;
5018
5019 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5020 unsigned count = bits.length ();
5021
5022 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5023 {
5024 if (node->clone.combined_args_to_skip
5025 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5026 continue;
5027
5028 gcc_checking_assert (parm);
5029 next_parm = DECL_CHAIN (parm);
5030
5031 if (!bits[i]
5032 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5033 || POINTER_TYPE_P (TREE_TYPE (parm)))
5034 || !is_gimple_reg (parm))
5035 continue;
5036
5037 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5038 if (!ddef)
5039 continue;
5040
5041 if (dump_file)
5042 {
5043 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5044 print_hex (bits[i]->mask, dump_file);
5045 fprintf (dump_file, "\n");
5046 }
5047
5048 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5049 {
5050 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5051 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5052
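/* Bits set in the mask are unknown, so a bit may be nonzero if it is either
unknown or known to be one; hence mask | value. */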
5053 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5054 | wide_int::from (bits[i]->value, prec, sgn);
5055 set_nonzero_bits (ddef, nonzero_bits);
5056 }
5057 else
5058 {
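/* For pointers, derive alignment from the known bits: the lowest unknown
bit (lowest set bit of the mask) bounds the alignment and the known low
bits of the value give the misalignment within it. */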
5059 unsigned tem = bits[i]->mask.to_uhwi ();
5060 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5061 unsigned align = tem & -tem;
5062 unsigned misalign = bitpos & (align - 1);
5063
5064 if (align > 1)
5065 {
5066 if (dump_file)
5067 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5068
5069 unsigned old_align, old_misalign;
5070 struct ptr_info_def *pi = get_ptr_info (ddef);
5071 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5072
5073 if (old_known
5074 && old_align > align)
5075 {
5076 if (dump_file)
5077 {
5078 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5079 if ((old_misalign & (align - 1)) != misalign)
5080 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5081 old_misalign, misalign);
5082 }
5083 continue;
5084 }
5085
5086 if (old_known
5087 && ((misalign & (old_align - 1)) != old_misalign)
5088 && dump_file)
5089 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5090 old_misalign, misalign);
5091
5092 set_ptr_info_alignment (pi, align, misalign);
5093 }
5094 }
5095 }
5096 }
5097
5098 /* Update value range of formal parameters as described in
5099 ipcp_transformation. */
5100
5101 static void
5102 ipcp_update_vr (struct cgraph_node *node)
5103 {
5104 tree fndecl = node->decl;
5105 tree parm = DECL_ARGUMENTS (fndecl);
5106 tree next_parm = parm;
5107 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5108 if (!ts || vec_safe_length (ts->m_vr) == 0)
5109 return;
5110 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5111 unsigned count = vr.length ();
5112
5113 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5114 {
5115 if (node->clone.combined_args_to_skip
5116 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5117 continue;
5118 gcc_checking_assert (parm);
5119 next_parm = DECL_CHAIN (parm);
5120 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5121
5122 if (!ddef || !is_gimple_reg (parm))
5123 continue;
5124
5125 if (vr[i].known
5126 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5127 {
5128 tree type = TREE_TYPE (ddef);
5129 unsigned prec = TYPE_PRECISION (type);
5130 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5131 {
5132 if (dump_file)
5133 {
5134 fprintf (dump_file, "Setting value range of param %u ", i);
5135 fprintf (dump_file, "%s[",
5136 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5137 print_decs (vr[i].min, dump_file);
5138 fprintf (dump_file, ", ");
5139 print_decs (vr[i].max, dump_file);
5140 fprintf (dump_file, "]\n");
5141 }
5142 set_range_info (ddef, vr[i].type,
5143 wide_int_storage::from (vr[i].min, prec,
5144 TYPE_SIGN (type)),
5145 wide_int_storage::from (vr[i].max, prec,
5146 TYPE_SIGN (type)));
5147 }
5148 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5149 && vr[i].type == VR_ANTI_RANGE
5150 && wi::eq_p (vr[i].min, 0)
5151 && wi::eq_p (vr[i].max, 0))
5152 {
5153 if (dump_file)
5154 fprintf (dump_file, "Setting nonnull for %u\n", i);
5155 set_ptr_nonnull (ddef);
5156 }
5157 }
5158 }
5159 }
5160
5161 /* IPCP transformation phase doing propagation of aggregate values. */
5162
5163 unsigned int
5164 ipcp_transform_function (struct cgraph_node *node)
5165 {
5166 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5167 struct ipa_func_body_info fbi;
5168 struct ipa_agg_replacement_value *aggval;
5169 int param_count;
5170 bool cfg_changed = false, something_changed = false;
5171
5172 gcc_checking_assert (cfun);
5173 gcc_checking_assert (current_function_decl);
5174
5175 if (dump_file)
5176 fprintf (dump_file, "Modification phase of node %s\n",
5177 node->dump_name ());
5178
5179 ipcp_update_bits (node);
5180 ipcp_update_vr (node);
5181 aggval = ipa_get_agg_replacements_for_node (node);
5182 if (!aggval)
5183 return 0;
5184 param_count = count_formal_params (node->decl);
5185 if (param_count == 0)
5186 return 0;
5187 adjust_agg_replacement_values (node, aggval);
5188 if (dump_file)
5189 ipa_dump_agg_replacement_values (dump_file, aggval);
5190
5191 fbi.node = node;
5192 fbi.info = NULL;
5193 fbi.bb_infos = vNULL;
5194 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5195 fbi.param_count = param_count;
5196 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
5197
5198 vec_safe_grow_cleared (descriptors, param_count);
5199 ipa_populate_param_decls (node, *descriptors);
5200 calculate_dominance_info (CDI_DOMINATORS);
5201 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5202 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5203
5204 int i;
5205 struct ipa_bb_info *bi;
5206 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5207 free_ipa_bb_info (bi);
5208 fbi.bb_infos.release ();
5209 free_dominance_info (CDI_DOMINATORS);
5210
5211 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5212 s->agg_values = NULL;
5213 s->bits = NULL;
5214 s->m_vr = NULL;
5215
5216 vec_free (descriptors);
5217
5218 if (!something_changed)
5219 return 0;
5220
5221 if (cfg_changed)
5222 delete_unreachable_blocks_update_callgraph (node, false);
5223
5224 return TODO_update_ssa_only_virtuals;
5225 }
5226
5227 #include "gt-ipa-prop.h"