1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "predict.h"
25 #include "vec.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "tm.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "expr.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
51 #include "target.h"
52 #include "hash-map.h"
53 #include "plugin-api.h"
54 #include "ipa-ref.h"
55 #include "cgraph.h"
56 #include "alloc-pool.h"
57 #include "ipa-prop.h"
58 #include "bitmap.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-into-ssa.h"
64 #include "tree-dfa.h"
65 #include "tree-pass.h"
66 #include "tree-inline.h"
67 #include "ipa-inline.h"
68 #include "flags.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "lto-streamer.h"
72 #include "data-streamer.h"
73 #include "tree-streamer.h"
74 #include "params.h"
75 #include "ipa-utils.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "dbgcnt.h"
79 #include "domwalk.h"
80 #include "builtins.h"
81 #include "calls.h"
82
83 /* Intermediate information that we get from alias analysis about a particular
84 parameter in a particular basic_block. When a parameter or the memory it
85 references is marked modified, we use that information in all dominated
86 blocks without consulting the alias analysis oracle. */
87
88 struct param_aa_status
89 {
90 /* Set when this structure contains meaningful information. If not, the
91 structure describing a dominating BB should be used instead. */
92 bool valid;
93
94 /* Whether we have seen something which might have modified the data in
95 question. PARM is for the parameter itself, REF is for data it points to
96 but using the alias type of individual accesses and PT is the same thing
97 but for computing aggregate pass-through functions using a very inclusive
98 ao_ref. */
99 bool parm_modified, ref_modified, pt_modified;
100 };
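/* For illustration (an editorial sketch, not part of the original sources):
   once a statement in block B is found to modify parameter P, parm_modified
   is set in B's param_aa_status, and blocks dominated by B can inherit that
   answer from the dominating status instead of consulting the alias oracle
   again.  */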
101
102 /* Information related to a given BB that is used only when looking at function
103 body. */
104
105 struct ipa_bb_info
106 {
107 /* Call graph edges going out of this BB. */
108 vec<cgraph_edge *> cg_edges;
109 /* Alias analysis statuses of each formal parameter at this bb. */
110 vec<param_aa_status> param_aa_statuses;
111 };
112
113 /* Structure with global information that is only used when looking at function
114 body. */
115
116 struct func_body_info
117 {
118 /* The node that is being analyzed. */
119 cgraph_node *node;
120
121 /* Its info. */
122 struct ipa_node_params *info;
123
124 /* Information about individual BBs. */
125 vec<ipa_bb_info> bb_infos;
126
127 /* Number of parameters. */
128 int param_count;
129
130 /* Number of statements already walked when analyzing this function. */
131 unsigned int aa_walked;
132 };
133
134 /* Vector where the parameter infos are actually stored. */
135 vec<ipa_node_params> ipa_node_params_vector;
136 /* Vector of known aggregate values in cloned nodes. */
137 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
138 /* Vector where the edge argument infos are actually stored. */
139 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
140
141 /* Holders of ipa cgraph hooks: */
142 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
143 static struct cgraph_node_hook_list *node_removal_hook_holder;
144 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
145 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 static struct cgraph_node_hook_list *function_insertion_hook_holder;
147
148 /* Description of a reference to an IPA constant. */
149 struct ipa_cst_ref_desc
150 {
151 /* Edge that corresponds to the statement which took the reference. */
152 struct cgraph_edge *cs;
153 /* Linked list of duplicates created when call graph edges are cloned. */
154 struct ipa_cst_ref_desc *next_duplicate;
155 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
156 is out of control. */
157 int refcount;
158 };
159
160 /* Allocation pool for reference descriptions. */
161
162 static alloc_pool ipa_refdesc_pool;
163
164 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
165 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
166
167 static bool
168 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
169 {
170 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
171 struct cl_optimization *os;
172
173 if (!fs_opts)
174 return false;
175 os = TREE_OPTIMIZATION (fs_opts);
176 return !os->x_optimize || !os->x_flag_ipa_cp;
177 }
178
179 /* Return index of the formal parameter whose tree is PTREE, looked up in the
180 given vector of parameter DESCRIPTORS; return -1 if it is not found. */
181
182 static int
183 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
184 {
185 int i, count;
186
187 count = descriptors.length ();
188 for (i = 0; i < count; i++)
189 if (descriptors[i].decl == ptree)
190 return i;
191
192 return -1;
193 }
194
195 /* Return index of the formal parameter whose tree is PTREE in the function
196 which corresponds to INFO. */
197
198 int
199 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
200 {
201 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
202 }
203
204 /* Populate the decl and move_cost fields in the parameter DESCRIPTORS that
205 correspond to NODE. */
206
207 static void
208 ipa_populate_param_decls (struct cgraph_node *node,
209 vec<ipa_param_descriptor> &descriptors)
210 {
211 tree fndecl;
212 tree fnargs;
213 tree parm;
214 int param_num;
215
216 fndecl = node->decl;
217 gcc_assert (gimple_has_body_p (fndecl));
218 fnargs = DECL_ARGUMENTS (fndecl);
219 param_num = 0;
220 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
221 {
222 descriptors[param_num].decl = parm;
223 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
224 true);
225 param_num++;
226 }
227 }
228
229 /* Return how many formal parameters FNDECL has. */
230
231 int
232 count_formal_params (tree fndecl)
233 {
234 tree parm;
235 int count = 0;
236 gcc_assert (gimple_has_body_p (fndecl));
237
238 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
239 count++;
240
241 return count;
242 }
243
244 /* Dump a representation of the Ith formal parameter of the function
245 corresponding to INFO to FILE: its index and, when it is available, its
246 declaration. */
247
248 void
249 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
250 {
251 fprintf (file, "param #%i", i);
252 if (info->descriptors[i].decl)
253 {
254 fprintf (file, " ");
255 print_generic_expr (file, info->descriptors[i].decl, 0);
256 }
257 }
258
259 /* Initialize the ipa_node_params structure associated with NODE
260 to hold PARAM_COUNT parameters. */
261
262 void
263 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
264 {
265 struct ipa_node_params *info = IPA_NODE_REF (node);
266
267 if (!info->descriptors.exists () && param_count)
268 info->descriptors.safe_grow_cleared (param_count);
269 }
270
271 /* Initialize the ipa_node_params structure associated with NODE by counting
272 the function parameters, creating the descriptors and populating their
273 param_decls. */
274
275 void
276 ipa_initialize_node_params (struct cgraph_node *node)
277 {
278 struct ipa_node_params *info = IPA_NODE_REF (node);
279
280 if (!info->descriptors.exists ())
281 {
282 ipa_alloc_node_params (node, count_formal_params (node->decl));
283 ipa_populate_param_decls (node, info->descriptors);
284 }
285 }
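/* Typical use, for illustration only (an editorial sketch, not from the
   original sources): an analysis pass initializes the descriptors once per
   node and then queries them:

     ipa_initialize_node_params (node);
     struct ipa_node_params *info = IPA_NODE_REF (node);
     int idx = ipa_get_param_decl_index (info, parm_decl);
     if (idx >= 0)
       ipa_dump_param (dump_file, info, idx);  */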
286
287 /* Print the jump functions associated with call graph edge CS to file F. */
288
289 static void
290 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
291 {
292 int i, count;
293
294 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
295 for (i = 0; i < count; i++)
296 {
297 struct ipa_jump_func *jump_func;
298 enum jump_func_type type;
299
300 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
301 type = jump_func->type;
302
303 fprintf (f, " param %d: ", i);
304 if (type == IPA_JF_UNKNOWN)
305 fprintf (f, "UNKNOWN\n");
306 else if (type == IPA_JF_CONST)
307 {
308 tree val = jump_func->value.constant.value;
309 fprintf (f, "CONST: ");
310 print_generic_expr (f, val, 0);
311 if (TREE_CODE (val) == ADDR_EXPR
312 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
313 {
314 fprintf (f, " -> ");
315 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
316 0);
317 }
318 fprintf (f, "\n");
319 }
320 else if (type == IPA_JF_PASS_THROUGH)
321 {
322 fprintf (f, "PASS THROUGH: ");
323 fprintf (f, "%d, op %s",
324 jump_func->value.pass_through.formal_id,
325 get_tree_code_name (jump_func->value.pass_through.operation));
326 if (jump_func->value.pass_through.operation != NOP_EXPR)
327 {
328 fprintf (f, " ");
329 print_generic_expr (f,
330 jump_func->value.pass_through.operand, 0);
331 }
332 if (jump_func->value.pass_through.agg_preserved)
333 fprintf (f, ", agg_preserved");
334 fprintf (f, "\n");
335 }
336 else if (type == IPA_JF_ANCESTOR)
337 {
338 fprintf (f, "ANCESTOR: ");
339 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
340 jump_func->value.ancestor.formal_id,
341 jump_func->value.ancestor.offset);
342 if (jump_func->value.ancestor.agg_preserved)
343 fprintf (f, ", agg_preserved");
344 fprintf (f, "\n");
345 }
346
347 if (jump_func->agg.items)
348 {
349 struct ipa_agg_jf_item *item;
350 int j;
351
352 fprintf (f, " Aggregate passed by %s:\n",
353 jump_func->agg.by_ref ? "reference" : "value");
354 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
355 {
356 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
357 item->offset);
358 if (TYPE_P (item->value))
359 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
360 tree_to_uhwi (TYPE_SIZE (item->value)));
361 else
362 {
363 fprintf (f, "cst: ");
364 print_generic_expr (f, item->value, 0);
365 }
366 fprintf (f, "\n");
367 }
368 }
369
370 struct ipa_polymorphic_call_context *ctx
371 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
372 if (ctx && !ctx->useless_p ())
373 {
374 fprintf (f, " Context: ");
375 ctx->dump (f);
376 }
377 }
378 }
379
380
381 /* Print the jump functions of all arguments on all call graph edges going from
382 NODE to file F. */
383
384 void
385 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
386 {
387 struct cgraph_edge *cs;
388
389 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
390 node->order);
391 for (cs = node->callees; cs; cs = cs->next_callee)
392 {
393 if (!ipa_edge_args_info_available_for_edge_p (cs))
394 continue;
395
396 fprintf (f, " callsite %s/%i -> %s/%i : \n",
397 xstrdup (node->name ()), node->order,
398 xstrdup (cs->callee->name ()),
399 cs->callee->order);
400 ipa_print_node_jump_functions_for_edge (f, cs);
401 }
402
403 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
404 {
405 struct cgraph_indirect_call_info *ii;
406 if (!ipa_edge_args_info_available_for_edge_p (cs))
407 continue;
408
409 ii = cs->indirect_info;
410 if (ii->agg_contents)
411 fprintf (f, " indirect %s callsite, calling param %i, "
412 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
413 ii->member_ptr ? "member ptr" : "aggregate",
414 ii->param_index, ii->offset,
415 ii->by_ref ? "by reference" : "by value");
416 else
417 fprintf (f, " indirect %s callsite, calling param %i, "
418 "offset " HOST_WIDE_INT_PRINT_DEC,
419 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
420 ii->offset);
421
422 if (cs->call_stmt)
423 {
424 fprintf (f, ", for stmt ");
425 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
426 }
427 else
428 fprintf (f, "\n");
429 if (ii->polymorphic)
430 ii->context.dump (f);
431 ipa_print_node_jump_functions_for_edge (f, cs);
432 }
433 }
434
435 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
436
437 void
438 ipa_print_all_jump_functions (FILE *f)
439 {
440 struct cgraph_node *node;
441
442 fprintf (f, "\nJump functions:\n");
443 FOR_EACH_FUNCTION (node)
444 {
445 ipa_print_node_jump_functions (f, node);
446 }
447 }
448
449 /* Set DST to be a copy of another jump function SRC (to be used by jump
450 function combination code). The two functions will share their rdesc. */
451
452 static void
453 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
454 struct ipa_jump_func *src)
455
456 {
457 gcc_checking_assert (src->type == IPA_JF_CONST);
458 dst->type = IPA_JF_CONST;
459 dst->value.constant = src->value.constant;
460 }
461
462 /* Set JFUNC to be a constant jump function. */
463
464 static void
465 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
466 struct cgraph_edge *cs)
467 {
468 constant = unshare_expr (constant);
469 if (constant && EXPR_P (constant))
470 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
471 jfunc->type = IPA_JF_CONST;
472 jfunc->value.constant.value = unshare_expr_without_location (constant);
473
474 if (TREE_CODE (constant) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
476 {
477 struct ipa_cst_ref_desc *rdesc;
478 if (!ipa_refdesc_pool)
479 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
480 sizeof (struct ipa_cst_ref_desc), 32);
481
482 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
483 rdesc->cs = cs;
484 rdesc->next_duplicate = NULL;
485 rdesc->refcount = 1;
486 jfunc->value.constant.rdesc = rdesc;
487 }
488 else
489 jfunc->value.constant.rdesc = NULL;
490 }
491
492 /* Set JFUNC to be a simple pass-through jump function. */
493 static void
494 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
495 bool agg_preserved)
496 {
497 jfunc->type = IPA_JF_PASS_THROUGH;
498 jfunc->value.pass_through.operand = NULL_TREE;
499 jfunc->value.pass_through.formal_id = formal_id;
500 jfunc->value.pass_through.operation = NOP_EXPR;
501 jfunc->value.pass_through.agg_preserved = agg_preserved;
502 }
503
504 /* Set JFUNC to be an arithmetic pass through jump function. */
505
506 static void
507 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
508 tree operand, enum tree_code operation)
509 {
510 jfunc->type = IPA_JF_PASS_THROUGH;
511 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
512 jfunc->value.pass_through.formal_id = formal_id;
513 jfunc->value.pass_through.operation = operation;
514 jfunc->value.pass_through.agg_preserved = false;
515 }
516
517 /* Set JFUNC to be an ancestor jump function. */
518
519 static void
520 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
521 int formal_id, bool agg_preserved)
522 {
523 jfunc->type = IPA_JF_ANCESTOR;
524 jfunc->value.ancestor.formal_id = formal_id;
525 jfunc->value.ancestor.offset = offset;
526 jfunc->value.ancestor.agg_preserved = agg_preserved;
527 }
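/* To illustrate the setters above (an editorial sketch; the argument
   expressions are made up):

     bar (7);              ipa_set_jf_constant: IPA_JF_CONST, value 7
     bar (a);              ipa_set_jf_simple_pass_through: formal a, NOP_EXPR
     bar (a + 4);          ipa_set_jf_arith_pass_through: formal a, PLUS_EXPR, 4
     bar (&this->D.1748);  ipa_set_ancestor_jf: formal this, offset of D.1748  */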
528
529 /* Get IPA BB information about the given BB. FBI is the context of analysis
530 of this function body. */
531
532 static struct ipa_bb_info *
533 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
534 {
535 gcc_checking_assert (fbi);
536 return &fbi->bb_infos[bb->index];
537 }
538
539 /* Structure to be passed in between detect_type_change and
540 check_stmt_for_type_change. */
541
542 struct prop_type_change_info
543 {
544 /* Offset into the object where there is the virtual method pointer we are
545 looking for. */
546 HOST_WIDE_INT offset;
547 /* The declaration or SSA_NAME pointer of the base that we are checking for
548 type change. */
549 tree object;
550 /* Set to true if dynamic type change has been detected. */
551 bool type_maybe_changed;
552 };
553
554 /* Return true if STMT can modify a virtual method table pointer.
555
556 This function makes special assumptions about both constructors and
557 destructors which are all the functions that are allowed to alter the VMT
558 pointers. It assumes that destructors begin with assignment into all VMT
559 pointers and that constructors essentially work in the following way:
560
561 1) The very first thing they do is to call constructors of ancestor
562 sub-objects that have them.
563
564 2) Then the VMT pointers of this object and all its ancestors are set to
565 new values corresponding to the type associated with the constructor.
566
567 3) Only afterwards is other code run, such as constructors of member
568 sub-objects and the code written by the user. Only this code may call
569 virtual functions, directly or indirectly.
570
571 There is no other way to call a constructor of an ancestor
572 sub-object.
573
574 This means that we do not have to care whether constructors get the correct
575 type information because they will always change it (in fact, if we define
576 the type to be given by the VMT pointer, it is undefined).
577
578 The most important fact to derive from the above is that if, for some
579 statement in section 3, we try to detect whether the dynamic type has
580 changed, we can safely ignore all calls as we examine the function body
581 backwards until we reach statements in section 2 because these calls cannot
582 be ancestor constructors or destructors (if the input is not bogus) and so
583 do not change the dynamic type (this holds true only for automatically
584 allocated objects but at the moment we devirtualize only these). We then
585 must detect that statements in section 2 change the dynamic type and can try
586 to derive the new type. That is enough and we can stop, we will never see
587 the calls into constructors of sub-objects in this code. Therefore we can
588 safely ignore all call statements that we traverse.
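
   As an illustration (an editorial sketch, not part of the original
   comment), a constructor body conceptually looks like this:

     Base::Base (this);                  <- 1) ancestor ctors run first
     this->vptr = &vtable_for_Derived;   <- 2) VMT pointers are set
     member ctors and user code;         <- 3) only now may virtual calls occur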
589 */
590
591 static bool
592 stmt_may_be_vtbl_ptr_store (gimple stmt)
593 {
594 if (is_gimple_call (stmt))
595 return false;
596 if (gimple_clobber_p (stmt))
597 return false;
598 else if (is_gimple_assign (stmt))
599 {
600 tree lhs = gimple_assign_lhs (stmt);
601
602 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
603 {
604 if (flag_strict_aliasing
605 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
606 return false;
607
608 if (TREE_CODE (lhs) == COMPONENT_REF
609 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
610 return false;
611 /* In the future we might want to use get_base_ref_and_offset to find
612 if there is a field corresponding to the offset and if so, proceed
613 almost like if it was a component ref. */
614 }
615 }
616 return true;
617 }
618
619 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
620 to check whether a particular statement may modify the virtual table
621 pointer. It stores its result into DATA, which points to a
622 prop_type_change_info structure. */
623
624 static bool
625 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
626 {
627 gimple stmt = SSA_NAME_DEF_STMT (vdef);
628 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
629
630 if (stmt_may_be_vtbl_ptr_store (stmt))
631 {
632 tci->type_maybe_changed = true;
633 return true;
634 }
635 else
636 return false;
637 }
638
639 /* See if ARG is a PARM_DECL describing an instance passed by pointer
640 or reference in FUNCTION. Return true if the dynamic type may change
641 between the beginning of the function and the invocation of CALL.
642
643 Generally, functions are not allowed to change the type of such instances,
644 but they may call destructors. We assume that methods cannot destroy the
645 THIS pointer. Also, as a special case, constructors and destructors may
646 change the type of the THIS pointer. */
647
648 static bool
649 param_type_may_change_p (tree function, tree arg, gimple call)
650 {
651 /* Pure functions cannot make any changes to the dynamic type;
652 that requires writing to memory. */
653 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
654 return false;
655 /* We need to check if we are within an inlined constructor
656 or destructor (ideally we would have a way to check that the
657 inlined cdtor is actually working on ARG, but we don't have
658 an easy way to establish this, so punt on all non-pure cdtors).
659 We could also record the types of cdtors and, once we know the
660 type of the instance, match them.
661
662 Also, code unification optimizations may merge calls from
663 different blocks, making return values unreliable. So
664 do nothing during late optimization. */
665 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
666 return true;
667 if (TREE_CODE (arg) == SSA_NAME
668 && SSA_NAME_IS_DEFAULT_DEF (arg)
669 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
670 {
671 /* Normal (non-THIS) argument. */
672 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
673 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
674 /* THIS pointer of a method - here we want to watch constructors
675 and destructors as those definitely may change the dynamic
676 type. */
677 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
678 && !DECL_CXX_CONSTRUCTOR_P (function)
679 && !DECL_CXX_DESTRUCTOR_P (function)
680 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
681 {
682 /* Walk the inline stack and watch out for ctors/dtors. */
683 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
684 block = BLOCK_SUPERCONTEXT (block))
685 if (BLOCK_ABSTRACT_ORIGIN (block)
686 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
687 {
688 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
689
690 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
691 continue;
692 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
693 && (DECL_CXX_CONSTRUCTOR_P (fn)
694 || DECL_CXX_DESTRUCTOR_P (fn)))
695 return true;
696 }
697 return false;
698 }
699 }
700 return true;
701 }
702
703 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
704 callsite CALL) by looking for assignments to its virtual table pointer. If
705 it is, return true and fill in the jump function JFUNC with relevant type
706 information or set it to unknown. ARG is the object itself (not a pointer
707 to it, unless dereferenced). BASE is the base of the memory access as
708 returned by get_ref_base_and_extent, as is the offset.
709
710 This is a helper function for detect_type_change and detect_type_change_ssa
711 that does the heavy work, which is usually unnecessary. */
712
713 static bool
714 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
715 gimple call, struct ipa_jump_func *jfunc,
716 HOST_WIDE_INT offset)
717 {
718 struct prop_type_change_info tci;
719 ao_ref ao;
720 bool entry_reached = false;
721
722 gcc_checking_assert (DECL_P (arg)
723 || TREE_CODE (arg) == MEM_REF
724 || handled_component_p (arg));
725
726 comp_type = TYPE_MAIN_VARIANT (comp_type);
727
728 /* Const calls cannot call virtual methods through VMT and so type changes do
729 not matter. */
730 if (!flag_devirtualize || !gimple_vuse (call)
731 /* Be sure expected_type is polymorphic. */
732 || !comp_type
733 || TREE_CODE (comp_type) != RECORD_TYPE
734 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
735 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
736 return true;
737
738 ao_ref_init (&ao, arg);
739 ao.base = base;
740 ao.offset = offset;
741 ao.size = POINTER_SIZE;
742 ao.max_size = ao.size;
743
744 tci.offset = offset;
745 tci.object = get_base_address (arg);
746 tci.type_maybe_changed = false;
747
748 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
749 &tci, NULL, &entry_reached);
750 if (!tci.type_maybe_changed)
751 return false;
752
753 jfunc->type = IPA_JF_UNKNOWN;
754 return true;
755 }
756
757 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
758 If it is, return true and fill in the jump function JFUNC with relevant type
759 information or set it to unknown. ARG is the object itself (not a pointer
760 to it, unless dereferenced). BASE is the base of the memory access as
761 returned by get_ref_base_and_extent, as is the offset. */
762
763 static bool
764 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
765 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
766 {
767 if (!flag_devirtualize)
768 return false;
769
770 if (TREE_CODE (base) == MEM_REF
771 && !param_type_may_change_p (current_function_decl,
772 TREE_OPERAND (base, 0),
773 call))
774 return false;
775 return detect_type_change_from_memory_writes (arg, base, comp_type,
776 call, jfunc, offset);
777 }
778
779 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
780 SSA name (its dereference will become the base and the offset is assumed to
781 be zero). */
782
783 static bool
784 detect_type_change_ssa (tree arg, tree comp_type,
785 gimple call, struct ipa_jump_func *jfunc)
786 {
787 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
788 if (!flag_devirtualize
789 || !POINTER_TYPE_P (TREE_TYPE (arg)))
790 return false;
791
792 if (!param_type_may_change_p (current_function_decl, arg, call))
793 return false;
794
795 arg = build2 (MEM_REF, ptr_type_node, arg,
796 build_int_cst (ptr_type_node, 0));
797
798 return detect_type_change_from_memory_writes (arg, arg, comp_type,
799 call, jfunc, 0);
800 }
801
802 /* Callback of walk_aliased_vdefs. Records that it has been invoked in the
803 boolean variable pointed to by DATA. */
804
805 static bool
806 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
807 void *data)
808 {
809 bool *b = (bool *) data;
810 *b = true;
811 return true;
812 }
813
814 /* Return true if we have already walked so many statements in AA that we
815 should really just start giving up. */
816
817 static bool
818 aa_overwalked (struct func_body_info *fbi)
819 {
820 gcc_checking_assert (fbi);
821 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
822 }
823
824 /* Find the nearest valid aa status for parameter specified by INDEX that
825 dominates BB. */
826
827 static struct param_aa_status *
828 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
829 int index)
830 {
831 while (true)
832 {
833 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
834 if (!bb)
835 return NULL;
836 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
837 if (!bi->param_aa_statuses.is_empty ()
838 && bi->param_aa_statuses[index].valid)
839 return &bi->param_aa_statuses[index];
840 }
841 }
842
843 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
844 structures and/or initialize the result with a dominating description as
845 necessary. */
846
847 static struct param_aa_status *
848 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
849 int index)
850 {
851 gcc_checking_assert (fbi);
852 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
853 if (bi->param_aa_statuses.is_empty ())
854 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
855 struct param_aa_status *paa = &bi->param_aa_statuses[index];
856 if (!paa->valid)
857 {
858 gcc_checking_assert (!paa->parm_modified
859 && !paa->ref_modified
860 && !paa->pt_modified);
861 struct param_aa_status *dom_paa;
862 dom_paa = find_dominating_aa_status (fbi, bb, index);
863 if (dom_paa)
864 *paa = *dom_paa;
865 else
866 paa->valid = true;
867 }
868
869 return paa;
870 }
871
872 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
873 a value known not to be modified in this function before reaching the
874 statement STMT. FBI holds information about the function that we have
875 gathered so far but that does not survive the summary building stage. */
876
877 static bool
878 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
879 gimple stmt, tree parm_load)
880 {
881 struct param_aa_status *paa;
882 bool modified = false;
883 ao_ref refd;
884
885 /* FIXME: FBI can be NULL if we are being called from outside
886 ipa_node_analysis or ipcp_transform_function, which currently happens
887 during inlining analysis. It would be great to extend fbi's lifetime and
888 always have it. Currently, we are just not afraid of too much walking in
889 that case. */
890 if (fbi)
891 {
892 if (aa_overwalked (fbi))
893 return false;
894 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
895 if (paa->parm_modified)
896 return false;
897 }
898 else
899 paa = NULL;
900
901 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
902 ao_ref_init (&refd, parm_load);
903 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
904 &modified, NULL);
905 if (fbi)
906 fbi->aa_walked += walked;
907 if (paa && modified)
908 paa->parm_modified = true;
909 return !modified;
910 }
911
912 /* If STMT is an assignment that loads a value from a parameter declaration
913 that has not been modified, return the index of the parameter in
914 ipa_node_params. Otherwise return -1. */
915
916 static int
917 load_from_unmodified_param (struct func_body_info *fbi,
918 vec<ipa_param_descriptor> descriptors,
919 gimple stmt)
920 {
921 int index;
922 tree op1;
923
924 if (!gimple_assign_single_p (stmt))
925 return -1;
926
927 op1 = gimple_assign_rhs1 (stmt);
928 if (TREE_CODE (op1) != PARM_DECL)
929 return -1;
930
931 index = ipa_get_param_decl_index_1 (descriptors, op1);
932 if (index < 0
933 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
934 return -1;
935
936 return index;
937 }
938
939 /* Return true if memory reference REF (which must be a load through the parameter
940 with INDEX) loads data that are known to be unmodified in this function
941 before reaching statement STMT. */
942
943 static bool
944 parm_ref_data_preserved_p (struct func_body_info *fbi,
945 int index, gimple stmt, tree ref)
946 {
947 struct param_aa_status *paa;
948 bool modified = false;
949 ao_ref refd;
950
951 /* FIXME: FBI can be NULL if we are being called from outside
952 ipa_node_analysis or ipcp_transform_function, which currently happens
953 during inlining analysis. It would be great to extend fbi's lifetime and
954 always have it. Currently, we are just not afraid of too much walking in
955 that case. */
956 if (fbi)
957 {
958 if (aa_overwalked (fbi))
959 return false;
960 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
961 if (paa->ref_modified)
962 return false;
963 }
964 else
965 paa = NULL;
966
967 gcc_checking_assert (gimple_vuse (stmt));
968 ao_ref_init (&refd, ref);
969 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
970 &modified, NULL);
971 if (fbi)
972 fbi->aa_walked += walked;
973 if (paa && modified)
974 paa->ref_modified = true;
975 return !modified;
976 }
977
978 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
979 is known to be unmodified in this function before reaching call statement
980 CALL into which it is passed. FBI describes the function body. */
981
982 static bool
983 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
984 gimple call, tree parm)
985 {
986 bool modified = false;
987 ao_ref refd;
988
989 /* It's unnecessary to calculate anything about memory contents for a const
990 function because it is not going to use it. But do not cache the result
991 either. Also, no such calculations for non-pointers. */
992 if (!gimple_vuse (call)
993 || !POINTER_TYPE_P (TREE_TYPE (parm))
994 || aa_overwalked (fbi))
995 return false;
996
997 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
998 index);
999 if (paa->pt_modified)
1000 return false;
1001
1002 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1003 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1004 &modified, NULL);
1005 fbi->aa_walked += walked;
1006 if (modified)
1007 paa->pt_modified = true;
1008 return !modified;
1009 }
1010
1011 /* Return true if we can prove that OP is a memory reference loading unmodified
1012 data from an aggregate passed as a parameter and if the aggregate is passed
1013 by reference, that the alias type of the load corresponds to the type of the
1014 formal parameter (so that we can rely on this type for TBAA in callers).
1015 FBI and DESCRIPTORS describe parameters of the current function (FBI can
1016 be NULL), STMT is the load statement. If the function returns true,
1017 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
1018 offset within the aggregate and whether it is a load from a value passed
1019 by reference, respectively. */
1020
1021 static bool
1022 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1023 vec<ipa_param_descriptor> descriptors,
1024 gimple stmt, tree op, int *index_p,
1025 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1026 bool *by_ref_p)
1027 {
1028 int index;
1029 HOST_WIDE_INT size, max_size;
1030 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1031
1032 if (max_size == -1 || max_size != size || *offset_p < 0)
1033 return false;
1034
1035 if (DECL_P (base))
1036 {
1037 int index = ipa_get_param_decl_index_1 (descriptors, base);
1038 if (index >= 0
1039 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1040 {
1041 *index_p = index;
1042 *by_ref_p = false;
1043 if (size_p)
1044 *size_p = size;
1045 return true;
1046 }
1047 return false;
1048 }
1049
1050 if (TREE_CODE (base) != MEM_REF
1051 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1052 || !integer_zerop (TREE_OPERAND (base, 1)))
1053 return false;
1054
1055 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1056 {
1057 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1058 index = ipa_get_param_decl_index_1 (descriptors, parm);
1059 }
1060 else
1061 {
1062 /* This branch catches situations where a pointer parameter is not a
1063 gimple register, for example:
1064
1065 void hip7(S*) (struct S * p)
1066 {
1067 void (*<T2e4>) (struct S *) D.1867;
1068 struct S * p.1;
1069
1070 <bb 2>:
1071 p.1_1 = p;
1072 D.1867_2 = p.1_1->f;
1073 D.1867_2 ();
1074 gdp = &p;
1075 */
1076
1077 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1078 index = load_from_unmodified_param (fbi, descriptors, def);
1079 }
1080
1081 if (index >= 0
1082 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1083 {
1084 *index_p = index;
1085 *by_ref_p = true;
1086 if (size_p)
1087 *size_p = size;
1088 return true;
1089 }
1090 return false;
1091 }
1092
1093 /* Just like the previous function, just without the func_body_info
1094 pointer, for users outside of this file. */
1095
1096 bool
1097 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1098 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1099 bool *by_ref_p)
1100 {
1101 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1102 offset_p, NULL, by_ref_p);
1103 }
1104
1105 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1106 of an assignment statement STMT, try to determine whether we are actually
1107 handling any of the following cases and construct an appropriate jump
1108 function into JFUNC if so:
1109
1110 1) The passed value is loaded from a formal parameter which is not a gimple
1111 register (most probably because it is addressable; the value still has to
1112 be scalar) and we can guarantee the value has not changed. This case can
1113 therefore be described by a simple pass-through jump function. For example:
1114
1115 foo (int a)
1116 {
1117 int a.0;
1118
1119 a.0_2 = a;
1120 bar (a.0_2);
1121
1122 2) The passed value can be described by a simple arithmetic pass-through
1123 jump function. E.g.
1124
1125 foo (int a)
1126 {
1127 int D.2064;
1128
1129 D.2064_4 = a.1(D) + 4;
1130 bar (D.2064_4);
1131
1132 This case can also occur in combination of the previous one, e.g.:
1133
1134 foo (int a, int z)
1135 {
1136 int a.0;
1137 int D.2064;
1138
1139 a.0_3 = a;
1140 D.2064_4 = a.0_3 + 4;
1141 foo (D.2064_4);
1142
1143 3) The passed value is an address of an object within another one (which
1144 is also passed by reference). Such situations are described by an ancestor
1145 jump function, for example:
1146
1147 B::foo() (struct B * const this)
1148 {
1149 struct A * D.1845;
1150
1151 D.1845_2 = &this_1(D)->D.1748;
1152 A::bar (D.1845_2);
1153
1154 INFO is the structure describing individual parameters, accessed in
1155 different stages of IPA optimizations. FBI contains the information that
1156 is only needed for intraprocedural analysis. */
1157
1158 static void
1159 compute_complex_assign_jump_func (struct func_body_info *fbi,
1160 struct ipa_node_params *info,
1161 struct ipa_jump_func *jfunc,
1162 gimple call, gimple stmt, tree name,
1163 tree param_type)
1164 {
1165 HOST_WIDE_INT offset, size, max_size;
1166 tree op1, tc_ssa, base, ssa;
1167 int index;
1168
1169 op1 = gimple_assign_rhs1 (stmt);
1170
1171 if (TREE_CODE (op1) == SSA_NAME)
1172 {
1173 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1174 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1175 else
1176 index = load_from_unmodified_param (fbi, info->descriptors,
1177 SSA_NAME_DEF_STMT (op1));
1178 tc_ssa = op1;
1179 }
1180 else
1181 {
1182 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1183 tc_ssa = gimple_assign_lhs (stmt);
1184 }
1185
1186 if (index >= 0)
1187 {
1188 tree op2 = gimple_assign_rhs2 (stmt);
1189
1190 if (op2)
1191 {
1192 if (!is_gimple_ip_invariant (op2)
1193 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1194 && !useless_type_conversion_p (TREE_TYPE (name),
1195 TREE_TYPE (op1))))
1196 return;
1197
1198 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1199 gimple_assign_rhs_code (stmt));
1200 }
1201 else if (gimple_assign_single_p (stmt))
1202 {
1203 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1204 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1205 }
1206 return;
1207 }
1208
1209 if (TREE_CODE (op1) != ADDR_EXPR)
1210 return;
1211 op1 = TREE_OPERAND (op1, 0);
1212 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1213 return;
1214 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1215 if (TREE_CODE (base) != MEM_REF
1216 /* If this is a varying address, punt. */
1217 || max_size == -1
1218 || max_size != size)
1219 return;
1220 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1221 ssa = TREE_OPERAND (base, 0);
1222 if (TREE_CODE (ssa) != SSA_NAME
1223 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1224 || offset < 0)
1225 return;
1226
1227 /* Dynamic types are changed in constructors and destructors. */
1228 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1229 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1230 ipa_set_ancestor_jf (jfunc, offset, index,
1231 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1232 }
1233
1234 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1235 it looks like:
1236
1237 iftmp.1_3 = &obj_2(D)->D.1762;
1238
1239 The base of the MEM_REF must be a default definition SSA NAME of a
1240 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1241 whole MEM_REF expression is returned and the offset calculated from any
1242 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1243 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1244
1245 static tree
1246 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1247 {
1248 HOST_WIDE_INT size, max_size;
1249 tree expr, parm, obj;
1250
1251 if (!gimple_assign_single_p (assign))
1252 return NULL_TREE;
1253 expr = gimple_assign_rhs1 (assign);
1254
1255 if (TREE_CODE (expr) != ADDR_EXPR)
1256 return NULL_TREE;
1257 expr = TREE_OPERAND (expr, 0);
1258 obj = expr;
1259 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1260
1261 if (TREE_CODE (expr) != MEM_REF
1262 /* If this is a varying address, punt. */
1263 || max_size == -1
1264 || max_size != size
1265 || *offset < 0)
1266 return NULL_TREE;
1267 parm = TREE_OPERAND (expr, 0);
1268 if (TREE_CODE (parm) != SSA_NAME
1269 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1270 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1271 return NULL_TREE;
1272
1273 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1274 *obj_p = obj;
1275 return expr;
1276 }
1277
1278
1279 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1280 statement PHI, try to find out whether NAME is in fact a
1281 multiple-inheritance typecast from a descendant into an ancestor of a formal
1282 parameter and thus can be described by an ancestor jump function and if so,
1283 write the appropriate function into JFUNC.
1284
1285 Essentially we want to match the following pattern:
1286
1287 if (obj_2(D) != 0B)
1288 goto <bb 3>;
1289 else
1290 goto <bb 4>;
1291
1292 <bb 3>:
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1294
1295 <bb 4>:
1296 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1297 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1298 return D.1879_6; */
1299
1300 static void
1301 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1302 struct ipa_node_params *info,
1303 struct ipa_jump_func *jfunc,
1304 gimple call, gimple phi)
1305 {
1306 HOST_WIDE_INT offset;
1307 gimple assign, cond;
1308 basic_block phi_bb, assign_bb, cond_bb;
1309 tree tmp, parm, expr, obj;
1310 int index, i;
1311
1312 if (gimple_phi_num_args (phi) != 2)
1313 return;
1314
1315 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1316 tmp = PHI_ARG_DEF (phi, 0);
1317 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1318 tmp = PHI_ARG_DEF (phi, 1);
1319 else
1320 return;
1321 if (TREE_CODE (tmp) != SSA_NAME
1322 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1323 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1324 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1325 return;
1326
1327 assign = SSA_NAME_DEF_STMT (tmp);
1328 assign_bb = gimple_bb (assign);
1329 if (!single_pred_p (assign_bb))
1330 return;
1331 expr = get_ancestor_addr_info (assign, &obj, &offset);
1332 if (!expr)
1333 return;
1334 parm = TREE_OPERAND (expr, 0);
1335 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1336 if (index < 0)
1337 return;
1338
1339 cond_bb = single_pred (assign_bb);
1340 cond = last_stmt (cond_bb);
1341 if (!cond
1342 || gimple_code (cond) != GIMPLE_COND
1343 || gimple_cond_code (cond) != NE_EXPR
1344 || gimple_cond_lhs (cond) != parm
1345 || !integer_zerop (gimple_cond_rhs (cond)))
1346 return;
1347
1348 phi_bb = gimple_bb (phi);
1349 for (i = 0; i < 2; i++)
1350 {
1351 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1352 if (pred != assign_bb && pred != cond_bb)
1353 return;
1354 }
1355
1356 ipa_set_ancestor_jf (jfunc, offset, index,
1357 parm_ref_data_pass_through_p (fbi, index, call, parm));
1358 }
1359
1360 /* Inspect the given TYPE and return true iff it has the same structure (the
1361 same number of fields of the same types) as a C++ member pointer. If
1362 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1363 corresponding fields there. */
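/* For reference, the layout being matched is roughly the following (an
   editorial sketch of the common C++ member pointer representation; the
   field names are illustrative only):

     struct fake_member_ptr
     {
       void (*__pfn) (struct S *);   <- pointer to METHOD_TYPE, first field
       ptrdiff_t __delta;            <- integral adjustment, second field
     };  */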
1364
1365 static bool
1366 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1367 {
1368 tree fld;
1369
1370 if (TREE_CODE (type) != RECORD_TYPE)
1371 return false;
1372
1373 fld = TYPE_FIELDS (type);
1374 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1375 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1376 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1377 return false;
1378
1379 if (method_ptr)
1380 *method_ptr = fld;
1381
1382 fld = DECL_CHAIN (fld);
1383 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1384 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1385 return false;
1386 if (delta)
1387 *delta = fld;
1388
1389 if (DECL_CHAIN (fld))
1390 return false;
1391
1392 return true;
1393 }
1394
1395 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1396 return the rhs of its defining statement. Otherwise return RHS as it
1397 is. */
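/* For example (an editorial sketch): given the copies  b_2 = a_1;
   c_3 = b_2;  passing c_3 to this function returns a_1; the walk stops at
   default definitions and at statements that are not simple copies.  */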
1398
1399 static inline tree
1400 get_ssa_def_if_simple_copy (tree rhs)
1401 {
1402 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1403 {
1404 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1405
1406 if (gimple_assign_single_p (def_stmt))
1407 rhs = gimple_assign_rhs1 (def_stmt);
1408 else
1409 break;
1410 }
1411 return rhs;
1412 }
1413
1414 /* Simple linked list, describing known contents of an aggregate before a
1415 call. */
1416
1417 struct ipa_known_agg_contents_list
1418 {
1419 /* Offset and size of the described part of the aggregate. */
1420 HOST_WIDE_INT offset, size;
1421 /* Known constant value or NULL if the contents is known to be unknown. */
1422 tree constant;
1423 /* Pointer to the next structure in the list. */
1424 struct ipa_known_agg_contents_list *next;
1425 };
1426
1427 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1428 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1429 unless there is a partial overlap, in which case return NULL, or such an
1430 element is already there, in which case set *ALREADY_THERE to true. */
1431
1432 static struct ipa_known_agg_contents_list **
1433 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1434 HOST_WIDE_INT lhs_offset,
1435 HOST_WIDE_INT lhs_size,
1436 bool *already_there)
1437 {
1438 struct ipa_known_agg_contents_list **p = list;
1439 while (*p && (*p)->offset < lhs_offset)
1440 {
1441 if ((*p)->offset + (*p)->size > lhs_offset)
1442 return NULL;
1443 p = &(*p)->next;
1444 }
1445
1446 if (*p && (*p)->offset < lhs_offset + lhs_size)
1447 {
1448 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1449 /* We already know this value is subsequently overwritten with
1450 something else. */
1451 *already_there = true;
1452 else
1453 /* Otherwise this is a partial overlap which we cannot
1454 represent. */
1455 return NULL;
1456 }
1457 return p;
1458 }
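/* For example (an editorial sketch): with entries at offsets 0 and 64, both
   of size 32, a lookup with LHS_OFFSET 32 and LHS_SIZE 32 returns the link
   between the two entries, a lookup with offset 64 and size 32 sets
   *ALREADY_THERE, and a lookup with offset 48 and size 32 returns NULL
   because it partially overlaps the second entry.  */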
1459
1460 /* Build aggregate jump function from LIST, assuming there are exactly
1461 CONST_COUNT constant entries there and that the offset of the passed
1462 argument is ARG_OFFSET, and store it into JFUNC. */
1463
1464 static void
1465 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1466 int const_count, HOST_WIDE_INT arg_offset,
1467 struct ipa_jump_func *jfunc)
1468 {
1469 vec_alloc (jfunc->agg.items, const_count);
1470 while (list)
1471 {
1472 if (list->constant)
1473 {
1474 struct ipa_agg_jf_item item;
1475 item.offset = list->offset - arg_offset;
1476 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1477 item.value = unshare_expr_without_location (list->constant);
1478 jfunc->agg.items->quick_push (item);
1479 }
1480 list = list->next;
1481 }
1482 }
1483
1484 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1485 in ARG is filled in with constant values. ARG can either be an aggregate
1486 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1487 aggregate. JFUNC is the jump function into which the constants are
1488 subsequently stored. */
1489
1490 static void
1491 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1492 struct ipa_jump_func *jfunc)
1493 {
1494 struct ipa_known_agg_contents_list *list = NULL;
1495 int item_count = 0, const_count = 0;
1496 HOST_WIDE_INT arg_offset, arg_size;
1497 gimple_stmt_iterator gsi;
1498 tree arg_base;
1499 bool check_ref, by_ref;
1500 ao_ref r;
1501
1502 /* The function operates in three stages. First, we prepare check_ref, r,
1503 arg_base and arg_offset based on what is actually passed as an actual
1504 argument. */
1505
1506 if (POINTER_TYPE_P (arg_type))
1507 {
1508 by_ref = true;
1509 if (TREE_CODE (arg) == SSA_NAME)
1510 {
1511 tree type_size;
1512 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1513 return;
1514 check_ref = true;
1515 arg_base = arg;
1516 arg_offset = 0;
1517 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1518 arg_size = tree_to_uhwi (type_size);
1519 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1520 }
1521 else if (TREE_CODE (arg) == ADDR_EXPR)
1522 {
1523 HOST_WIDE_INT arg_max_size;
1524
1525 arg = TREE_OPERAND (arg, 0);
1526 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1527 &arg_max_size);
1528 if (arg_max_size == -1
1529 || arg_max_size != arg_size
1530 || arg_offset < 0)
1531 return;
1532 if (DECL_P (arg_base))
1533 {
1534 check_ref = false;
1535 ao_ref_init (&r, arg_base);
1536 }
1537 else
1538 return;
1539 }
1540 else
1541 return;
1542 }
1543 else
1544 {
1545 HOST_WIDE_INT arg_max_size;
1546
1547 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1548
1549 by_ref = false;
1550 check_ref = false;
1551 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1552 &arg_max_size);
1553 if (arg_max_size == -1
1554 || arg_max_size != arg_size
1555 || arg_offset < 0)
1556 return;
1557
1558 ao_ref_init (&r, arg);
1559 }
1560
1561 /* Second stage walks back the BB, looks at individual statements and as long
1562 as it is confident of how the statements affect contents of the
1563 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1564 structures describing it. */
1565 gsi = gsi_for_stmt (call);
1566 gsi_prev (&gsi);
1567 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1568 {
1569 struct ipa_known_agg_contents_list *n, **p;
1570 gimple stmt = gsi_stmt (gsi);
1571 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1572 tree lhs, rhs, lhs_base;
1573
1574 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1575 continue;
1576 if (!gimple_assign_single_p (stmt))
1577 break;
1578
1579 lhs = gimple_assign_lhs (stmt);
1580 rhs = gimple_assign_rhs1 (stmt);
1581 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1582 || TREE_CODE (lhs) == BIT_FIELD_REF
1583 || contains_bitfld_component_ref_p (lhs))
1584 break;
1585
1586 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1587 &lhs_max_size);
1588 if (lhs_max_size == -1
1589 || lhs_max_size != lhs_size)
1590 break;
1591
1592 if (check_ref)
1593 {
1594 if (TREE_CODE (lhs_base) != MEM_REF
1595 || TREE_OPERAND (lhs_base, 0) != arg_base
1596 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1597 break;
1598 }
1599 else if (lhs_base != arg_base)
1600 {
1601 if (DECL_P (lhs_base))
1602 continue;
1603 else
1604 break;
1605 }
1606
1607 bool already_there = false;
1608 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1609 &already_there);
1610 if (!p)
1611 break;
1612 if (already_there)
1613 continue;
1614
1615 rhs = get_ssa_def_if_simple_copy (rhs);
1616 n = XALLOCA (struct ipa_known_agg_contents_list);
1617 n->size = lhs_size;
1618 n->offset = lhs_offset;
1619 if (is_gimple_ip_invariant (rhs))
1620 {
1621 n->constant = rhs;
1622 const_count++;
1623 }
1624 else
1625 n->constant = NULL_TREE;
1626 n->next = *p;
1627 *p = n;
1628
1629 item_count++;
1630 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1631 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1632 break;
1633 }
1634
1635 /* Third stage just goes over the list and creates an appropriate vector of
1636 ipa_agg_jf_item structures out of it, of course only if there are
1637 any known constants to begin with. */
1638
1639 if (const_count)
1640 {
1641 jfunc->agg.by_ref = by_ref;
1642 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1643 }
1644 }
1645
1646 static tree
1647 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1648 {
1649 int n;
1650 tree type = (e->callee
1651 ? TREE_TYPE (e->callee->decl)
1652 : gimple_call_fntype (e->call_stmt));
1653 tree t = TYPE_ARG_TYPES (type);
1654
1655 for (n = 0; n < i; n++)
1656 {
1657 if (!t)
1658 break;
1659 t = TREE_CHAIN (t);
1660 }
1661 if (t)
1662 return TREE_VALUE (t);
1663 if (!e->callee)
1664 return NULL;
1665 t = DECL_ARGUMENTS (e->callee->decl);
1666 for (n = 0; n < i; n++)
1667 {
1668 if (!t)
1669 return NULL;
1670 t = TREE_CHAIN (t);
1671 }
1672 if (t)
1673 return TREE_TYPE (t);
1674 return NULL;
1675 }
1676
1677 /* Compute jump functions for all arguments of callsite CS and insert the
1678 information in the jump_functions array in the ipa_edge_args corresponding
1679 to this callsite. */
1680
1681 static void
1682 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1683 struct cgraph_edge *cs)
1684 {
1685 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1686 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1687 gimple call = cs->call_stmt;
1688 int n, arg_num = gimple_call_num_args (call);
1689 bool useful_context = false;
1690
1691 if (arg_num == 0 || args->jump_functions)
1692 return;
1693 vec_safe_grow_cleared (args->jump_functions, arg_num);
1694 if (flag_devirtualize)
1695 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1696
1697 if (gimple_call_internal_p (call))
1698 return;
1699 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1700 return;
1701
1702 for (n = 0; n < arg_num; n++)
1703 {
1704 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1705 tree arg = gimple_call_arg (call, n);
1706 tree param_type = ipa_get_callee_param_type (cs, n);
1707 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1708 {
1709 tree instance;
1710 struct ipa_polymorphic_call_context context (cs->caller->decl,
1711 arg, cs->call_stmt,
1712 &instance);
1713 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1714 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1715 if (!context.useless_p ())
1716 useful_context = true;
1717 }
1718
1719 if (is_gimple_ip_invariant (arg))
1720 ipa_set_jf_constant (jfunc, arg, cs);
1721 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1722 && TREE_CODE (arg) == PARM_DECL)
1723 {
1724 int index = ipa_get_param_decl_index (info, arg);
1725
1726 gcc_assert (index >= 0);
1727 /* Aggregate passed by value, check for pass-through, otherwise we
1728 will attempt to fill in aggregate contents later in this
1729 for loop. */
1730 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1731 {
1732 ipa_set_jf_simple_pass_through (jfunc, index, false);
1733 continue;
1734 }
1735 }
1736 else if (TREE_CODE (arg) == SSA_NAME)
1737 {
1738 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1739 {
1740 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1741 if (index >= 0)
1742 {
1743 bool agg_p;
1744 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1745 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1746 }
1747 }
1748 else
1749 {
1750 gimple stmt = SSA_NAME_DEF_STMT (arg);
1751 if (is_gimple_assign (stmt))
1752 compute_complex_assign_jump_func (fbi, info, jfunc,
1753 call, stmt, arg, param_type);
1754 else if (gimple_code (stmt) == GIMPLE_PHI)
1755 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1756 call, stmt);
1757 }
1758 }
1759
1760 /* If ARG is a pointer, we cannot use its type to determine the type of the
1761 aggregate passed (because type conversions are ignored in gimple). Usually
1762 we can safely get the type from the function declaration, but in case of
1763 K&R prototypes or variadic functions we can try our luck with the type of
1764 the pointer passed. TODO: Since we look for the actual initialization of
1765 the memory object, we might do better to work out the type from the memory stores we find. */
1766 if (!param_type)
1767 param_type = TREE_TYPE (arg);
1768
1769 if ((jfunc->type != IPA_JF_PASS_THROUGH
1770 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1771 && (jfunc->type != IPA_JF_ANCESTOR
1772 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1773 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1774 || POINTER_TYPE_P (param_type)))
1775 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1776 }
1777 if (!useful_context)
1778 vec_free (args->polymorphic_call_contexts);
1779 }
1780
1781 /* Compute jump functions for all edges - both direct and indirect - outgoing
1782 from BB. */
1783
1784 static void
1785 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1786 {
1787 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1788 int i;
1789 struct cgraph_edge *cs;
1790
1791 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1792 {
1793 struct cgraph_node *callee = cs->callee;
1794
1795 if (callee)
1796 {
1797 callee->ultimate_alias_target ();
1798 /* We do not need to bother analyzing calls to unknown functions
1799 unless they may become known during lto/whopr. */
1800 if (!callee->definition && !flag_lto)
1801 continue;
1802 }
1803 ipa_compute_jump_functions_for_edge (fbi, cs);
1804 }
1805 }
1806
1807 /* If STMT looks like a statement loading a value from a member pointer formal
1808 parameter, return that parameter and store the offset of the field to
1809 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1810 might be clobbered). If USE_DELTA, then we look for a use of the delta
1811 field rather than the pfn. */
1812
1813 static tree
1814 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1815 HOST_WIDE_INT *offset_p)
1816 {
1817 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1818
1819 if (!gimple_assign_single_p (stmt))
1820 return NULL_TREE;
1821
1822 rhs = gimple_assign_rhs1 (stmt);
1823 if (TREE_CODE (rhs) == COMPONENT_REF)
1824 {
1825 ref_field = TREE_OPERAND (rhs, 1);
1826 rhs = TREE_OPERAND (rhs, 0);
1827 }
1828 else
1829 ref_field = NULL_TREE;
1830 if (TREE_CODE (rhs) != MEM_REF)
1831 return NULL_TREE;
1832 rec = TREE_OPERAND (rhs, 0);
1833 if (TREE_CODE (rec) != ADDR_EXPR)
1834 return NULL_TREE;
1835 rec = TREE_OPERAND (rec, 0);
1836 if (TREE_CODE (rec) != PARM_DECL
1837 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1838 return NULL_TREE;
1839 ref_offset = TREE_OPERAND (rhs, 1);
1840
1841 if (use_delta)
1842 fld = delta_field;
1843 else
1844 fld = ptr_field;
1845 if (offset_p)
1846 *offset_p = int_bit_position (fld);
1847
1848 if (ref_field)
1849 {
1850 if (integer_nonzerop (ref_offset))
1851 return NULL_TREE;
1852 return ref_field == fld ? rec : NULL_TREE;
1853 }
1854 else
1855 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1856 : NULL_TREE;
1857 }
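/* As an illustration (reusing shapes from the gimple dump quoted further
   below), for a member-pointer parameter F the function above matches a
   load of either form

     f$__pfn_24 = f.__pfn;
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   returning the PARM_DECL of F and storing the bit position of the
   matched field in *OFFSET_P.  */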
1858
1859 /* Returns true iff T is an SSA_NAME defined by a statement. */
1860
1861 static bool
1862 ipa_is_ssa_with_stmt_def (tree t)
1863 {
1864 if (TREE_CODE (t) == SSA_NAME
1865 && !SSA_NAME_IS_DEFAULT_DEF (t))
1866 return true;
1867 else
1868 return false;
1869 }
1870
1871 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1872 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1873 indirect call graph edge. */
1874
1875 static struct cgraph_edge *
1876 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1877 {
1878 struct cgraph_edge *cs;
1879
1880 cs = node->get_edge (stmt);
1881 cs->indirect_info->param_index = param_index;
1882 cs->indirect_info->agg_contents = 0;
1883 cs->indirect_info->member_ptr = 0;
1884 return cs;
1885 }
1886
1887 /* Analyze the CALL and examine uses of formal parameters of the caller
1888 FBI->node (described by FBI->info). FBI holds various intermediate
1889 information about the body of the function being analyzed. Currently the
1890 function checks whether the call calls a pointer that is a formal
1891 parameter and, if so, the parameter is marked with the called flag and an
1892 indirect call graph edge describing the call is created. This is very
1893 simple for ordinary pointers represented in SSA but not so nice when it
1894 comes to member pointers. The ugly part of this function does nothing
1895 more than try to match the pattern of such a call. An example of such a
1896 pattern is the gimple dump below; the call is on the last line:
1897
1898 <bb 2>:
1899 f$__delta_5 = f.__delta;
1900 f$__pfn_24 = f.__pfn;
1901
1902 or
1903 <bb 2>:
1904 f$__delta_5 = MEM[(struct *)&f];
1905 f$__pfn_24 = MEM[(struct *)&f + 4B];
1906
1907 and a few lines below:
1908
1909 <bb 5>
1910 D.2496_3 = (int) f$__pfn_24;
1911 D.2497_4 = D.2496_3 & 1;
1912 if (D.2497_4 != 0)
1913 goto <bb 3>;
1914 else
1915 goto <bb 4>;
1916
1917 <bb 6>:
1918 D.2500_7 = (unsigned int) f$__delta_5;
1919 D.2501_8 = &S + D.2500_7;
1920 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1921 D.2503_10 = *D.2502_9;
1922 D.2504_12 = f$__pfn_24 + -1;
1923 D.2505_13 = (unsigned int) D.2504_12;
1924 D.2506_14 = D.2503_10 + D.2505_13;
1925 D.2507_15 = *D.2506_14;
1926 iftmp.11_16 = (String:: *) D.2507_15;
1927
1928 <bb 7>:
1929 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1930 D.2500_19 = (unsigned int) f$__delta_5;
1931 D.2508_20 = &S + D.2500_19;
1932 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1933
1934 Such patterns are results of simple calls to a member pointer:
1935
1936 int doprinting (int (MyString::* f)(int) const)
1937 {
1938 MyString S ("somestring");
1939
1940 return (S.*f)(4);
1941 }
1942
1943 Moreover, the function looks for called pointers loaded from aggregates
1944 passed by value or by reference. */
1945
1946 static void
1947 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
1948 tree target)
1949 {
1950 struct ipa_node_params *info = fbi->info;
1951 HOST_WIDE_INT offset;
1952 bool by_ref;
1953
1954 if (SSA_NAME_IS_DEFAULT_DEF (target))
1955 {
1956 tree var = SSA_NAME_VAR (target);
1957 int index = ipa_get_param_decl_index (info, var);
1958 if (index >= 0)
1959 ipa_note_param_call (fbi->node, index, call);
1960 return;
1961 }
1962
1963 int index;
1964 gimple def = SSA_NAME_DEF_STMT (target);
1965 if (gimple_assign_single_p (def)
1966 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
1967 gimple_assign_rhs1 (def), &index, &offset,
1968 NULL, &by_ref))
1969 {
1970 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1971 cs->indirect_info->offset = offset;
1972 cs->indirect_info->agg_contents = 1;
1973 cs->indirect_info->by_ref = by_ref;
1974 return;
1975 }
1976
1977 /* Now we need to try to match the complex pattern of calling a member
1978 pointer. */
1979 if (gimple_code (def) != GIMPLE_PHI
1980 || gimple_phi_num_args (def) != 2
1981 || !POINTER_TYPE_P (TREE_TYPE (target))
1982 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1983 return;
1984
1985 /* First, we need to check whether one of these is a load from a member
1986 pointer that is a parameter to this function. */
1987 tree n1 = PHI_ARG_DEF (def, 0);
1988 tree n2 = PHI_ARG_DEF (def, 1);
1989 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1990 return;
1991 gimple d1 = SSA_NAME_DEF_STMT (n1);
1992 gimple d2 = SSA_NAME_DEF_STMT (n2);
1993
1994 tree rec;
1995 basic_block bb, virt_bb;
1996 basic_block join = gimple_bb (def);
1997 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1998 {
1999 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2000 return;
2001
2002 bb = EDGE_PRED (join, 0)->src;
2003 virt_bb = gimple_bb (d2);
2004 }
2005 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2006 {
2007 bb = EDGE_PRED (join, 1)->src;
2008 virt_bb = gimple_bb (d1);
2009 }
2010 else
2011 return;
2012
2013 /* Second, we need to check that the basic blocks are laid out in the way
2014 corresponding to the pattern. */
2015
2016 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2017 || single_pred (virt_bb) != bb
2018 || single_succ (virt_bb) != join)
2019 return;
2020
2021 /* Third, let's see that the branching is done depending on the least
2022 significant bit of the pfn. */
2023
2024 gimple branch = last_stmt (bb);
2025 if (!branch || gimple_code (branch) != GIMPLE_COND)
2026 return;
2027
2028 if ((gimple_cond_code (branch) != NE_EXPR
2029 && gimple_cond_code (branch) != EQ_EXPR)
2030 || !integer_zerop (gimple_cond_rhs (branch)))
2031 return;
2032
2033 tree cond = gimple_cond_lhs (branch);
2034 if (!ipa_is_ssa_with_stmt_def (cond))
2035 return;
2036
2037 def = SSA_NAME_DEF_STMT (cond);
2038 if (!is_gimple_assign (def)
2039 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2040 || !integer_onep (gimple_assign_rhs2 (def)))
2041 return;
2042
2043 cond = gimple_assign_rhs1 (def);
2044 if (!ipa_is_ssa_with_stmt_def (cond))
2045 return;
2046
2047 def = SSA_NAME_DEF_STMT (cond);
2048
2049 if (is_gimple_assign (def)
2050 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2051 {
2052 cond = gimple_assign_rhs1 (def);
2053 if (!ipa_is_ssa_with_stmt_def (cond))
2054 return;
2055 def = SSA_NAME_DEF_STMT (cond);
2056 }
2057
2058 tree rec2;
2059 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2060 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2061 == ptrmemfunc_vbit_in_delta),
2062 NULL);
2063 if (rec != rec2)
2064 return;
2065
2066 index = ipa_get_param_decl_index (info, rec);
2067 if (index >= 0
2068 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2069 {
2070 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2071 cs->indirect_info->offset = offset;
2072 cs->indirect_info->agg_contents = 1;
2073 cs->indirect_info->member_ptr = 1;
2074 }
2075
2076 return;
2077 }
2078
2079 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2080 object referenced in the expression is a formal parameter of the caller
2081 FBI->node (described by FBI->info), create a call note for the
2082 statement. */
2083
2084 static void
2085 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2086 gimple call, tree target)
2087 {
2088 tree obj = OBJ_TYPE_REF_OBJECT (target);
2089 int index;
2090 HOST_WIDE_INT anc_offset;
2091
2092 if (!flag_devirtualize)
2093 return;
2094
2095 if (TREE_CODE (obj) != SSA_NAME)
2096 return;
2097
2098 struct ipa_node_params *info = fbi->info;
2099 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2100 {
2101 struct ipa_jump_func jfunc;
2102 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2103 return;
2104
2105 anc_offset = 0;
2106 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2107 gcc_assert (index >= 0);
2108 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2109 call, &jfunc))
2110 return;
2111 }
2112 else
2113 {
2114 struct ipa_jump_func jfunc;
2115 gimple stmt = SSA_NAME_DEF_STMT (obj);
2116 tree expr;
2117
2118 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2119 if (!expr)
2120 return;
2121 index = ipa_get_param_decl_index (info,
2122 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2123 gcc_assert (index >= 0);
2124 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2125 call, &jfunc, anc_offset))
2126 return;
2127 }
2128
2129 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2130 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2131 ii->offset = anc_offset;
2132 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2133 ii->otr_type = obj_type_ref_class (target);
2134 ii->polymorphic = 1;
2135 }
2136
2137 /* Analyze call statement CALL to determine whether and how it utilizes
2138 formal parameters of the caller FBI->node (described by FBI->info) and
2139 note calls made through them on the corresponding indirect edges. */
2140
2141 static void
2142 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2143 {
2144 tree target = gimple_call_fn (call);
2145
2146 if (!target
2147 || (TREE_CODE (target) != SSA_NAME
2148 && !virtual_method_call_p (target)))
2149 return;
2150
2151 struct cgraph_edge *cs = fbi->node->get_edge (call);
2152 /* If we previously turned the call into a direct call, there is
2153 no need to analyze. */
2154 if (cs && !cs->indirect_unknown_callee)
2155 return;
2156
2157 if (cs->indirect_info->polymorphic)
2158 {
2159 tree instance;
2160 tree target = gimple_call_fn (call);
2161 ipa_polymorphic_call_context context (current_function_decl,
2162 target, call, &instance);
2163
2164 gcc_checking_assert (cs->indirect_info->otr_type
2165 == obj_type_ref_class (target));
2166 gcc_checking_assert (cs->indirect_info->otr_token
2167 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2168
2169 cs->indirect_info->vptr_changed
2170 = !context.get_dynamic_type (instance,
2171 OBJ_TYPE_REF_OBJECT (target),
2172 obj_type_ref_class (target), call);
2173 cs->indirect_info->context = context;
2174 }
2175
2176 if (TREE_CODE (target) == SSA_NAME)
2177 ipa_analyze_indirect_call_uses (fbi, call, target);
2178 else if (virtual_method_call_p (target))
2179 ipa_analyze_virtual_call_uses (fbi, call, target);
2180 }
2181
2182
2183 /* Analyze the call statement STMT with respect to formal parameters
2184 (described in FBI->info) of the caller given by FBI->NODE. Currently it
2185 only checks whether formal parameters are called. */
2186
2187 static void
2188 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2189 {
2190 if (is_gimple_call (stmt))
2191 ipa_analyze_call_uses (fbi, stmt);
2192 }
2193
2194 /* Callback of walk_stmt_load_store_addr_ops, used for visit_load,
2195 visit_store and visit_addr alike. If OP is a parameter declaration,
2196 mark it as used in the info structure passed in DATA. */
2197
2198 static bool
2199 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2200 {
2201 struct ipa_node_params *info = (struct ipa_node_params *) data;
2202
2203 op = get_base_address (op);
2204 if (op
2205 && TREE_CODE (op) == PARM_DECL)
2206 {
2207 int index = ipa_get_param_decl_index (info, op);
2208 gcc_assert (index >= 0);
2209 ipa_set_param_used (info, index, true);
2210 }
2211
2212 return false;
2213 }
2214
2215 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2216 the findings in various structures of the associated ipa_node_params
2217 structure, such as parameter flags, notes etc. FBI holds various data about
2218 the function being analyzed. */
2219
2220 static void
2221 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2222 {
2223 gimple_stmt_iterator gsi;
2224 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2225 {
2226 gimple stmt = gsi_stmt (gsi);
2227
2228 if (is_gimple_debug (stmt))
2229 continue;
2230
2231 ipa_analyze_stmt_uses (fbi, stmt);
2232 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2233 visit_ref_for_mod_analysis,
2234 visit_ref_for_mod_analysis,
2235 visit_ref_for_mod_analysis);
2236 }
2237 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2238 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2239 visit_ref_for_mod_analysis,
2240 visit_ref_for_mod_analysis,
2241 visit_ref_for_mod_analysis);
2242 }
2243
2244 /* Calculate controlled uses of parameters of NODE. */
2245
2246 static void
2247 ipa_analyze_controlled_uses (struct cgraph_node *node)
2248 {
2249 struct ipa_node_params *info = IPA_NODE_REF (node);
2250
2251 for (int i = 0; i < ipa_get_param_count (info); i++)
2252 {
2253 tree parm = ipa_get_param (info, i);
2254 int controlled_uses = 0;
2255
2256 /* For SSA regs, see if the parameter is used. For non-SSA parameters
2257 we compute the flag during modification analysis. */
2258 if (is_gimple_reg (parm))
2259 {
2260 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2261 parm);
2262 if (ddef && !has_zero_uses (ddef))
2263 {
2264 imm_use_iterator imm_iter;
2265 use_operand_p use_p;
2266
2267 ipa_set_param_used (info, i, true);
2268 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2269 if (!is_gimple_call (USE_STMT (use_p)))
2270 {
2271 if (!is_gimple_debug (USE_STMT (use_p)))
2272 {
2273 controlled_uses = IPA_UNDESCRIBED_USE;
2274 break;
2275 }
2276 }
2277 else
2278 controlled_uses++;
2279 }
2280 else
2281 controlled_uses = 0;
2282 }
2283 else
2284 controlled_uses = IPA_UNDESCRIBED_USE;
2285 ipa_set_controlled_uses (info, i, controlled_uses);
2286 }
2287 }
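/* For example (an illustrative reading of the loop above): if the default
   definition of a parameter P is used only as an argument of two non-debug
   call statements, P ends up with controlled_uses == 2, whereas any other
   non-debug use makes the count IPA_UNDESCRIBED_USE.  */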
2288
2289 /* Free stuff in BI. */
2290
2291 static void
2292 free_ipa_bb_info (struct ipa_bb_info *bi)
2293 {
2294 bi->cg_edges.release ();
2295 bi->param_aa_statuses.release ();
2296 }
2297
2298 /* Dominator walker driving the analysis. */
2299
2300 class analysis_dom_walker : public dom_walker
2301 {
2302 public:
2303 analysis_dom_walker (struct func_body_info *fbi)
2304 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2305
2306 virtual void before_dom_children (basic_block);
2307
2308 private:
2309 struct func_body_info *m_fbi;
2310 };
2311
2312 void
2313 analysis_dom_walker::before_dom_children (basic_block bb)
2314 {
2315 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2316 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2317 }
2318
2319 /* Initialize the array describing properties of formal parameters
2320 of NODE, analyze their uses and compute jump functions associated
2321 with actual arguments of calls from within NODE. */
2322
2323 void
2324 ipa_analyze_node (struct cgraph_node *node)
2325 {
2326 struct func_body_info fbi;
2327 struct ipa_node_params *info;
2328
2329 ipa_check_create_node_params ();
2330 ipa_check_create_edge_args ();
2331 info = IPA_NODE_REF (node);
2332
2333 if (info->analysis_done)
2334 return;
2335 info->analysis_done = 1;
2336
2337 if (ipa_func_spec_opts_forbid_analysis_p (node))
2338 {
2339 for (int i = 0; i < ipa_get_param_count (info); i++)
2340 {
2341 ipa_set_param_used (info, i, true);
2342 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2343 }
2344 return;
2345 }
2346
2347 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2348 push_cfun (func);
2349 calculate_dominance_info (CDI_DOMINATORS);
2350 ipa_initialize_node_params (node);
2351 ipa_analyze_controlled_uses (node);
2352
2353 fbi.node = node;
2354 fbi.info = IPA_NODE_REF (node);
2355 fbi.bb_infos = vNULL;
2356 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2357 fbi.param_count = ipa_get_param_count (info);
2358 fbi.aa_walked = 0;
2359
2360 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2361 {
2362 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2363 bi->cg_edges.safe_push (cs);
2364 }
2365
2366 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2367 {
2368 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2369 bi->cg_edges.safe_push (cs);
2370 }
2371
2372 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2373
2374 int i;
2375 struct ipa_bb_info *bi;
2376 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2377 free_ipa_bb_info (bi);
2378 fbi.bb_infos.release ();
2379 free_dominance_info (CDI_DOMINATORS);
2380 pop_cfun ();
2381 }
2382
2383 /* Update the jump functions associated with call graph edge E when the call
2384 graph edge CS is being inlined, assuming that E->caller is already (possibly
2385 indirectly) inlined into CS->callee and that E has not been inlined. */
2386
2387 static void
2388 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2389 struct cgraph_edge *e)
2390 {
2391 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2392 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2393 int count = ipa_get_cs_argument_count (args);
2394 int i;
2395
2396 for (i = 0; i < count; i++)
2397 {
2398 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2399 struct ipa_polymorphic_call_context *dst_ctx
2400 = ipa_get_ith_polymorhic_call_context (args, i);
2401
2402 if (dst->type == IPA_JF_ANCESTOR)
2403 {
2404 struct ipa_jump_func *src;
2405 int dst_fid = dst->value.ancestor.formal_id;
2406 struct ipa_polymorphic_call_context *src_ctx
2407 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2408
2409 /* A variable number of arguments can cause havoc if we try to access
2410 an argument that does not exist on the inlined edge. So make sure we
2411 don't. */
2412 if (dst_fid >= ipa_get_cs_argument_count (top))
2413 {
2414 dst->type = IPA_JF_UNKNOWN;
2415 continue;
2416 }
2417
2418 src = ipa_get_ith_jump_func (top, dst_fid);
2419
2420 if (src_ctx && !src_ctx->useless_p ())
2421 {
2422 struct ipa_polymorphic_call_context ctx = *src_ctx;
2423
2424 /* TODO: Make type preserved safe WRT contexts. */
2425 if (!ipa_get_jf_ancestor_type_preserved (dst))
2426 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2427 ctx.offset_by (dst->value.ancestor.offset);
2428 if (!ctx.useless_p ())
2429 {
2430 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2431 count);
2432 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2433 }
2434 dst_ctx->combine_with (ctx);
2435 }
2436
2437 if (src->agg.items
2438 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2439 {
2440 struct ipa_agg_jf_item *item;
2441 int j;
2442
2443 /* Currently we do not produce clobber aggregate jump functions;
2444 replace this with merging when we do. */
2445 gcc_assert (!dst->agg.items);
2446
2447 dst->agg.items = vec_safe_copy (src->agg.items);
2448 dst->agg.by_ref = src->agg.by_ref;
2449 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2450 item->offset -= dst->value.ancestor.offset;
2451 }
2452
2453 if (src->type == IPA_JF_PASS_THROUGH
2454 && src->value.pass_through.operation == NOP_EXPR)
2455 {
2456 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2457 dst->value.ancestor.agg_preserved &=
2458 src->value.pass_through.agg_preserved;
2459 }
2460 else if (src->type == IPA_JF_ANCESTOR)
2461 {
2462 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2463 dst->value.ancestor.offset += src->value.ancestor.offset;
2464 dst->value.ancestor.agg_preserved &=
2465 src->value.ancestor.agg_preserved;
2466 }
2467 else
2468 dst->type = IPA_JF_UNKNOWN;
2469 }
2470 else if (dst->type == IPA_JF_PASS_THROUGH)
2471 {
2472 struct ipa_jump_func *src;
2473 /* We must check the range because of calls with a variable number of
2474 arguments, and we cannot combine jump functions with operations. */
2475 if (dst->value.pass_through.operation == NOP_EXPR
2476 && (dst->value.pass_through.formal_id
2477 < ipa_get_cs_argument_count (top)))
2478 {
2479 int dst_fid = dst->value.pass_through.formal_id;
2480 src = ipa_get_ith_jump_func (top, dst_fid);
2481 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2482 struct ipa_polymorphic_call_context *src_ctx
2483 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2484
2485 if (src_ctx && !src_ctx->useless_p ())
2486 {
2487 struct ipa_polymorphic_call_context ctx = *src_ctx;
2488
2489 /* TODO: Make type preserved safe WRT contexts. */
2490 if (!ipa_get_jf_pass_through_type_preserved (dst))
2491 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2492 if (!ctx.useless_p ())
2493 {
2494 if (!dst_ctx)
2495 {
2496 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2497 count);
2498 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2499 }
2500 dst_ctx->combine_with (ctx);
2501 }
2502 }
2503 switch (src->type)
2504 {
2505 case IPA_JF_UNKNOWN:
2506 dst->type = IPA_JF_UNKNOWN;
2507 break;
2508 case IPA_JF_CONST:
2509 ipa_set_jf_cst_copy (dst, src);
2510 break;
2511
2512 case IPA_JF_PASS_THROUGH:
2513 {
2514 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2515 enum tree_code operation;
2516 operation = ipa_get_jf_pass_through_operation (src);
2517
2518 if (operation == NOP_EXPR)
2519 {
2520 bool agg_p;
2521 agg_p = dst_agg_p
2522 && ipa_get_jf_pass_through_agg_preserved (src);
2523 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2524 }
2525 else
2526 {
2527 tree operand = ipa_get_jf_pass_through_operand (src);
2528 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2529 operation);
2530 }
2531 break;
2532 }
2533 case IPA_JF_ANCESTOR:
2534 {
2535 bool agg_p;
2536 agg_p = dst_agg_p
2537 && ipa_get_jf_ancestor_agg_preserved (src);
2538 ipa_set_ancestor_jf (dst,
2539 ipa_get_jf_ancestor_offset (src),
2540 ipa_get_jf_ancestor_formal_id (src),
2541 agg_p);
2542 break;
2543 }
2544 default:
2545 gcc_unreachable ();
2546 }
2547
2548 if (src->agg.items
2549 && (dst_agg_p || !src->agg.by_ref))
2550 {
2551 /* Currently we do not produce clobber aggregate jump
2552 functions; replace this with merging when we do. */
2553 gcc_assert (!dst->agg.items);
2554
2555 dst->agg.by_ref = src->agg.by_ref;
2556 dst->agg.items = vec_safe_copy (src->agg.items);
2557 }
2558 }
2559 else
2560 dst->type = IPA_JF_UNKNOWN;
2561 }
2562 }
2563 }
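/* A worked example of the composition above (illustrative): if DST is an
   ancestor jump function with offset O2 whose formal_id refers to a SRC
   jump function that is itself an ancestor with offset O1, the result is
   an ancestor jump function with offset O1 + O2, SRC's formal_id, and the
   conjunction of the two agg_preserved flags.  */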
2564
2565 /* If TARGET is an addr_expr of a function declaration, make it the
2566 (possibly SPECULATIVE) destination of an indirect edge IE and return the
2567 edge. Otherwise, return NULL. */
2568
2569 struct cgraph_edge *
2570 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2571 bool speculative)
2572 {
2573 struct cgraph_node *callee;
2574 struct inline_edge_summary *es = inline_edge_summary (ie);
2575 bool unreachable = false;
2576
2577 if (TREE_CODE (target) == ADDR_EXPR)
2578 target = TREE_OPERAND (target, 0);
2579 if (TREE_CODE (target) != FUNCTION_DECL)
2580 {
2581 target = canonicalize_constructor_val (target, NULL);
2582 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2583 {
2584 if (ie->indirect_info->member_ptr)
2585 /* Member pointer call that goes through a VMT lookup. */
2586 return NULL;
2587
2588 if (dump_enabled_p ())
2589 {
2590 location_t loc = gimple_location_safe (ie->call_stmt);
2591 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2592 "discovered direct call to non-function in %s/%i, "
2593 "making it __builtin_unreachable\n",
2594 ie->caller->name (), ie->caller->order);
2595 }
2596
2597 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2598 callee = cgraph_node::get_create (target);
2599 unreachable = true;
2600 }
2601 else
2602 callee = cgraph_node::get (target);
2603 }
2604 else
2605 callee = cgraph_node::get (target);
2606
2607 /* Because may-edges are not explicitly represented and the vtable may be
2608 external, we may create the first reference to the object in the unit. */
2609 if (!callee || callee->global.inlined_to)
2610 {
2611
2612 /* We had better ensure we can refer to it.
2613 In the case of static functions we are out of luck, since we have
2614 already removed the body. In the case of public functions we may or
2615 may not introduce the reference. */
2616 if (!canonicalize_constructor_val (target, NULL)
2617 || !TREE_PUBLIC (target))
2618 {
2619 if (dump_file)
2620 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2621 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2622 xstrdup (ie->caller->name ()),
2623 ie->caller->order,
2624 xstrdup (ie->callee->name ()),
2625 ie->callee->order);
2626 return NULL;
2627 }
2628 callee = cgraph_node::get_create (target);
2629 }
2630
2631 /* If the edge is already speculative, check whether the speculation agrees. */
2632 if (speculative && ie->speculative)
2633 {
2634 struct cgraph_edge *e2;
2635 struct ipa_ref *ref;
2636 ie->speculative_call_info (e2, ie, ref);
2637 if (e2->callee->ultimate_alias_target ()
2638 != callee->ultimate_alias_target ())
2639 {
2640 if (dump_file)
2641 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2642 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2643 xstrdup (ie->caller->name ()),
2644 ie->caller->order,
2645 xstrdup (callee->name ()),
2646 callee->order,
2647 xstrdup (e2->callee->name ()),
2648 e2->callee->order);
2649 }
2650 else
2651 {
2652 if (dump_file)
2653 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2654 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2655 xstrdup (ie->caller->name ()),
2656 ie->caller->order,
2657 xstrdup (callee->name ()),
2658 callee->order);
2659 }
2660 return NULL;
2661 }
2662
2663 if (!dbg_cnt (devirt))
2664 return NULL;
2665
2666 ipa_check_create_node_params ();
2667
2668 /* We cannot make edges to inline clones. It is a bug if someone removed
2669 the cgraph node too early. */
2670 gcc_assert (!callee->global.inlined_to);
2671
2672 if (dump_file && !unreachable)
2673 {
2674 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2675 "(%s/%i -> %s/%i), for stmt ",
2676 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2677 speculative ? "speculative" : "known",
2678 xstrdup (ie->caller->name ()),
2679 ie->caller->order,
2680 xstrdup (callee->name ()),
2681 callee->order);
2682 if (ie->call_stmt)
2683 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2684 else
2685 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2686 }
2687 if (dump_enabled_p ())
2688 {
2689 location_t loc = gimple_location_safe (ie->call_stmt);
2690
2691 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2692 "converting indirect call in %s to direct call to %s\n",
2693 ie->caller->name (), callee->name ());
2694 }
2695 if (!speculative)
2696 ie = ie->make_direct (callee);
2697 else
2698 {
2699 if (!callee->can_be_discarded_p ())
2700 {
2701 cgraph_node *alias;
2702 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2703 if (alias)
2704 callee = alias;
2705 }
2706 ie = ie->make_speculative
2707 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2708 }
2709 es = inline_edge_summary (ie);
2710 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2711 - eni_size_weights.call_cost);
2712 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2713 - eni_time_weights.call_cost);
2714
2715 return ie;
2716 }
2717
2718 /* Retrieve the value from aggregate jump function AGG at the given OFFSET,
2719 or return NULL if there is none. BY_REF specifies whether the value has
2720 to be passed by reference or by value. */
2721
2722 tree
2723 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2724 HOST_WIDE_INT offset, bool by_ref)
2725 {
2726 struct ipa_agg_jf_item *item;
2727 int i;
2728
2729 if (by_ref != agg->by_ref)
2730 return NULL;
2731
2732 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2733 if (item->offset == offset)
2734 {
2735 /* Currently we do not have clobber values; return NULL for them once
2736 we do. */
2737 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2738 return item->value;
2739 }
2740 return NULL;
2741 }
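/* Usage sketch (mirroring try_make_edge_direct_simple_call further below):
   the known target of an indirect edge IE whose callee address is loaded
   from an aggregate described by jump function JFUNC can be looked up as

     tree target = ipa_find_agg_cst_for_param (&jfunc->agg,
                                               ie->indirect_info->offset,
                                               ie->indirect_info->by_ref);

   and a non-NULL result is always a gimple IP invariant.  */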
2742
2743 /* Remove a reference to SYMBOL from the list of references of a node given by
2744 reference description RDESC. Return true if the reference has been
2745 successfully found and removed. */
2746
2747 static bool
2748 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2749 {
2750 struct ipa_ref *to_del;
2751 struct cgraph_edge *origin;
2752
2753 origin = rdesc->cs;
2754 if (!origin)
2755 return false;
2756 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2757 origin->lto_stmt_uid);
2758 if (!to_del)
2759 return false;
2760
2761 to_del->remove_reference ();
2762 if (dump_file)
2763 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2764 xstrdup (origin->caller->name ()),
2765 origin->caller->order, xstrdup (symbol->name ()));
2766 return true;
2767 }
2768
2769 /* If JFUNC has a reference description with refcount different from
2770 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2771 NULL. JFUNC must be a constant jump function. */
2772
2773 static struct ipa_cst_ref_desc *
2774 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2775 {
2776 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2777 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2778 return rdesc;
2779 else
2780 return NULL;
2781 }
2782
2783 /* If the value of constant jump function JFUNC is an address of a function
2784 declaration, return the associated call graph node. Otherwise return
2785 NULL. */
2786
2787 static cgraph_node *
2788 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2789 {
2790 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2791 tree cst = ipa_get_jf_constant (jfunc);
2792 if (TREE_CODE (cst) != ADDR_EXPR
2793 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2794 return NULL;
2795
2796 return cgraph_node::get (TREE_OPERAND (cst, 0));
2797 }
2798
2799
2800 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2801 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2802 the edge specified in the rdesc. Return false if either the symbol or the
2803 reference could not be found, otherwise return true. */
2804
2805 static bool
2806 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2807 {
2808 struct ipa_cst_ref_desc *rdesc;
2809 if (jfunc->type == IPA_JF_CONST
2810 && (rdesc = jfunc_rdesc_usable (jfunc))
2811 && --rdesc->refcount == 0)
2812 {
2813 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2814 if (!symbol)
2815 return false;
2816
2817 return remove_described_reference (symbol, rdesc);
2818 }
2819 return true;
2820 }
2821
2822 /* Try to find a destination for indirect edge IE that corresponds to a simple
2823 call or a call of a member function pointer and where the destination is a
2824 pointer formal parameter described by jump function JFUNC. If it can be
2825 determined, return the newly direct edge, otherwise return NULL.
2826 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2827
2828 static struct cgraph_edge *
2829 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2830 struct ipa_jump_func *jfunc,
2831 struct ipa_node_params *new_root_info)
2832 {
2833 struct cgraph_edge *cs;
2834 tree target;
2835 bool agg_contents = ie->indirect_info->agg_contents;
2836
2837 if (ie->indirect_info->agg_contents)
2838 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2839 ie->indirect_info->offset,
2840 ie->indirect_info->by_ref);
2841 else
2842 target = ipa_value_from_jfunc (new_root_info, jfunc);
2843 if (!target)
2844 return NULL;
2845 cs = ipa_make_edge_direct_to_target (ie, target);
2846
2847 if (cs && !agg_contents)
2848 {
2849 bool ok;
2850 gcc_checking_assert (cs->callee
2851 && (cs != ie
2852 || jfunc->type != IPA_JF_CONST
2853 || !cgraph_node_for_jfunc (jfunc)
2854 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2855 ok = try_decrement_rdesc_refcount (jfunc);
2856 gcc_checking_assert (ok);
2857 }
2858
2859 return cs;
2860 }
2861
2862 /* Return the target to be used in cases of impossible devirtualization. IE
2863 and target (the latter can be NULL) are dumped when dumping is enabled. */
2864
2865 tree
2866 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2867 {
2868 if (dump_file)
2869 {
2870 if (target)
2871 fprintf (dump_file,
2872 "Type inconsistent devirtualization: %s/%i->%s\n",
2873 ie->caller->name (), ie->caller->order,
2874 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2875 else
2876 fprintf (dump_file,
2877 "No devirtualization target in %s/%i\n",
2878 ie->caller->name (), ie->caller->order);
2879 }
2880 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2881 cgraph_node::get_create (new_target);
2882 return new_target;
2883 }
2884
2885 /* Try to find a destination for indirect edge IE that corresponds to a
2886 virtual call based on a formal parameter described by jump function
2887 JFUNC; if it can be determined, make the edge direct and return it.
2888 Otherwise, return NULL. CTX describes the polymorphic context that the
2889 parameter the call is based on brings along with it. */
2890
2891 static struct cgraph_edge *
2892 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2893 struct ipa_jump_func *jfunc,
2894 struct ipa_polymorphic_call_context ctx)
2895 {
2896 tree target = NULL;
2897 bool speculative = false;
2898
2899 if (!flag_devirtualize)
2900 return NULL;
2901
2902 gcc_assert (!ie->indirect_info->by_ref);
2903
2904 /* Try to do lookup via known virtual table pointer value. */
2905 if (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively)
2906 {
2907 tree vtable;
2908 unsigned HOST_WIDE_INT offset;
2909 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2910 ie->indirect_info->offset,
2911 true);
2912 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2913 {
2914 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2915 vtable, offset);
2916 if (t)
2917 {
2918 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2919 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2920 || !possible_polymorphic_call_target_p
2921 (ie, cgraph_node::get (t)))
2922 {
2923 /* Do not speculate builtin_unreachable, it is stupid! */
2924 if (!ie->indirect_info->vptr_changed)
2925 target = ipa_impossible_devirt_target (ie, target);
2926 }
2927 else
2928 {
2929 target = t;
2930 speculative = ie->indirect_info->vptr_changed;
2931 }
2932 }
2933 }
2934 }
2935
2936 ipa_polymorphic_call_context ie_context (ie);
2937 vec <cgraph_node *> targets;
2938 bool final;
2939
2940 ctx.offset_by (ie->indirect_info->offset);
2941 if (ie->indirect_info->vptr_changed)
2942 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2943 ie->indirect_info->otr_type);
2944 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2945 targets = possible_polymorphic_call_targets
2946 (ie->indirect_info->otr_type,
2947 ie->indirect_info->otr_token,
2948 ctx, &final);
2949 if (final && targets.length () <= 1)
2950 {
2951 if (targets.length () == 1)
2952 target = targets[0]->decl;
2953 else
2954 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2955 }
2956 else if (!target && flag_devirtualize_speculatively
2957 && !ie->speculative && ie->maybe_hot_p ())
2958 {
2959 cgraph_node *n;
2960 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2961 ie->indirect_info->otr_token,
2962 ie->indirect_info->context);
2963 if (n)
2964 {
2965 target = n->decl;
2966 speculative = true;
2967 }
2968 }
2969
2970 if (target)
2971 {
2972 if (!possible_polymorphic_call_target_p
2973 (ie, cgraph_node::get_create (target)))
2974 {
2975 if (speculative)
2976 return NULL;
2977 target = ipa_impossible_devirt_target (ie, target);
2978 }
2979 return ipa_make_edge_direct_to_target (ie, target, speculative);
2980 }
2981 else
2982 return NULL;
2983 }
2984
2985 /* Update the param called notes associated with NODE when CS is being inlined,
2986 assuming NODE is (potentially indirectly) inlined into CS->callee.
2987 Moreover, if the callee is discovered to be constant, create a new cgraph
2988 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2989 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2990
2991 static bool
2992 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2993 struct cgraph_node *node,
2994 vec<cgraph_edge *> *new_edges)
2995 {
2996 struct ipa_edge_args *top;
2997 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2998 struct ipa_node_params *new_root_info;
2999 bool res = false;
3000
3001 ipa_check_create_edge_args ();
3002 top = IPA_EDGE_REF (cs);
3003 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3004 ? cs->caller->global.inlined_to
3005 : cs->caller);
3006
3007 for (ie = node->indirect_calls; ie; ie = next_ie)
3008 {
3009 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3010 struct ipa_jump_func *jfunc;
3011 int param_index;
3012
3013 next_ie = ie->next_callee;
3014
3015 if (ici->param_index == -1)
3016 continue;
3017
3018 /* We must check the range because of calls with a variable number of arguments. */
3019 if (ici->param_index >= ipa_get_cs_argument_count (top))
3020 {
3021 ici->param_index = -1;
3022 continue;
3023 }
3024
3025 param_index = ici->param_index;
3026 jfunc = ipa_get_ith_jump_func (top, param_index);
3027
3028 if (!flag_indirect_inlining)
3029 new_direct_edge = NULL;
3030 else if (ici->polymorphic)
3031 {
3032 ipa_polymorphic_call_context ctx;
3033 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3034 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3035 }
3036 else
3037 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3038 new_root_info);
3039 /* If speculation was removed, then we need to do nothing. */
3040 if (new_direct_edge && new_direct_edge != ie)
3041 {
3042 new_direct_edge->indirect_inlining_edge = 1;
3043 top = IPA_EDGE_REF (cs);
3044 res = true;
3045 }
3046 else if (new_direct_edge)
3047 {
3048 new_direct_edge->indirect_inlining_edge = 1;
3049 if (new_direct_edge->call_stmt)
3050 new_direct_edge->call_stmt_cannot_inline_p
3051 = !gimple_check_call_matching_types (
3052 new_direct_edge->call_stmt,
3053 new_direct_edge->callee->decl, false);
3054 if (new_edges)
3055 {
3056 new_edges->safe_push (new_direct_edge);
3057 res = true;
3058 }
3059 top = IPA_EDGE_REF (cs);
3060 }
3061 else if (jfunc->type == IPA_JF_PASS_THROUGH
3062 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3063 {
3064 if ((ici->agg_contents
3065 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3066 || (ici->polymorphic
3067 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3068 ici->param_index = -1;
3069 else
3070 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3071 }
3072 else if (jfunc->type == IPA_JF_ANCESTOR)
3073 {
3074 if ((ici->agg_contents
3075 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3076 || (ici->polymorphic
3077 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3078 ici->param_index = -1;
3079 else
3080 {
3081 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3082 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3083 }
3084 }
3085 else
3086 /* Either we can find a destination for this edge now or never. */
3087 ici->param_index = -1;
3088 }
3089
3090 return res;
3091 }
3092
3093 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
3094 cgraph_edges when CS has been inlined and invoke
3095 update_indirect_edges_after_inlining on all nodes and
3096 update_jump_functions_after_inlining on all non-inlined edges that lead out
3097 of this subtree. Newly discovered indirect edges will be added to
3098 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
3099 were created. */
3100
3101 static bool
3102 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3103 struct cgraph_node *node,
3104 vec<cgraph_edge *> *new_edges)
3105 {
3106 struct cgraph_edge *e;
3107 bool res;
3108
3109 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3110
3111 for (e = node->callees; e; e = e->next_callee)
3112 if (!e->inline_failed)
3113 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3114 else
3115 update_jump_functions_after_inlining (cs, e);
3116 for (e = node->indirect_calls; e; e = e->next_callee)
3117 update_jump_functions_after_inlining (cs, e);
3118
3119 return res;
3120 }
3121
3122 /* Combine two controlled uses counts as done during inlining. */
3123
3124 static int
3125 combine_controlled_uses_counters (int c, int d)
3126 {
3127 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3128 return IPA_UNDESCRIBED_USE;
3129 else
3130 return c + d - 1;
3131 }
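/* For example, with c == 3 controlled uses of a parameter in the new root
   and d == 2 controlled uses of the corresponding parameter in the inlined
   callee, the combined count is 3 + 2 - 1 == 4, because the inlined call
   site itself ceases to be a use.  */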
3132
3133 /* Propagate the number of controlled uses from CS->callee to the new root
3134 of the tree of inlined nodes. */
3135
3136 static void
3137 propagate_controlled_uses (struct cgraph_edge *cs)
3138 {
3139 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3140 struct cgraph_node *new_root = cs->caller->global.inlined_to
3141 ? cs->caller->global.inlined_to : cs->caller;
3142 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3143 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3144 int count, i;
3145
3146 count = MIN (ipa_get_cs_argument_count (args),
3147 ipa_get_param_count (old_root_info));
3148 for (i = 0; i < count; i++)
3149 {
3150 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3151 struct ipa_cst_ref_desc *rdesc;
3152
3153 if (jf->type == IPA_JF_PASS_THROUGH)
3154 {
3155 int src_idx, c, d;
3156 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3157 c = ipa_get_controlled_uses (new_root_info, src_idx);
3158 d = ipa_get_controlled_uses (old_root_info, i);
3159
3160 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3161 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3162 c = combine_controlled_uses_counters (c, d);
3163 ipa_set_controlled_uses (new_root_info, src_idx, c);
3164 if (c == 0 && new_root_info->ipcp_orig_node)
3165 {
3166 struct cgraph_node *n;
3167 struct ipa_ref *ref;
3168 tree t = new_root_info->known_csts[src_idx];
3169
3170 if (t && TREE_CODE (t) == ADDR_EXPR
3171 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3172 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3173 && (ref = new_root->find_reference (n, NULL, 0)))
3174 {
3175 if (dump_file)
3176 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3177 "reference from %s/%i to %s/%i.\n",
3178 xstrdup (new_root->name ()),
3179 new_root->order,
3180 xstrdup (n->name ()), n->order);
3181 ref->remove_reference ();
3182 }
3183 }
3184 }
3185 else if (jf->type == IPA_JF_CONST
3186 && (rdesc = jfunc_rdesc_usable (jf)))
3187 {
3188 int d = ipa_get_controlled_uses (old_root_info, i);
3189 int c = rdesc->refcount;
3190 rdesc->refcount = combine_controlled_uses_counters (c, d);
3191 if (rdesc->refcount == 0)
3192 {
3193 tree cst = ipa_get_jf_constant (jf);
3194 struct cgraph_node *n;
3195 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3196 && TREE_CODE (TREE_OPERAND (cst, 0))
3197 == FUNCTION_DECL);
3198 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3199 if (n)
3200 {
3201 struct cgraph_node *clone;
3202 bool ok;
3203 ok = remove_described_reference (n, rdesc);
3204 gcc_checking_assert (ok);
3205
3206 clone = cs->caller;
3207 while (clone->global.inlined_to
3208 && clone != rdesc->cs->caller
3209 && IPA_NODE_REF (clone)->ipcp_orig_node)
3210 {
3211 struct ipa_ref *ref;
3212 ref = clone->find_reference (n, NULL, 0);
3213 if (ref)
3214 {
3215 if (dump_file)
3216 fprintf (dump_file, "ipa-prop: Removing "
3217 "cloning-created reference "
3218 "from %s/%i to %s/%i.\n",
3219 xstrdup (clone->name ()),
3220 clone->order,
3221 xstrdup (n->name ()),
3222 n->order);
3223 ref->remove_reference ();
3224 }
3225 clone = clone->callers->caller;
3226 }
3227 }
3228 }
3229 }
3230 }
3231
3232 for (i = ipa_get_param_count (old_root_info);
3233 i < ipa_get_cs_argument_count (args);
3234 i++)
3235 {
3236 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3237
3238 if (jf->type == IPA_JF_CONST)
3239 {
3240 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3241 if (rdesc)
3242 rdesc->refcount = IPA_UNDESCRIBED_USE;
3243 }
3244 else if (jf->type == IPA_JF_PASS_THROUGH)
3245 ipa_set_controlled_uses (new_root_info,
3246 jf->value.pass_through.formal_id,
3247 IPA_UNDESCRIBED_USE);
3248 }
3249 }
3250
3251 /* Update jump functions and call note functions on inlining the call site
3252 CS. CS is expected to lead to a node already cloned by
3253 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added
3254 to *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges
3255 were created. */
3256
3257 bool
3258 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3259 vec<cgraph_edge *> *new_edges)
3260 {
3261 bool changed;
3262 /* Do nothing if the preparation phase has not been carried out yet
3263 (i.e. during early inlining). */
3264 if (!ipa_node_params_vector.exists ())
3265 return false;
3266 gcc_assert (ipa_edge_args_vector);
3267
3268 propagate_controlled_uses (cs);
3269 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3270
3271 return changed;
3272 }
3273
3274 /* Frees all dynamically allocated structures that the argument info points
3275 to. */
3276
3277 void
3278 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3279 {
3280 vec_free (args->jump_functions);
3281 memset (args, 0, sizeof (*args));
3282 }
3283
3284 /* Free all ipa_edge_args structures. */
3285
3286 void
3287 ipa_free_all_edge_args (void)
3288 {
3289 int i;
3290 struct ipa_edge_args *args;
3291
3292 if (!ipa_edge_args_vector)
3293 return;
3294
3295 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3296 ipa_free_edge_args_substructures (args);
3297
3298 vec_free (ipa_edge_args_vector);
3299 }
3300
3301 /* Frees all dynamically allocated structures that the param info points
3302 to. */
3303
3304 void
3305 ipa_free_node_params_substructures (struct ipa_node_params *info)
3306 {
3307 info->descriptors.release ();
3308 free (info->lattices);
3309 /* Lattice values and their sources are deallocated with their allocation
3310 pool. */
3311 info->known_csts.release ();
3312 info->known_contexts.release ();
3313 memset (info, 0, sizeof (*info));
3314 }
3315
3316 /* Free all ipa_node_params structures. */
3317
3318 void
3319 ipa_free_all_node_params (void)
3320 {
3321 int i;
3322 struct ipa_node_params *info;
3323
3324 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3325 ipa_free_node_params_substructures (info);
3326
3327 ipa_node_params_vector.release ();
3328 }
3329
3330 /* Set the aggregate replacements of NODE to be AGGVALS. */
3331
3332 void
3333 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3334 struct ipa_agg_replacement_value *aggvals)
3335 {
3336 if (vec_safe_length (ipa_node_agg_replacements)
3337 <= (unsigned) symtab->cgraph_max_uid)
3338 vec_safe_grow_cleared (ipa_node_agg_replacements,
3339 symtab->cgraph_max_uid + 1);
3340
3341 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3342 }
3343
3344 /* Hook that is called by cgraph.c when an edge is removed. */
3345
3346 static void
3347 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3348 {
3349 struct ipa_edge_args *args;
3350
3351 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3352 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3353 return;
3354
3355 args = IPA_EDGE_REF (cs);
3356 if (args->jump_functions)
3357 {
3358 struct ipa_jump_func *jf;
3359 int i;
3360 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3361 {
3362 struct ipa_cst_ref_desc *rdesc;
3363 try_decrement_rdesc_refcount (jf);
3364 if (jf->type == IPA_JF_CONST
3365 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3366 && rdesc->cs == cs)
3367 rdesc->cs = NULL;
3368 }
3369 }
3370
3371 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3372 }
3373
3374 /* Hook that is called by cgraph.c when a node is removed. */
3375
3376 static void
3377 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3378 {
3379 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3380 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3381 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3382 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3383 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3384 }
3385
3386 /* Hook that is called by cgraph.c when an edge is duplicated. */
3387
3388 static void
3389 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3390 __attribute__((unused)) void *data)
3391 {
3392 struct ipa_edge_args *old_args, *new_args;
3393 unsigned int i;
3394
3395 ipa_check_create_edge_args ();
3396
3397 old_args = IPA_EDGE_REF (src);
3398 new_args = IPA_EDGE_REF (dst);
3399
3400 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3401 if (old_args->polymorphic_call_contexts)
3402 new_args->polymorphic_call_contexts
3403 = vec_safe_copy (old_args->polymorphic_call_contexts);
3404
3405 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3406 {
3407 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3408 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3409
3410 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3411
3412 if (src_jf->type == IPA_JF_CONST)
3413 {
3414 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3415
3416 if (!src_rdesc)
3417 dst_jf->value.constant.rdesc = NULL;
3418 else if (src->caller == dst->caller)
3419 {
3420 struct ipa_ref *ref;
3421 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3422 gcc_checking_assert (n);
3423 ref = src->caller->find_reference (n, src->call_stmt,
3424 src->lto_stmt_uid);
3425 gcc_checking_assert (ref);
3426 dst->caller->clone_reference (ref, ref->stmt);
3427
3428 gcc_checking_assert (ipa_refdesc_pool);
3429 struct ipa_cst_ref_desc *dst_rdesc
3430 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3431 dst_rdesc->cs = dst;
3432 dst_rdesc->refcount = src_rdesc->refcount;
3433 dst_rdesc->next_duplicate = NULL;
3434 dst_jf->value.constant.rdesc = dst_rdesc;
3435 }
3436 else if (src_rdesc->cs == src)
3437 {
3438 struct ipa_cst_ref_desc *dst_rdesc;
3439 gcc_checking_assert (ipa_refdesc_pool);
3440 dst_rdesc
3441 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3442 dst_rdesc->cs = dst;
3443 dst_rdesc->refcount = src_rdesc->refcount;
3444 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3445 src_rdesc->next_duplicate = dst_rdesc;
3446 dst_jf->value.constant.rdesc = dst_rdesc;
3447 }
3448 else
3449 {
3450 struct ipa_cst_ref_desc *dst_rdesc;
3451 /* This can happen during inlining, when a JFUNC can refer to a
3452 reference taken in a function up in the tree of inline clones.
3453 We need to find the duplicate that refers to our tree of
3454 inline clones. */
3455
3456 gcc_assert (dst->caller->global.inlined_to);
3457 for (dst_rdesc = src_rdesc->next_duplicate;
3458 dst_rdesc;
3459 dst_rdesc = dst_rdesc->next_duplicate)
3460 {
3461 struct cgraph_node *top;
3462 top = dst_rdesc->cs->caller->global.inlined_to
3463 ? dst_rdesc->cs->caller->global.inlined_to
3464 : dst_rdesc->cs->caller;
3465 if (dst->caller->global.inlined_to == top)
3466 break;
3467 }
3468 gcc_assert (dst_rdesc);
3469 dst_jf->value.constant.rdesc = dst_rdesc;
3470 }
3471 }
3472 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3473 && src->caller == dst->caller)
3474 {
3475 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3476 ? dst->caller->global.inlined_to : dst->caller;
3477 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3478 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3479
3480 int c = ipa_get_controlled_uses (root_info, idx);
3481 if (c != IPA_UNDESCRIBED_USE)
3482 {
3483 c++;
3484 ipa_set_controlled_uses (root_info, idx, c);
3485 }
3486 }
3487 }
3488 }
3489
3490 /* Hook that is called by cgraph.c when a node is duplicated. */
3491
3492 static void
3493 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3494 ATTRIBUTE_UNUSED void *data)
3495 {
3496 struct ipa_node_params *old_info, *new_info;
3497 struct ipa_agg_replacement_value *old_av, *new_av;
3498
3499 ipa_check_create_node_params ();
3500 old_info = IPA_NODE_REF (src);
3501 new_info = IPA_NODE_REF (dst);
3502
3503 new_info->descriptors = old_info->descriptors.copy ();
3504 new_info->lattices = NULL;
3505 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3506
3507 new_info->analysis_done = old_info->analysis_done;
3508 new_info->node_enqueued = old_info->node_enqueued;
3509
3510 old_av = ipa_get_agg_replacements_for_node (src);
3511 if (!old_av)
3512 return;
3513
3514 new_av = NULL;
3515 while (old_av)
3516 {
3517 struct ipa_agg_replacement_value *v;
3518
3519 v = ggc_alloc<ipa_agg_replacement_value> ();
3520 memcpy (v, old_av, sizeof (*v));
3521 v->next = new_av;
3522 new_av = v;
3523 old_av = old_av->next;
3524 }
3525 ipa_set_node_agg_value_chain (dst, new_av);
3526 }
3527
3528
3529 /* Analyze a function newly added to the callgraph. */
3530
3531 static void
3532 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3533 {
3534 if (node->has_gimple_body_p ())
3535 ipa_analyze_node (node);
3536 }
3537
3538 /* Register our cgraph hooks if they are not already there. */
3539
3540 void
3541 ipa_register_cgraph_hooks (void)
3542 {
3543 if (!edge_removal_hook_holder)
3544 edge_removal_hook_holder =
3545 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3546 if (!node_removal_hook_holder)
3547 node_removal_hook_holder =
3548 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3549 if (!edge_duplication_hook_holder)
3550 edge_duplication_hook_holder =
3551 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3552 if (!node_duplication_hook_holder)
3553 node_duplication_hook_holder =
3554 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3555 function_insertion_hook_holder =
3556 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3557 }
3558
3559 /* Unregister our cgraph hooks. */
3560
3561 static void
3562 ipa_unregister_cgraph_hooks (void)
3563 {
3564 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3565 edge_removal_hook_holder = NULL;
3566 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3567 node_removal_hook_holder = NULL;
3568 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3569 edge_duplication_hook_holder = NULL;
3570 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3571 node_duplication_hook_holder = NULL;
3572 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3573 function_insertion_hook_holder = NULL;
3574 }
3575
3576 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3577 longer needed after ipa-cp. */
3578
3579 void
3580 ipa_free_all_structures_after_ipa_cp (void)
3581 {
3582 if (!optimize)
3583 {
3584 ipa_free_all_edge_args ();
3585 ipa_free_all_node_params ();
3586 free_alloc_pool (ipcp_sources_pool);
3587 free_alloc_pool (ipcp_cst_values_pool);
3588 free_alloc_pool (ipcp_poly_ctx_values_pool);
3589 free_alloc_pool (ipcp_agg_lattice_pool);
3590 ipa_unregister_cgraph_hooks ();
3591 if (ipa_refdesc_pool)
3592 free_alloc_pool (ipa_refdesc_pool);
3593 }
3594 }
3595
3596 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3597 longer needed after indirect inlining. */
3598
3599 void
3600 ipa_free_all_structures_after_iinln (void)
3601 {
3602 ipa_free_all_edge_args ();
3603 ipa_free_all_node_params ();
3604 ipa_unregister_cgraph_hooks ();
3605 if (ipcp_sources_pool)
3606 free_alloc_pool (ipcp_sources_pool);
3607 if (ipcp_cst_values_pool)
3608 free_alloc_pool (ipcp_cst_values_pool);
3609 if (ipcp_poly_ctx_values_pool)
3610 free_alloc_pool (ipcp_poly_ctx_values_pool);
3611 if (ipcp_agg_lattice_pool)
3612 free_alloc_pool (ipcp_agg_lattice_pool);
3613 if (ipa_refdesc_pool)
3614 free_alloc_pool (ipa_refdesc_pool);
3615 }
3616
3617 /* Print ipa_tree_map data structures of function NODE to F. */
3619
3620 void
3621 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3622 {
3623 int i, count;
3624 struct ipa_node_params *info;
3625
3626 if (!node->definition)
3627 return;
3628 info = IPA_NODE_REF (node);
3629 fprintf (f, " function %s/%i parameter descriptors:\n",
3630 node->name (), node->order);
3631 count = ipa_get_param_count (info);
3632 for (i = 0; i < count; i++)
3633 {
3634 int c;
3635
3636 fprintf (f, " ");
3637 ipa_dump_param (f, info, i);
3638 if (ipa_is_param_used (info, i))
3639 fprintf (f, " used");
3640 c = ipa_get_controlled_uses (info, i);
3641 if (c == IPA_UNDESCRIBED_USE)
3642 fprintf (f, " undescribed_use");
3643 else
3644 fprintf (f, " controlled_uses=%i", c);
3645 fprintf (f, "\n");
3646 }
3647 }
3648
3649 /* Print ipa_tree_map data structures of all functions in the
3650 callgraph to F. */
3651
3652 void
3653 ipa_print_all_params (FILE * f)
3654 {
3655 struct cgraph_node *node;
3656
3657 fprintf (f, "\nFunction parameters:\n");
3658 FOR_EACH_FUNCTION (node)
3659 ipa_print_node_params (f, node);
3660 }
3661
3662 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3663
3664 vec<tree>
3665 ipa_get_vector_of_formal_parms (tree fndecl)
3666 {
3667 vec<tree> args;
3668 int count;
3669 tree parm;
3670
3671 gcc_assert (!flag_wpa);
3672 count = count_formal_params (fndecl);
3673 args.create (count);
3674 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3675 args.quick_push (parm);
3676
3677 return args;
3678 }
3679
3680 /* Return a heap-allocated vector containing the types of formal parameters of
3681 function type FNTYPE. */
3682
3683 vec<tree>
3684 ipa_get_vector_of_formal_parm_types (tree fntype)
3685 {
3686 vec<tree> types;
3687 int count = 0;
3688 tree t;
3689
3690 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3691 count++;
3692
3693 types.create (count);
3694 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3695 types.quick_push (TREE_VALUE (t));
3696
3697 return types;
3698 }
3699
3700 /* Modify the function declaration FNDECL and its type according to the plan in
3701 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3702 to reflect the actual parameters being modified, which are determined by the
3703 base_index field. */
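/* As a hedged illustration (identifiers invented for this comment, not
taken from any testcase): given

int foo (int a, struct S *p, int unused);

an adjustment vector of { COPY a; pass the int at offset 0 of *p by
value; REMOVE unused } would rebuild the declaration along the lines
of

int foo (int a, int ISRA_p);

with the base and new_decl fields of each entry pointing at the old
and newly built PARM_DECLs respectively. */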
3704
3705 void
3706 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3707 {
3708 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3709 tree orig_type = TREE_TYPE (fndecl);
3710 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3711
3712 /* The following test is an ugly hack; some functions simply don't have any
3713 arguments in their type. This is probably a bug but well... */
3714 bool care_for_types = (old_arg_types != NULL_TREE);
3715 bool last_parm_void;
3716 vec<tree> otypes;
3717 if (care_for_types)
3718 {
3719 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3720 == void_type_node);
3721 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3722 if (last_parm_void)
3723 gcc_assert (oparms.length () + 1 == otypes.length ());
3724 else
3725 gcc_assert (oparms.length () == otypes.length ());
3726 }
3727 else
3728 {
3729 last_parm_void = false;
3730 otypes.create (0);
3731 }
3732
3733 int len = adjustments.length ();
3734 tree *link = &DECL_ARGUMENTS (fndecl);
3735 tree new_arg_types = NULL;
3736 for (int i = 0; i < len; i++)
3737 {
3738 struct ipa_parm_adjustment *adj;
3739 gcc_assert (link);
3740
3741 adj = &adjustments[i];
3742 tree parm;
3743 if (adj->op == IPA_PARM_OP_NEW)
3744 parm = NULL;
3745 else
3746 parm = oparms[adj->base_index];
3747 adj->base = parm;
3748
3749 if (adj->op == IPA_PARM_OP_COPY)
3750 {
3751 if (care_for_types)
3752 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3753 new_arg_types);
3754 *link = parm;
3755 link = &DECL_CHAIN (parm);
3756 }
3757 else if (adj->op != IPA_PARM_OP_REMOVE)
3758 {
3759 tree new_parm;
3760 tree ptype;
3761
3762 if (adj->by_ref)
3763 ptype = build_pointer_type (adj->type);
3764 else
3765 {
3766 ptype = adj->type;
3767 if (is_gimple_reg_type (ptype))
3768 {
3769 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3770 if (TYPE_ALIGN (ptype) < malign)
3771 ptype = build_aligned_type (ptype, malign);
3772 }
3773 }
3774
3775 if (care_for_types)
3776 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3777
3778 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3779 ptype);
3780 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3781 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3782 DECL_ARTIFICIAL (new_parm) = 1;
3783 DECL_ARG_TYPE (new_parm) = ptype;
3784 DECL_CONTEXT (new_parm) = fndecl;
3785 TREE_USED (new_parm) = 1;
3786 DECL_IGNORED_P (new_parm) = 1;
3787 layout_decl (new_parm, 0);
3788
3789 if (adj->op == IPA_PARM_OP_NEW)
3790 adj->base = NULL;
3791 else
3792 adj->base = parm;
3793 adj->new_decl = new_parm;
3794
3795 *link = new_parm;
3796 link = &DECL_CHAIN (new_parm);
3797 }
3798 }
3799
3800 *link = NULL_TREE;
3801
3802 tree new_reversed = NULL;
3803 if (care_for_types)
3804 {
3805 new_reversed = nreverse (new_arg_types);
3806 if (last_parm_void)
3807 {
3808 if (new_reversed)
3809 TREE_CHAIN (new_arg_types) = void_list_node;
3810 else
3811 new_reversed = void_list_node;
3812 }
3813 }
3814
3815 /* Use copy_node to preserve as much as possible from the original type
3816 (debug info, attribute lists etc.).
3817 The exception is METHOD_TYPEs, which must have a THIS argument.
3818 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3819 instead. */
3820 tree new_type = NULL;
3821 if (TREE_CODE (orig_type) != METHOD_TYPE
3822 || (adjustments[0].op == IPA_PARM_OP_COPY
3823 && adjustments[0].base_index == 0))
3824 {
3825 new_type = build_distinct_type_copy (orig_type);
3826 TYPE_ARG_TYPES (new_type) = new_reversed;
3827 }
3828 else
3829 {
3830 new_type
3831 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3832 new_reversed));
3833 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3834 DECL_VINDEX (fndecl) = NULL_TREE;
3835 }
3836
3837 /* When signature changes, we need to clear builtin info. */
3838 if (DECL_BUILT_IN (fndecl))
3839 {
3840 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3841 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3842 }
3843
3844 TREE_TYPE (fndecl) = new_type;
3845 DECL_VIRTUAL_P (fndecl) = 0;
3846 DECL_LANG_SPECIFIC (fndecl) = NULL;
3847 otypes.release ();
3848 oparms.release ();
3849 }
3850
3851 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3852 If this is a directly recursive call, CS must be NULL. Otherwise it must
3853 contain the corresponding call graph edge. */
3854
3855 void
3856 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3857 ipa_parm_adjustment_vec adjustments)
3858 {
3859 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3860 vec<tree> vargs;
3861 vec<tree, va_gc> **debug_args = NULL;
3862 gimple new_stmt;
3863 gimple_stmt_iterator gsi, prev_gsi;
3864 tree callee_decl;
3865 int i, len;
3866
3867 len = adjustments.length ();
3868 vargs.create (len);
3869 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3870 current_node->remove_stmt_references (stmt);
3871
3872 gsi = gsi_for_stmt (stmt);
3873 prev_gsi = gsi;
3874 gsi_prev (&prev_gsi);
3875 for (i = 0; i < len; i++)
3876 {
3877 struct ipa_parm_adjustment *adj;
3878
3879 adj = &adjustments[i];
3880
3881 if (adj->op == IPA_PARM_OP_COPY)
3882 {
3883 tree arg = gimple_call_arg (stmt, adj->base_index);
3884
3885 vargs.quick_push (arg);
3886 }
3887 else if (adj->op != IPA_PARM_OP_REMOVE)
3888 {
3889 tree expr, base, off;
3890 location_t loc;
3891 unsigned int deref_align = 0;
3892 bool deref_base = false;
3893
3894 /* We create a new parameter out of the value of the old one; we can
3895 do the following kinds of transformations:
3896
3897 - A scalar passed by reference is converted to a scalar passed by
3898 value. (adj->by_ref is false and the type of the original
3899 actual argument is a pointer to a scalar).
3900
3901 - A part of an aggregate is passed instead of the whole aggregate.
3902 The part can be passed either by value or by reference, this is
3903 determined by value of adj->by_ref. Moreover, the code below
3904 handles both situations when the original aggregate is passed by
3905 value (its type is not a pointer) and when it is passed by
3906 reference (it is a pointer to an aggregate).
3907
3908 When the new argument is passed by reference (adj->by_ref is true)
3909 it must be a part of an aggregate and therefore we form it by
3910 simply taking the address of a reference inside the original
3911 aggregate. */
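/* A hypothetical sketch of the cases above (names invented purely for
illustration): for an original call

bar (&s);

passing the scalar member s.x by value rewrites the call to

bar.clone (s.x);

whereas passing the sub-aggregate s.inner by reference takes its
address instead:

bar.clone (&s.inner); */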
3912
3913 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3914 base = gimple_call_arg (stmt, adj->base_index);
3915 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3916 : EXPR_LOCATION (base);
3917
3918 if (TREE_CODE (base) != ADDR_EXPR
3919 && POINTER_TYPE_P (TREE_TYPE (base)))
3920 off = build_int_cst (adj->alias_ptr_type,
3921 adj->offset / BITS_PER_UNIT);
3922 else
3923 {
3924 HOST_WIDE_INT base_offset;
3925 tree prev_base;
3926 bool addrof;
3927
3928 if (TREE_CODE (base) == ADDR_EXPR)
3929 {
3930 base = TREE_OPERAND (base, 0);
3931 addrof = true;
3932 }
3933 else
3934 addrof = false;
3935 prev_base = base;
3936 base = get_addr_base_and_unit_offset (base, &base_offset);
3937 /* Aggregate arguments can have non-invariant addresses. */
3938 if (!base)
3939 {
3940 base = build_fold_addr_expr (prev_base);
3941 off = build_int_cst (adj->alias_ptr_type,
3942 adj->offset / BITS_PER_UNIT);
3943 }
3944 else if (TREE_CODE (base) == MEM_REF)
3945 {
3946 if (!addrof)
3947 {
3948 deref_base = true;
3949 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3950 }
3951 off = build_int_cst (adj->alias_ptr_type,
3952 base_offset
3953 + adj->offset / BITS_PER_UNIT);
3954 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3955 off);
3956 base = TREE_OPERAND (base, 0);
3957 }
3958 else
3959 {
3960 off = build_int_cst (adj->alias_ptr_type,
3961 base_offset
3962 + adj->offset / BITS_PER_UNIT);
3963 base = build_fold_addr_expr (base);
3964 }
3965 }
3966
3967 if (!adj->by_ref)
3968 {
3969 tree type = adj->type;
3970 unsigned int align;
3971 unsigned HOST_WIDE_INT misalign;
3972
3973 if (deref_base)
3974 {
3975 align = deref_align;
3976 misalign = 0;
3977 }
3978 else
3979 {
3980 get_pointer_alignment_1 (base, &align, &misalign);
3981 if (TYPE_ALIGN (type) > align)
3982 align = TYPE_ALIGN (type);
3983 }
3984 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3985 * BITS_PER_UNIT);
3986 misalign = misalign & (align - 1);
3987 if (misalign != 0)
3988 align = (misalign & -misalign);
3989 if (align < TYPE_ALIGN (type))
3990 type = build_aligned_type (type, align);
3991 base = force_gimple_operand_gsi (&gsi, base,
3992 true, NULL, true, GSI_SAME_STMT);
3993 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3994 /* If expr is not a valid gimple call argument emit
3995 a load into a temporary. */
3996 if (is_gimple_reg_type (TREE_TYPE (expr)))
3997 {
3998 gimple tem = gimple_build_assign (NULL_TREE, expr);
3999 if (gimple_in_ssa_p (cfun))
4000 {
4001 gimple_set_vuse (tem, gimple_vuse (stmt));
4002 expr = make_ssa_name (TREE_TYPE (expr), tem);
4003 }
4004 else
4005 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4006 gimple_assign_set_lhs (tem, expr);
4007 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4008 }
4009 }
4010 else
4011 {
4012 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4013 expr = build_fold_addr_expr (expr);
4014 expr = force_gimple_operand_gsi (&gsi, expr,
4015 true, NULL, true, GSI_SAME_STMT);
4016 }
4017 vargs.quick_push (expr);
4018 }
4019 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4020 {
4021 unsigned int ix;
4022 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4023 gimple def_temp;
4024
4025 arg = gimple_call_arg (stmt, adj->base_index);
4026 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4027 {
4028 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4029 continue;
4030 arg = fold_convert_loc (gimple_location (stmt),
4031 TREE_TYPE (origin), arg);
4032 }
4033 if (debug_args == NULL)
4034 debug_args = decl_debug_args_insert (callee_decl);
4035 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4036 if (ddecl == origin)
4037 {
4038 ddecl = (**debug_args)[ix + 1];
4039 break;
4040 }
4041 if (ddecl == NULL)
4042 {
4043 ddecl = make_node (DEBUG_EXPR_DECL);
4044 DECL_ARTIFICIAL (ddecl) = 1;
4045 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4046 DECL_MODE (ddecl) = DECL_MODE (origin);
4047
4048 vec_safe_push (*debug_args, origin);
4049 vec_safe_push (*debug_args, ddecl);
4050 }
4051 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4052 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4053 }
4054 }
4055
4056 if (dump_file && (dump_flags & TDF_DETAILS))
4057 {
4058 fprintf (dump_file, "replacing stmt:");
4059 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4060 }
4061
4062 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4063 vargs.release ();
4064 if (gimple_call_lhs (stmt))
4065 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4066
4067 gimple_set_block (new_stmt, gimple_block (stmt));
4068 if (gimple_has_location (stmt))
4069 gimple_set_location (new_stmt, gimple_location (stmt));
4070 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4071 gimple_call_copy_flags (new_stmt, stmt);
4072 if (gimple_in_ssa_p (cfun))
4073 {
4074 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4075 if (gimple_vdef (stmt))
4076 {
4077 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4078 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4079 }
4080 }
4081
4082 if (dump_file && (dump_flags & TDF_DETAILS))
4083 {
4084 fprintf (dump_file, "with stmt:");
4085 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4086 fprintf (dump_file, "\n");
4087 }
4088 gsi_replace (&gsi, new_stmt, true);
4089 if (cs)
4090 cs->set_call_stmt (new_stmt);
4091 do
4092 {
4093 current_node->record_stmt_references (gsi_stmt (gsi));
4094 gsi_prev (&gsi);
4095 }
4096 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4097 }
4098
4099 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4100 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4101 specifies whether the function should care about type incompatibility
4102 between the current and new expressions. If it is false, the function will leave
4103 incompatibility issues to the caller. Return true iff the expression
4104 was modified. */
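/* For instance (a hypothetical example): if a by-reference parameter P
has been reduced to a new scalar parameter ISRA_P, an occurrence of
*P in the body is replaced with ISRA_P directly, or with
VIEW_CONVERT_EXPR<T>(ISRA_P) when CONVERT is set and the types are not
compatible; for a by_ref candidate the dereference *ISRA_P is used
instead. */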
4105
4106 bool
4107 ipa_modify_expr (tree *expr, bool convert,
4108 ipa_parm_adjustment_vec adjustments)
4109 {
4110 struct ipa_parm_adjustment *cand
4111 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4112 if (!cand)
4113 return false;
4114
4115 tree src;
4116 if (cand->by_ref)
4117 src = build_simple_mem_ref (cand->new_decl);
4118 else
4119 src = cand->new_decl;
4120
4121 if (dump_file && (dump_flags & TDF_DETAILS))
4122 {
4123 fprintf (dump_file, "About to replace expr ");
4124 print_generic_expr (dump_file, *expr, 0);
4125 fprintf (dump_file, " with ");
4126 print_generic_expr (dump_file, src, 0);
4127 fprintf (dump_file, "\n");
4128 }
4129
4130 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4131 {
4132 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4133 *expr = vce;
4134 }
4135 else
4136 *expr = src;
4137 return true;
4138 }
4139
4140 /* If T is an SSA_NAME, return NULL if it is not a default def or
4141 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4142 the base variable is always returned, regardless of whether it is a
4143 default def. Return T if it is not an SSA_NAME. */
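/* For example: for a parameter A, the SSA name A_1(D) is the default
definition of A, so get_ssa_base_param (A_1(D), false) returns the
PARM_DECL A, while for a non-default definition such as A_2 in
A_2 = A_1(D) + 1 it returns NULL_TREE, unless IGNORE_DEFAULT_DEF is
true, in which case the base variable is returned regardless. */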
4144
4145 static tree
4146 get_ssa_base_param (tree t, bool ignore_default_def)
4147 {
4148 if (TREE_CODE (t) == SSA_NAME)
4149 {
4150 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4151 return SSA_NAME_VAR (t);
4152 else
4153 return NULL_TREE;
4154 }
4155 return t;
4156 }
4157
4158 /* Given an expression, return an adjustment entry specifying the
4159 transformation to be done on EXPR. If no suitable adjustment entry
4160 was found, returns NULL.
4161
4162 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4163 default def, otherwise bail on them.
4164
4165 If CONVERT is non-NULL, this function will set *CONVERT if the
4166 expression provided is a component reference. ADJUSTMENTS is the
4167 adjustments vector. */
4168
4169 ipa_parm_adjustment *
4170 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4171 ipa_parm_adjustment_vec adjustments,
4172 bool ignore_default_def)
4173 {
4174 if (TREE_CODE (**expr) == BIT_FIELD_REF
4175 || TREE_CODE (**expr) == IMAGPART_EXPR
4176 || TREE_CODE (**expr) == REALPART_EXPR)
4177 {
4178 *expr = &TREE_OPERAND (**expr, 0);
4179 if (convert)
4180 *convert = true;
4181 }
4182
4183 HOST_WIDE_INT offset, size, max_size;
4184 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4185 if (!base || size == -1 || max_size == -1)
4186 return NULL;
4187
4188 if (TREE_CODE (base) == MEM_REF)
4189 {
4190 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4191 base = TREE_OPERAND (base, 0);
4192 }
4193
4194 base = get_ssa_base_param (base, ignore_default_def);
4195 if (!base || TREE_CODE (base) != PARM_DECL)
4196 return NULL;
4197
4198 struct ipa_parm_adjustment *cand = NULL;
4199 unsigned int len = adjustments.length ();
4200 for (unsigned i = 0; i < len; i++)
4201 {
4202 struct ipa_parm_adjustment *adj = &adjustments[i];
4203
4204 if (adj->base == base
4205 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4206 {
4207 cand = adj;
4208 break;
4209 }
4210 }
4211
4212 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4213 return NULL;
4214 return cand;
4215 }
4216
4217 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4218
4219 static bool
4220 index_in_adjustments_multiple_times_p (int base_index,
4221 ipa_parm_adjustment_vec adjustments)
4222 {
4223 int i, len = adjustments.length ();
4224 bool one = false;
4225
4226 for (i = 0; i < len; i++)
4227 {
4228 struct ipa_parm_adjustment *adj;
4229 adj = &adjustments[i];
4230
4231 if (adj->base_index == base_index)
4232 {
4233 if (one)
4234 return true;
4235 else
4236 one = true;
4237 }
4238 }
4239 return false;
4240 }
4241
4242
4243 /* Return adjustments that should have the same effect on function parameters
4244 and call arguments as if they were first changed according to adjustments in
4245 INNER and then by adjustments in OUTER. */
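/* A worked example (purely illustrative): for an original signature
(a, b, c), let INNER be { COPY a, REMOVE b, COPY c }, giving an
intermediate function with parameters (a, c), and let OUTER copy both
of those. The loops below produce { COPY index 0, COPY index 2 }
followed by the re-appended { REMOVE index 1 }, which applied in one
step has the same effect as the two plans in sequence. */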
4246
4247 ipa_parm_adjustment_vec
4248 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4249 ipa_parm_adjustment_vec outer)
4250 {
4251 int i, outlen = outer.length ();
4252 int inlen = inner.length ();
4253 int removals = 0;
4254 ipa_parm_adjustment_vec adjustments, tmp;
4255
4256 tmp.create (inlen);
4257 for (i = 0; i < inlen; i++)
4258 {
4259 struct ipa_parm_adjustment *n;
4260 n = &inner[i];
4261
4262 if (n->op == IPA_PARM_OP_REMOVE)
4263 removals++;
4264 else
4265 {
4266 /* FIXME: Handling of new arguments is not implemented yet. */
4267 gcc_assert (n->op != IPA_PARM_OP_NEW);
4268 tmp.quick_push (*n);
4269 }
4270 }
4271
4272 adjustments.create (outlen + removals);
4273 for (i = 0; i < outlen; i++)
4274 {
4275 struct ipa_parm_adjustment r;
4276 struct ipa_parm_adjustment *out = &outer[i];
4277 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4278
4279 memset (&r, 0, sizeof (r));
4280 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4281 if (out->op == IPA_PARM_OP_REMOVE)
4282 {
4283 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4284 {
4285 r.op = IPA_PARM_OP_REMOVE;
4286 adjustments.quick_push (r);
4287 }
4288 continue;
4289 }
4290 else
4291 {
4292 /* FIXME: Handling of new arguments is not implemented yet. */
4293 gcc_assert (out->op != IPA_PARM_OP_NEW);
4294 }
4295
4296 r.base_index = in->base_index;
4297 r.type = out->type;
4298
4299 /* FIXME: Create nonlocal value too. */
4300
4301 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4302 r.op = IPA_PARM_OP_COPY;
4303 else if (in->op == IPA_PARM_OP_COPY)
4304 r.offset = out->offset;
4305 else if (out->op == IPA_PARM_OP_COPY)
4306 r.offset = in->offset;
4307 else
4308 r.offset = in->offset + out->offset;
4309 adjustments.quick_push (r);
4310 }
4311
4312 for (i = 0; i < inlen; i++)
4313 {
4314 struct ipa_parm_adjustment *n = &inner[i];
4315
4316 if (n->op == IPA_PARM_OP_REMOVE)
4317 adjustments.quick_push (*n);
4318 }
4319
4320 tmp.release ();
4321 return adjustments;
4322 }
4323
4324 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4325 way, assuming they are meant to be applied to FNDECL. */
4326
4327 void
4328 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4329 tree fndecl)
4330 {
4331 int i, len = adjustments.length ();
4332 bool first = true;
4333 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4334
4335 fprintf (file, "IPA param adjustments: ");
4336 for (i = 0; i < len; i++)
4337 {
4338 struct ipa_parm_adjustment *adj;
4339 adj = &adjustments[i];
4340
4341 if (!first)
4342 fprintf (file, " ");
4343 else
4344 first = false;
4345
4346 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4347 print_generic_expr (file, parms[adj->base_index], 0);
4348 if (adj->base)
4349 {
4350 fprintf (file, ", base: ");
4351 print_generic_expr (file, adj->base, 0);
4352 }
4353 if (adj->new_decl)
4354 {
4355 fprintf (file, ", new_decl: ");
4356 print_generic_expr (file, adj->new_decl, 0);
4357 }
4358 if (adj->new_ssa_base)
4359 {
4360 fprintf (file, ", new_ssa_base: ");
4361 print_generic_expr (file, adj->new_ssa_base, 0);
4362 }
4363
4364 if (adj->op == IPA_PARM_OP_COPY)
4365 fprintf (file, ", copy_param");
4366 else if (adj->op == IPA_PARM_OP_REMOVE)
4367 fprintf (file, ", remove_param");
4368 else
4369 fprintf (file, ", offset %li", (long) adj->offset);
4370 if (adj->by_ref)
4371 fprintf (file, ", by_ref");
4372 print_node_brief (file, ", type: ", adj->type, 0);
4373 fprintf (file, "\n");
4374 }
4375 parms.release ();
4376 }
4377
4378 /* Dump the linked list of aggregate replacement values AV to F. */
4379
4380 void
4381 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4382 {
4383 bool comma = false;
4384 fprintf (f, " Aggregate replacements:");
4385 for (; av; av = av->next)
4386 {
4387 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4388 av->index, av->offset);
4389 print_generic_expr (f, av->value, 0);
4390 comma = true;
4391 }
4392 fprintf (f, "\n");
4393 }
4394
4395 /* Stream out jump function JUMP_FUNC to OB. */
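/* A sketch of the record layout written below (and mirrored by
ipa_read_jump_function): a uhwi holding the jump function type, then
type-specific fields -- e.g. for IPA_JF_PASS_THROUGH a uhwi operation
and, for NOP_EXPR, a uhwi formal_id plus a one-bit agg_preserved
bitpack -- and finally a uhwi count of aggregate items, a one-bit
by_ref bitpack if the count is nonzero, and an offset/tree pair per
item. */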
4396
4397 static void
4398 ipa_write_jump_function (struct output_block *ob,
4399 struct ipa_jump_func *jump_func)
4400 {
4401 struct ipa_agg_jf_item *item;
4402 struct bitpack_d bp;
4403 int i, count;
4404
4405 streamer_write_uhwi (ob, jump_func->type);
4406 switch (jump_func->type)
4407 {
4408 case IPA_JF_UNKNOWN:
4409 break;
4410 case IPA_JF_CONST:
4411 gcc_assert (
4412 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4413 stream_write_tree (ob, jump_func->value.constant.value, true);
4414 break;
4415 case IPA_JF_PASS_THROUGH:
4416 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4417 if (jump_func->value.pass_through.operation == NOP_EXPR)
4418 {
4419 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4420 bp = bitpack_create (ob->main_stream);
4421 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4422 streamer_write_bitpack (&bp);
4423 }
4424 else
4425 {
4426 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4427 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4428 }
4429 break;
4430 case IPA_JF_ANCESTOR:
4431 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4432 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4433 bp = bitpack_create (ob->main_stream);
4434 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4435 streamer_write_bitpack (&bp);
4436 break;
4437 }
4438
4439 count = vec_safe_length (jump_func->agg.items);
4440 streamer_write_uhwi (ob, count);
4441 if (count)
4442 {
4443 bp = bitpack_create (ob->main_stream);
4444 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4445 streamer_write_bitpack (&bp);
4446 }
4447
4448 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4449 {
4450 streamer_write_uhwi (ob, item->offset);
4451 stream_write_tree (ob, item->value, true);
4452 }
4453 }
4454
4455 /* Read in jump function JUMP_FUNC from IB. */
4456
4457 static void
4458 ipa_read_jump_function (struct lto_input_block *ib,
4459 struct ipa_jump_func *jump_func,
4460 struct cgraph_edge *cs,
4461 struct data_in *data_in)
4462 {
4463 enum jump_func_type jftype;
4464 enum tree_code operation;
4465 int i, count;
4466
4467 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4468 switch (jftype)
4469 {
4470 case IPA_JF_UNKNOWN:
4471 jump_func->type = IPA_JF_UNKNOWN;
4472 break;
4473 case IPA_JF_CONST:
4474 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4475 break;
4476 case IPA_JF_PASS_THROUGH:
4477 operation = (enum tree_code) streamer_read_uhwi (ib);
4478 if (operation == NOP_EXPR)
4479 {
4480 int formal_id = streamer_read_uhwi (ib);
4481 struct bitpack_d bp = streamer_read_bitpack (ib);
4482 bool agg_preserved = bp_unpack_value (&bp, 1);
4483 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4484 }
4485 else
4486 {
4487 tree operand = stream_read_tree (ib, data_in);
4488 int formal_id = streamer_read_uhwi (ib);
4489 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4490 operation);
4491 }
4492 break;
4493 case IPA_JF_ANCESTOR:
4494 {
4495 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4496 int formal_id = streamer_read_uhwi (ib);
4497 struct bitpack_d bp = streamer_read_bitpack (ib);
4498 bool agg_preserved = bp_unpack_value (&bp, 1);
4499 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4500 break;
4501 }
4502 }
4503
4504 count = streamer_read_uhwi (ib);
4505 vec_alloc (jump_func->agg.items, count);
4506 if (count)
4507 {
4508 struct bitpack_d bp = streamer_read_bitpack (ib);
4509 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4510 }
4511 for (i = 0; i < count; i++)
4512 {
4513 struct ipa_agg_jf_item item;
4514 item.offset = streamer_read_uhwi (ib);
4515 item.value = stream_read_tree (ib, data_in);
4516 jump_func->agg.items->quick_push (item);
4517 }
4518 }
4519
4520 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4521 relevant to indirect inlining to OB. */
4522
4523 static void
4524 ipa_write_indirect_edge_info (struct output_block *ob,
4525 struct cgraph_edge *cs)
4526 {
4527 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4528 struct bitpack_d bp;
4529
4530 streamer_write_hwi (ob, ii->param_index);
4531 bp = bitpack_create (ob->main_stream);
4532 bp_pack_value (&bp, ii->polymorphic, 1);
4533 bp_pack_value (&bp, ii->agg_contents, 1);
4534 bp_pack_value (&bp, ii->member_ptr, 1);
4535 bp_pack_value (&bp, ii->by_ref, 1);
4536 bp_pack_value (&bp, ii->vptr_changed, 1);
4537 streamer_write_bitpack (&bp);
4538 if (ii->agg_contents || ii->polymorphic)
4539 streamer_write_hwi (ob, ii->offset);
4540 else
4541 gcc_assert (ii->offset == 0);
4542
4543 if (ii->polymorphic)
4544 {
4545 streamer_write_hwi (ob, ii->otr_token);
4546 stream_write_tree (ob, ii->otr_type, true);
4547 ii->context.stream_out (ob);
4548 }
4549 }
4550
4551 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4552 relevant to indirect inlining from IB. */
4553
4554 static void
4555 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4556 struct data_in *data_in,
4557 struct cgraph_edge *cs)
4558 {
4559 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4560 struct bitpack_d bp;
4561
4562 ii->param_index = (int) streamer_read_hwi (ib);
4563 bp = streamer_read_bitpack (ib);
4564 ii->polymorphic = bp_unpack_value (&bp, 1);
4565 ii->agg_contents = bp_unpack_value (&bp, 1);
4566 ii->member_ptr = bp_unpack_value (&bp, 1);
4567 ii->by_ref = bp_unpack_value (&bp, 1);
4568 ii->vptr_changed = bp_unpack_value (&bp, 1);
4569 if (ii->agg_contents || ii->polymorphic)
4570 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4571 else
4572 ii->offset = 0;
4573 if (ii->polymorphic)
4574 {
4575 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4576 ii->otr_type = stream_read_tree (ib, data_in);
4577 ii->context.stream_in (ib, data_in);
4578 }
4579 }
4580
4581 /* Stream out NODE info to OB. */
4582
4583 static void
4584 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4585 {
4586 int node_ref;
4587 lto_symtab_encoder_t encoder;
4588 struct ipa_node_params *info = IPA_NODE_REF (node);
4589 int j;
4590 struct cgraph_edge *e;
4591 struct bitpack_d bp;
4592
4593 encoder = ob->decl_state->symtab_node_encoder;
4594 node_ref = lto_symtab_encoder_encode (encoder, node);
4595 streamer_write_uhwi (ob, node_ref);
4596
4597 streamer_write_uhwi (ob, ipa_get_param_count (info));
4598 for (j = 0; j < ipa_get_param_count (info); j++)
4599 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4600 bp = bitpack_create (ob->main_stream);
4601 gcc_assert (info->analysis_done
4602 || ipa_get_param_count (info) == 0);
4603 gcc_assert (!info->node_enqueued);
4604 gcc_assert (!info->ipcp_orig_node);
4605 for (j = 0; j < ipa_get_param_count (info); j++)
4606 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4607 streamer_write_bitpack (&bp);
4608 for (j = 0; j < ipa_get_param_count (info); j++)
4609 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4610 for (e = node->callees; e; e = e->next_callee)
4611 {
4612 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4613
4614 streamer_write_uhwi (ob,
4615 ipa_get_cs_argument_count (args) * 2
4616 + (args->polymorphic_call_contexts != NULL));
4617 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4618 {
4619 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4620 if (args->polymorphic_call_contexts != NULL)
4621 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4622 }
4623 }
4624 for (e = node->indirect_calls; e; e = e->next_callee)
4625 {
4626 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4627
4628 streamer_write_uhwi (ob,
4629 ipa_get_cs_argument_count (args) * 2
4630 + (args->polymorphic_call_contexts != NULL));
4631 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4632 {
4633 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4634 if (args->polymorphic_call_contexts != NULL)
4635 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4636 }
4637 ipa_write_indirect_edge_info (ob, e);
4638 }
4639 }
4640
4641 /* Stream in NODE info from IB. */
4642
4643 static void
4644 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4645 struct data_in *data_in)
4646 {
4647 struct ipa_node_params *info = IPA_NODE_REF (node);
4648 int k;
4649 struct cgraph_edge *e;
4650 struct bitpack_d bp;
4651
4652 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4653
4654 for (k = 0; k < ipa_get_param_count (info); k++)
4655 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4656
4657 bp = streamer_read_bitpack (ib);
4658 if (ipa_get_param_count (info) != 0)
4659 info->analysis_done = true;
4660 info->node_enqueued = false;
4661 for (k = 0; k < ipa_get_param_count (info); k++)
4662 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4663 for (k = 0; k < ipa_get_param_count (info); k++)
4664 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4665 for (e = node->callees; e; e = e->next_callee)
4666 {
4667 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4668 int count = streamer_read_uhwi (ib);
4669 bool contexts_computed = count & 1;
4670 count /= 2;
4671
4672 if (!count)
4673 continue;
4674 vec_safe_grow_cleared (args->jump_functions, count);
4675 if (contexts_computed)
4676 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4677
4678 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4679 {
4680 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4681 data_in);
4682 if (contexts_computed)
4683 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4684 }
4685 }
4686 for (e = node->indirect_calls; e; e = e->next_callee)
4687 {
4688 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4689 int count = streamer_read_uhwi (ib);
4690 bool contexts_computed = count & 1;
4691 count /= 2;
4692
4693 if (count)
4694 {
4695 vec_safe_grow_cleared (args->jump_functions, count);
4696 if (contexts_computed)
4697 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4698 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4699 {
4700 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4701 data_in);
4702 if (contexts_computed)
4703 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4704 }
4705 }
4706 ipa_read_indirect_edge_info (ib, data_in, e);
4707 }
4708 }
4709
4710 /* Write jump functions of all functions in the current LTO partition. */
4711
4712 void
4713 ipa_prop_write_jump_functions (void)
4714 {
4715 struct cgraph_node *node;
4716 struct output_block *ob;
4717 unsigned int count = 0;
4718 lto_symtab_encoder_iterator lsei;
4719 lto_symtab_encoder_t encoder;
4720
4722 if (!ipa_node_params_vector.exists ())
4723 return;
4724
4725 ob = create_output_block (LTO_section_jump_functions);
4726 encoder = ob->decl_state->symtab_node_encoder;
4727 ob->symbol = NULL;
4728 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4729 lsei_next_function_in_partition (&lsei))
4730 {
4731 node = lsei_cgraph_node (lsei);
4732 if (node->has_gimple_body_p ()
4733 && IPA_NODE_REF (node) != NULL)
4734 count++;
4735 }
4736
4737 streamer_write_uhwi (ob, count);
4738
4739 /* Process all of the functions. */
4740 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4741 lsei_next_function_in_partition (&lsei))
4742 {
4743 node = lsei_cgraph_node (lsei);
4744 if (node->has_gimple_body_p ()
4745 && IPA_NODE_REF (node) != NULL)
4746 ipa_write_node_info (ob, node);
4747 }
4748 streamer_write_char_stream (ob->main_stream, 0);
4749 produce_asm (ob, NULL);
4750 destroy_output_block (ob);
4751 }
4752
4753 /* Read section in file FILE_DATA of length LEN with data DATA. */
4754
4755 static void
4756 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4757 size_t len)
4758 {
4759 const struct lto_function_header *header =
4760 (const struct lto_function_header *) data;
4761 const int cfg_offset = sizeof (struct lto_function_header);
4762 const int main_offset = cfg_offset + header->cfg_size;
4763 const int string_offset = main_offset + header->main_size;
4764 struct data_in *data_in;
4765 unsigned int i;
4766 unsigned int count;
4767
4768 lto_input_block ib_main ((const char *) data + main_offset,
4769 header->main_size);
4770
4771 data_in =
4772 lto_data_in_create (file_data, (const char *) data + string_offset,
4773 header->string_size, vNULL);
4774 count = streamer_read_uhwi (&ib_main);
4775
4776 for (i = 0; i < count; i++)
4777 {
4778 unsigned int index;
4779 struct cgraph_node *node;
4780 lto_symtab_encoder_t encoder;
4781
4782 index = streamer_read_uhwi (&ib_main);
4783 encoder = file_data->symtab_node_encoder;
4784 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4785 index));
4786 gcc_assert (node->definition);
4787 ipa_read_node_info (&ib_main, node, data_in);
4788 }
4789 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4790 len);
4791 lto_data_in_delete (data_in);
4792 }
4793
4794 /* Read ipcp jump functions. */
4795
4796 void
4797 ipa_prop_read_jump_functions (void)
4798 {
4799 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4800 struct lto_file_decl_data *file_data;
4801 unsigned int j = 0;
4802
4803 ipa_check_create_node_params ();
4804 ipa_check_create_edge_args ();
4805 ipa_register_cgraph_hooks ();
4806
4807 while ((file_data = file_data_vec[j++]))
4808 {
4809 size_t len;
4810 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4811
4812 if (data)
4813 ipa_prop_read_section (file_data, data, len);
4814 }
4815 }
4816
4817 /* After merging units, we can get a mismatch in argument counts; decl
4818 merging might also have rendered parameter lists obsolete. Make sure
4819 the parameter and argument summary structures exist. */
4820
4821 void
4822 ipa_update_after_lto_read (void)
4823 {
4824 ipa_check_create_node_params ();
4825 ipa_check_create_edge_args ();
4826 }
4827
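/* Stream out the aggregate value replacement chain for NODE to OB. */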
4828 void
4829 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4830 {
4831 int node_ref;
4832 unsigned int count = 0;
4833 lto_symtab_encoder_t encoder;
4834 struct ipa_agg_replacement_value *aggvals, *av;
4835
4836 aggvals = ipa_get_agg_replacements_for_node (node);
4837 encoder = ob->decl_state->symtab_node_encoder;
4838 node_ref = lto_symtab_encoder_encode (encoder, node);
4839 streamer_write_uhwi (ob, node_ref);
4840
4841 for (av = aggvals; av; av = av->next)
4842 count++;
4843 streamer_write_uhwi (ob, count);
4844
4845 for (av = aggvals; av; av = av->next)
4846 {
4847 struct bitpack_d bp;
4848
4849 streamer_write_uhwi (ob, av->offset);
4850 streamer_write_uhwi (ob, av->index);
4851 stream_write_tree (ob, av->value, true);
4852
4853 bp = bitpack_create (ob->main_stream);
4854 bp_pack_value (&bp, av->by_ref, 1);
4855 streamer_write_bitpack (&bp);
4856 }
4857 }
4858
4859 /* Stream in the aggregate value replacement chain for NODE from IB. */
4860
4861 static void
4862 read_agg_replacement_chain (struct lto_input_block *ib,
4863 struct cgraph_node *node,
4864 struct data_in *data_in)
4865 {
4866 struct ipa_agg_replacement_value *aggvals = NULL;
4867 unsigned int count, i;
4868
4869 count = streamer_read_uhwi (ib);
4870 for (i = 0; i < count; i++)
4871 {
4872 struct ipa_agg_replacement_value *av;
4873 struct bitpack_d bp;
4874
4875 av = ggc_alloc<ipa_agg_replacement_value> ();
4876 av->offset = streamer_read_uhwi (ib);
4877 av->index = streamer_read_uhwi (ib);
4878 av->value = stream_read_tree (ib, data_in);
4879 bp = streamer_read_bitpack (ib);
4880 av->by_ref = bp_unpack_value (&bp, 1);
4881 av->next = aggvals;
4882 aggvals = av;
4883 }
4884 ipa_set_node_agg_value_chain (node, aggvals);
4885 }
4886
4887 /* Write all aggregate replacements for functions in the current partition. */
4888
4889 void
4890 ipa_prop_write_all_agg_replacement (void)
4891 {
4892 struct cgraph_node *node;
4893 struct output_block *ob;
4894 unsigned int count = 0;
4895 lto_symtab_encoder_iterator lsei;
4896 lto_symtab_encoder_t encoder;
4897
4898 if (!ipa_node_agg_replacements)
4899 return;
4900
4901 ob = create_output_block (LTO_section_ipcp_transform);
4902 encoder = ob->decl_state->symtab_node_encoder;
4903 ob->symbol = NULL;
4904 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4905 lsei_next_function_in_partition (&lsei))
4906 {
4907 node = lsei_cgraph_node (lsei);
4908 if (node->has_gimple_body_p ()
4909 && ipa_get_agg_replacements_for_node (node) != NULL)
4910 count++;
4911 }
4912
4913 streamer_write_uhwi (ob, count);
4914
4915 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4916 lsei_next_function_in_partition (&lsei))
4917 {
4918 node = lsei_cgraph_node (lsei);
4919 if (node->has_gimple_body_p ()
4920 && ipa_get_agg_replacements_for_node (node) != NULL)
4921 write_agg_replacement_chain (ob, node);
4922 }
4923 streamer_write_char_stream (ob->main_stream, 0);
4924 produce_asm (ob, NULL);
4925 destroy_output_block (ob);
4926 }
4927
4928 /* Read replacements section in file FILE_DATA of length LEN with data
4929 DATA. */
4930
4931 static void
4932 read_replacements_section (struct lto_file_decl_data *file_data,
4933 const char *data,
4934 size_t len)
4935 {
4936 const struct lto_function_header *header =
4937 (const struct lto_function_header *) data;
4938 const int cfg_offset = sizeof (struct lto_function_header);
4939 const int main_offset = cfg_offset + header->cfg_size;
4940 const int string_offset = main_offset + header->main_size;
4941 struct data_in *data_in;
4942 unsigned int i;
4943 unsigned int count;
4944
4945 lto_input_block ib_main ((const char *) data + main_offset,
4946 header->main_size);
4947
4948 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4949 header->string_size, vNULL);
4950 count = streamer_read_uhwi (&ib_main);
4951
4952 for (i = 0; i < count; i++)
4953 {
4954 unsigned int index;
4955 struct cgraph_node *node;
4956 lto_symtab_encoder_t encoder;
4957
4958 index = streamer_read_uhwi (&ib_main);
4959 encoder = file_data->symtab_node_encoder;
4960 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4961 index));
4962 gcc_assert (node->definition);
4963 read_agg_replacement_chain (&ib_main, node, data_in);
4964 }
4965 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4966 len);
4967 lto_data_in_delete (data_in);
4968 }
4969
4970 /* Read IPA-CP aggregate replacements. */
4971
4972 void
4973 ipa_prop_read_all_agg_replacement (void)
4974 {
4975 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4976 struct lto_file_decl_data *file_data;
4977 unsigned int j = 0;
4978
4979 while ((file_data = file_data_vec[j++]))
4980 {
4981 size_t len;
4982 const char *data = lto_get_section_data (file_data,
4983 LTO_section_ipcp_transform,
4984 NULL, &len);
4985 if (data)
4986 read_replacements_section (file_data, data, len);
4987 }
4988 }
4989
4990 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4991 NODE. */
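/* For instance (illustration only): if the clone skips just the second
of three original parameters, the mapping computed below is 0 -> 0,
1 -> -1 (skipped) and 2 -> 1, so a replacement recorded for index 2
is retargeted to index 1 of the clone. */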
4992
4993 static void
4994 adjust_agg_replacement_values (struct cgraph_node *node,
4995 struct ipa_agg_replacement_value *aggval)
4996 {
4997 struct ipa_agg_replacement_value *v;
4998 int i, c = 0, d = 0, *adj;
4999
5000 if (!node->clone.combined_args_to_skip)
5001 return;
5002
5003 for (v = aggval; v; v = v->next)
5004 {
5005 gcc_assert (v->index >= 0);
5006 if (c < v->index)
5007 c = v->index;
5008 }
5009 c++;
5010
5011 adj = XALLOCAVEC (int, c);
5012 for (i = 0; i < c; i++)
5013 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5014 {
5015 adj[i] = -1;
5016 d++;
5017 }
5018 else
5019 adj[i] = i - d;
5020
5021 for (v = aggval; v; v = v->next)
5022 v->index = adj[v->index];
5023 }
5024
5025 /* Dominator walker driving the ipcp modification phase. */
5026
5027 class ipcp_modif_dom_walker : public dom_walker
5028 {
5029 public:
5030 ipcp_modif_dom_walker (struct func_body_info *fbi,
5031 vec<ipa_param_descriptor> descs,
5032 struct ipa_agg_replacement_value *av,
5033 bool *sc, bool *cc)
5034 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5035 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5036
5037 virtual void before_dom_children (basic_block);
5038
5039 private:
5040 struct func_body_info *m_fbi;
5041 vec<ipa_param_descriptor> m_descriptors;
5042 struct ipa_agg_replacement_value *m_aggval;
5043 bool *m_something_changed, *m_cfg_changed;
5044 };
5045
5046 void
5047 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5048 {
5049 gimple_stmt_iterator gsi;
5050 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5051 {
5052 struct ipa_agg_replacement_value *v;
5053 gimple stmt = gsi_stmt (gsi);
5054 tree rhs, val, t;
5055 HOST_WIDE_INT offset, size;
5056 int index;
5057 bool by_ref, vce;
5058
5059 if (!gimple_assign_load_p (stmt))
5060 continue;
5061 rhs = gimple_assign_rhs1 (stmt);
5062 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5063 continue;
5064
5065 vce = false;
5066 t = rhs;
5067 while (handled_component_p (t))
5068 {
5069 /* V_C_E can do things like convert an array of integers to one
5070 bigger integer and similar things that we do not handle below. */
5071 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5072 {
5073 vce = true;
5074 break;
5075 }
5076 t = TREE_OPERAND (t, 0);
5077 }
5078 if (vce)
5079 continue;
5080
5081 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5082 &offset, &size, &by_ref))
5083 continue;
5084 for (v = m_aggval; v; v = v->next)
5085 if (v->index == index
5086 && v->offset == offset)
5087 break;
5088 if (!v
5089 || v->by_ref != by_ref
5090 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5091 continue;
5092
5093 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5094 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5095 {
5096 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5097 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5098 else if (TYPE_SIZE (TREE_TYPE (rhs))
5099 == TYPE_SIZE (TREE_TYPE (v->value)))
5100 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5101 else
5102 {
5103 if (dump_file)
5104 {
5105 fprintf (dump_file, " const ");
5106 print_generic_expr (dump_file, v->value, 0);
5107 fprintf (dump_file, " can't be converted to type of ");
5108 print_generic_expr (dump_file, rhs, 0);
5109 fprintf (dump_file, "\n");
5110 }
5111 continue;
5112 }
5113 }
5114 else
5115 val = v->value;
5116
5117 if (dump_file && (dump_flags & TDF_DETAILS))
5118 {
5119 fprintf (dump_file, "Modifying stmt:\n ");
5120 print_gimple_stmt (dump_file, stmt, 0, 0);
5121 }
5122 gimple_assign_set_rhs_from_tree (&gsi, val);
5123 update_stmt (stmt);
5124
5125 if (dump_file && (dump_flags & TDF_DETAILS))
5126 {
5127 fprintf (dump_file, "into:\n ");
5128 print_gimple_stmt (dump_file, stmt, 0, 0);
5129 fprintf (dump_file, "\n");
5130 }
5131
5132 *m_something_changed = true;
5133 if (maybe_clean_eh_stmt (stmt)
5134 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5135 *m_cfg_changed = true;
5136 }
5137
5138 }
5139
5140 /* IPCP transformation phase doing propagation of aggregate values. */
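/* For example (hypothetical GIMPLE, invented names): if the chain of
aggregate replacement values records that offset 0 of the memory
pointed to by parameter p always holds the constant 42, a load

x_1 = MEM[(int *) p_2(D)];

in the clone body is rewritten during the dominator walk into

x_1 = 42; */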
5141
5142 unsigned int
5143 ipcp_transform_function (struct cgraph_node *node)
5144 {
5145 vec<ipa_param_descriptor> descriptors = vNULL;
5146 struct func_body_info fbi;
5147 struct ipa_agg_replacement_value *aggval;
5148 int param_count;
5149 bool cfg_changed = false, something_changed = false;
5150
5151 gcc_checking_assert (cfun);
5152 gcc_checking_assert (current_function_decl);
5153
5154 if (dump_file)
5155 fprintf (dump_file, "Modification phase of node %s/%i\n",
5156 node->name (), node->order);
5157
5158 aggval = ipa_get_agg_replacements_for_node (node);
5159 if (!aggval)
5160 return 0;
5161 param_count = count_formal_params (node->decl);
5162 if (param_count == 0)
5163 return 0;
5164 adjust_agg_replacement_values (node, aggval);
5165 if (dump_file)
5166 ipa_dump_agg_replacement_values (dump_file, aggval);
5167
5168 fbi.node = node;
5169 fbi.info = NULL;
5170 fbi.bb_infos = vNULL;
5171 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5172 fbi.param_count = param_count;
5173 fbi.aa_walked = 0;
5174
5175 descriptors.safe_grow_cleared (param_count);
5176 ipa_populate_param_decls (node, descriptors);
5177 calculate_dominance_info (CDI_DOMINATORS);
5178 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5179 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5180
5181 int i;
5182 struct ipa_bb_info *bi;
5183 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5184 free_ipa_bb_info (bi);
5185 fbi.bb_infos.release ();
5186 free_dominance_info (CDI_DOMINATORS);
5187 (*ipa_node_agg_replacements)[node->uid] = NULL;
5188 descriptors.release ();
5189
5190 if (!something_changed)
5191 return 0;
5192 else if (cfg_changed)
5193 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5194 else
5195 return TODO_update_ssa_only_virtuals;
5196 }