1 /* Interprocedural analyses.
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Vector where the edge argument infos are actually stored. */
61 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
62
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
67
68 /* Description of a reference to an IPA constant. */
69 struct ipa_cst_ref_desc
70 {
71 /* Edge that corresponds to the statement which took the reference. */
72 struct cgraph_edge *cs;
73 /* Linked list of duplicates created when call graph edges are cloned. */
74 struct ipa_cst_ref_desc *next_duplicate;
75 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
76 is out of control. */
77 int refcount;
78 };
79
80 /* Allocation pool for reference descriptions. */
81
82 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
83 ("IPA-PROP ref descriptions");
84
85 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88 static bool
89 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90 {
91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
92
93 if (!fs_opts)
94 return false;
95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
96 }
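/* A purely illustrative example (hypothetical user code, not part of this
   file): a function such as

     __attribute__ ((optimize ("O0")))
     int keep_as_is (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimization disabled, so
   the predicate above returns true and IPA-CP leaves it alone.  */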
97
98 /* Return index of the formal whose tree is PTREE, looked up in the vector
99 of parameter DESCRIPTORS.  Return -1 if it is not found. */
100
101 static int
102 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
103 {
104 int i, count;
105
106 count = descriptors.length ();
107 for (i = 0; i < count; i++)
108 if (descriptors[i].decl_or_type == ptree)
109 return i;
110
111 return -1;
112 }
113
114 /* Return index of the formal whose tree is PTREE in the function which
115 corresponds to INFO. */
116
117 int
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 {
120 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121 }
122
123 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
124 to NODE. */
125
126 static void
127 ipa_populate_param_decls (struct cgraph_node *node,
128 vec<ipa_param_descriptor> &descriptors)
129 {
130 tree fndecl;
131 tree fnargs;
132 tree parm;
133 int param_num;
134
135 fndecl = node->decl;
136 gcc_assert (gimple_has_body_p (fndecl));
137 fnargs = DECL_ARGUMENTS (fndecl);
138 param_num = 0;
139 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
140 {
141 descriptors[param_num].decl_or_type = parm;
142 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
143 true);
144 param_num++;
145 }
146 }
147
148 /* Return how many formal parameters FNDECL has. */
149
150 int
151 count_formal_params (tree fndecl)
152 {
153 tree parm;
154 int count = 0;
155 gcc_assert (gimple_has_body_p (fndecl));
156
157 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
158 count++;
159
160 return count;
161 }
162
163 /* Dump information about the Ith formal parameter of the function
164 corresponding to INFO to FILE.  Note there is no setter function as the
165 descriptor array is built just once, by ipa_initialize_node_params. */
166
167 void
168 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
169 {
170 fprintf (file, "param #%i", i);
171 if (info->descriptors[i].decl_or_type)
172 {
173 fprintf (file, " ");
174 print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
175 }
176 }
177
178 /* Initialize the ipa_node_params structure associated with NODE
179 to hold PARAM_COUNT parameters. */
180
181 void
182 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
183 {
184 struct ipa_node_params *info = IPA_NODE_REF (node);
185
186 if (!info->descriptors.exists () && param_count)
187 info->descriptors.safe_grow_cleared (param_count);
188 }
189
190 /* Initialize the ipa_node_params structure associated with NODE by counting
191 the function parameters, creating the descriptors and populating their
192 param_decls. */
193
194 void
195 ipa_initialize_node_params (struct cgraph_node *node)
196 {
197 struct ipa_node_params *info = IPA_NODE_REF (node);
198
199 if (!info->descriptors.exists ())
200 {
201 ipa_alloc_node_params (node, count_formal_params (node->decl));
202 ipa_populate_param_decls (node, info->descriptors);
203 }
204 }
205
206 /* Print the jump functions associated with call graph edge CS to file F. */
207
208 static void
209 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
210 {
211 int i, count;
212
213 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
214 for (i = 0; i < count; i++)
215 {
216 struct ipa_jump_func *jump_func;
217 enum jump_func_type type;
218
219 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
220 type = jump_func->type;
221
222 fprintf (f, " param %d: ", i);
223 if (type == IPA_JF_UNKNOWN)
224 fprintf (f, "UNKNOWN\n");
225 else if (type == IPA_JF_CONST)
226 {
227 tree val = jump_func->value.constant.value;
228 fprintf (f, "CONST: ");
229 print_generic_expr (f, val, 0);
230 if (TREE_CODE (val) == ADDR_EXPR
231 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
232 {
233 fprintf (f, " -> ");
234 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
235 0);
236 }
237 fprintf (f, "\n");
238 }
239 else if (type == IPA_JF_PASS_THROUGH)
240 {
241 fprintf (f, "PASS THROUGH: ");
242 fprintf (f, "%d, op %s",
243 jump_func->value.pass_through.formal_id,
244 get_tree_code_name (jump_func->value.pass_through.operation));
245 if (jump_func->value.pass_through.operation != NOP_EXPR)
246 {
247 fprintf (f, " ");
248 print_generic_expr (f,
249 jump_func->value.pass_through.operand, 0);
250 }
251 if (jump_func->value.pass_through.agg_preserved)
252 fprintf (f, ", agg_preserved");
253 fprintf (f, "\n");
254 }
255 else if (type == IPA_JF_ANCESTOR)
256 {
257 fprintf (f, "ANCESTOR: ");
258 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
259 jump_func->value.ancestor.formal_id,
260 jump_func->value.ancestor.offset);
261 if (jump_func->value.ancestor.agg_preserved)
262 fprintf (f, ", agg_preserved");
263 fprintf (f, "\n");
264 }
265
266 if (jump_func->agg.items)
267 {
268 struct ipa_agg_jf_item *item;
269 int j;
270
271 fprintf (f, " Aggregate passed by %s:\n",
272 jump_func->agg.by_ref ? "reference" : "value");
273 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
274 {
275 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
276 item->offset);
277 if (TYPE_P (item->value))
278 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
279 tree_to_uhwi (TYPE_SIZE (item->value)));
280 else
281 {
282 fprintf (f, "cst: ");
283 print_generic_expr (f, item->value, 0);
284 }
285 fprintf (f, "\n");
286 }
287 }
288
289 struct ipa_polymorphic_call_context *ctx
290 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
291 if (ctx && !ctx->useless_p ())
292 {
293 fprintf (f, " Context: ");
294 ctx->dump (f);
295 }
296
297 if (jump_func->bits.known)
298 {
299 fprintf (f, " value: "); print_hex (jump_func->bits.value, f);
300 fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
301 fprintf (f, "\n");
302 }
303 else
304 fprintf (f, " Unknown bits\n");
305
306 if (jump_func->vr_known)
307 {
308 fprintf (f, " VR ");
309 fprintf (f, "%s[",
310 (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
311 print_decs (jump_func->m_vr.min, f);
312 fprintf (f, ", ");
313 print_decs (jump_func->m_vr.max, f);
314 fprintf (f, "]\n");
315 }
316 else
317 fprintf (f, " Unknown VR\n");
318 }
319 }
320
321
322 /* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
325 void
326 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
327 {
328 struct cgraph_edge *cs;
329
330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
331 node->order);
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
338 xstrdup_for_dump (node->name ()), node->order,
339 xstrdup_for_dump (cs->callee->name ()),
340 cs->callee->order);
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
343
344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
345 {
346 struct cgraph_indirect_call_info *ii;
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
349
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
352 fprintf (f, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
354 ii->member_ptr ? "member ptr" : "aggregate",
355 ii->param_index, ii->offset,
356 ii->by_ref ? "by reference" : "by value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i, "
359 "offset " HOST_WIDE_INT_PRINT_DEC,
360 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
361 ii->offset);
362
363 if (cs->call_stmt)
364 {
365 fprintf (f, ", for stmt ");
366 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
367 }
368 else
369 fprintf (f, "\n");
370 if (ii->polymorphic)
371 ii->context.dump (f);
372 ipa_print_node_jump_functions_for_edge (f, cs);
373 }
374 }
375
376 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
377
378 void
379 ipa_print_all_jump_functions (FILE *f)
380 {
381 struct cgraph_node *node;
382
383 fprintf (f, "\nJump functions:\n");
384 FOR_EACH_FUNCTION (node)
385 {
386 ipa_print_node_jump_functions (f, node);
387 }
388 }
389
390 /* Set JFUNC to be a jump function carrying no information at all. */
391
392 static void
393 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
394 {
395 jfunc->type = IPA_JF_UNKNOWN;
396 jfunc->bits.known = false;
397 jfunc->vr_known = false;
398 }
399
400 /* Set DST to be a copy of another constant jump function SRC (to be used
401 by jump function combination code).  The two functions will share their rdesc. */
402
403 static void
404 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
405 struct ipa_jump_func *src)
406
407 {
408 gcc_checking_assert (src->type == IPA_JF_CONST);
409 dst->type = IPA_JF_CONST;
410 dst->value.constant = src->value.constant;
411 }
412
413 /* Set JFUNC to be a constant jump function. */
414
415 static void
416 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
417 struct cgraph_edge *cs)
418 {
419 jfunc->type = IPA_JF_CONST;
420 jfunc->value.constant.value = unshare_expr_without_location (constant);
421
422 if (TREE_CODE (constant) == ADDR_EXPR
423 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
424 {
425 struct ipa_cst_ref_desc *rdesc;
426
427 rdesc = ipa_refdesc_pool.allocate ();
428 rdesc->cs = cs;
429 rdesc->next_duplicate = NULL;
430 rdesc->refcount = 1;
431 jfunc->value.constant.rdesc = rdesc;
432 }
433 else
434 jfunc->value.constant.rdesc = NULL;
435 }
436
437 /* Set JFUNC to be a simple pass-through jump function. */
438 static void
439 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
440 bool agg_preserved)
441 {
442 jfunc->type = IPA_JF_PASS_THROUGH;
443 jfunc->value.pass_through.operand = NULL_TREE;
444 jfunc->value.pass_through.formal_id = formal_id;
445 jfunc->value.pass_through.operation = NOP_EXPR;
446 jfunc->value.pass_through.agg_preserved = agg_preserved;
447 }
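/* As an illustrative sketch (hypothetical caller), a call such as

     void caller (int a)
     {
       callee (a);
     }

   passes the first formal of caller on unchanged, which is described by a
   simple pass-through jump function with formal_id 0 and operation
   NOP_EXPR.  */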
448
449 /* Set JFUNC to be a unary pass-through jump function. */
450
451 static void
452 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
453 enum tree_code operation)
454 {
455 jfunc->type = IPA_JF_PASS_THROUGH;
456 jfunc->value.pass_through.operand = NULL_TREE;
457 jfunc->value.pass_through.formal_id = formal_id;
458 jfunc->value.pass_through.operation = operation;
459 jfunc->value.pass_through.agg_preserved = false;
460 }
461 /* Set JFUNC to be an arithmetic pass-through jump function. */
462
463 static void
464 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
465 tree operand, enum tree_code operation)
466 {
467 jfunc->type = IPA_JF_PASS_THROUGH;
468 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
469 jfunc->value.pass_through.formal_id = formal_id;
470 jfunc->value.pass_through.operation = operation;
471 jfunc->value.pass_through.agg_preserved = false;
472 }
473
474 /* Set JFUNC to be an ancestor jump function. */
475
476 static void
477 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
478 int formal_id, bool agg_preserved)
479 {
480 jfunc->type = IPA_JF_ANCESTOR;
481 jfunc->value.ancestor.formal_id = formal_id;
482 jfunc->value.ancestor.offset = offset;
483 jfunc->value.ancestor.agg_preserved = agg_preserved;
484 }
485
486 /* Get IPA BB information about the given BB.  FBI is the context of analysis
487 of this function body. */
488
489 static struct ipa_bb_info *
490 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
491 {
492 gcc_checking_assert (fbi);
493 return &fbi->bb_infos[bb->index];
494 }
495
496 /* Structure to be passed in between detect_type_change and
497 check_stmt_for_type_change. */
498
499 struct prop_type_change_info
500 {
501 /* Offset into the object where there is the virtual method pointer we are
502 looking for. */
503 HOST_WIDE_INT offset;
504 /* The declaration or SSA_NAME pointer of the base that we are checking for
505 type change. */
506 tree object;
507 /* Set to true if dynamic type change has been detected. */
508 bool type_maybe_changed;
509 };
510
511 /* Return true if STMT can modify a virtual method table pointer.
512
513 This function makes special assumptions about both constructors and
514 destructors which are all the functions that are allowed to alter the VMT
515 pointers. It assumes that destructors begin with assignment into all VMT
516 pointers and that constructors essentially look in the following way:
517
518 1) The very first thing they do is that they call constructors of ancestor
519 sub-objects that have them.
520
521 2) Then VMT pointers of this and all its ancestors are set to new values
522 corresponding to the type associated with the constructor.
523
524 3) Only afterwards, other stuff such as constructor of member sub-objects
525 and the code written by the user is run. Only this may include calling
526 virtual functions, directly or indirectly.
527
528 There is no way to call a constructor of an ancestor sub-object in any
529 other way.
530
531 This means that we do not have to care whether constructors get the correct
532 type information because they will always change it (in fact, if we define
533 the type to be given by the VMT pointer, it is undefined).
534
535 The most important fact to derive from the above is that if, for some
536 statement in section 3, we try to detect whether the dynamic type has
537 changed, we can safely ignore all calls as we examine the function body
538 backwards until we reach statements in section 2 because these calls cannot
539 be ancestor constructors or destructors (if the input is not bogus) and so
540 do not change the dynamic type (this holds true only for automatically
541 allocated objects but at the moment we devirtualize only these). We then
542 must detect that statements in section 2 change the dynamic type and can try
543 to derive the new type. That is enough and we can stop, we will never see
544 the calls into constructors of sub-objects in this code. Therefore we can
545 safely ignore all call statements that we traverse.
546 */
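/* A hypothetical constructor illustrating the three sections described
   above (pseudo-C, purely for illustration):

     B::B (struct B *this)
     {
       A::A (&this->base);            1) construct ancestor sub-objects
       this->vptr = &vtable_for_B;    2) store the new VMT pointer(s)
       user_code (this);              3) user code, may call virtual methods
     }
*/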
547
548 static bool
549 stmt_may_be_vtbl_ptr_store (gimple *stmt)
550 {
551 if (is_gimple_call (stmt))
552 return false;
553 if (gimple_clobber_p (stmt))
554 return false;
555 else if (is_gimple_assign (stmt))
556 {
557 tree lhs = gimple_assign_lhs (stmt);
558
559 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
560 {
561 if (flag_strict_aliasing
562 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
563 return false;
564
565 if (TREE_CODE (lhs) == COMPONENT_REF
566 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
567 return false;
568 /* In the future we might want to use get_base_ref_and_offset to find
569 if there is a field corresponding to the offset and if so, proceed
570 almost like if it was a component ref. */
571 }
572 }
573 return true;
574 }
575
576 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
577 to check whether a particular statement may modify the virtual table
578 pointer.  It stores its result into DATA, which points to a
579 prop_type_change_info structure. */
580
581 static bool
582 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
583 {
584 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
585 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
586
587 if (stmt_may_be_vtbl_ptr_store (stmt))
588 {
589 tci->type_maybe_changed = true;
590 return true;
591 }
592 else
593 return false;
594 }
595
596 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
597 or reference in FUNCTION.  Return true if the dynamic type may change
598 between the beginning of the function and the invocation of CALL.
599 
600 Generally, functions are not allowed to change the type of such instances,
601 but they may call destructors.  We assume that methods cannot destroy the
602 THIS pointer.  Also, as a special case, constructors and destructors may
603 change the type of the THIS pointer. */
604
605 static bool
606 param_type_may_change_p (tree function, tree arg, gimple *call)
607 {
608 /* Pure functions cannot make any changes to the dynamic type;
609 that requires writing to memory. */
610 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
611 return false;
612 /* We need to check if we are within an inlined constructor
613 or destructor (ideally we would have a way to check that the
614 inlined cdtor is actually working on ARG, but we do not have an
615 easy way to tell, so we punt on all non-pure cdtors.
616 We may also record the types of cdtors and, once we know the type
617 of the instance, match them.
618 
619 Also, code unification optimizations may merge calls from
620 different blocks, making return values unreliable.  So we
621 do nothing during late optimization. */
622 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
623 return true;
624 if (TREE_CODE (arg) == SSA_NAME
625 && SSA_NAME_IS_DEFAULT_DEF (arg)
626 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
627 {
628 /* Normal (non-THIS) argument. */
629 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
630 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
631 /* THIS pointer of an method - here we want to watch constructors
632 and destructors as those definitely may change the dynamic
633 type. */
634 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
635 && !DECL_CXX_CONSTRUCTOR_P (function)
636 && !DECL_CXX_DESTRUCTOR_P (function)
637 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
638 {
639 /* Walk the inline stack and watch out for ctors/dtors. */
640 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
641 block = BLOCK_SUPERCONTEXT (block))
642 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
643 return true;
644 return false;
645 }
646 }
647 return true;
648 }
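/* For illustration: a THIS argument inside a constructor is precisely the
   case in which we must answer that the type may change; e.g. in the
   hypothetical

     B::B () { register_self (this); }

   the dynamic type of *this is still in flux when register_self is called,
   whereas the same argument inside an ordinary method is considered
   stable.  */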
649
650 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
651 callsite CALL) by looking for assignments to its virtual table pointer. If
652 it is, return true and fill in the jump function JFUNC with relevant type
653 information or set it to unknown. ARG is the object itself (not a pointer
654 to it, unless dereferenced). BASE is the base of the memory access as
655 returned by get_ref_base_and_extent, as is the offset.
656
657 This is a helper function for detect_type_change and detect_type_change_ssa
658 that does the heavy work, which is usually unnecessary. */
659
660 static bool
661 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
662 gcall *call, struct ipa_jump_func *jfunc,
663 HOST_WIDE_INT offset)
664 {
665 struct prop_type_change_info tci;
666 ao_ref ao;
667 bool entry_reached = false;
668
669 gcc_checking_assert (DECL_P (arg)
670 || TREE_CODE (arg) == MEM_REF
671 || handled_component_p (arg));
672
673 comp_type = TYPE_MAIN_VARIANT (comp_type);
674
675 /* Const calls cannot call virtual methods through VMT and so type changes do
676 not matter. */
677 if (!flag_devirtualize || !gimple_vuse (call)
678 /* Be sure expected_type is polymorphic. */
679 || !comp_type
680 || TREE_CODE (comp_type) != RECORD_TYPE
681 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
682 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
683 return true;
684
685 ao_ref_init (&ao, arg);
686 ao.base = base;
687 ao.offset = offset;
688 ao.size = POINTER_SIZE;
689 ao.max_size = ao.size;
690
691 tci.offset = offset;
692 tci.object = get_base_address (arg);
693 tci.type_maybe_changed = false;
694
695 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
696 &tci, NULL, &entry_reached);
697 if (!tci.type_maybe_changed)
698 return false;
699
700 ipa_set_jf_unknown (jfunc);
701 return true;
702 }
703
704 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
705 If it is, return true and fill in the jump function JFUNC with relevant type
706 information or set it to unknown. ARG is the object itself (not a pointer
707 to it, unless dereferenced). BASE is the base of the memory access as
708 returned by get_ref_base_and_extent, as is the offset. */
709
710 static bool
711 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
712 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
713 {
714 if (!flag_devirtualize)
715 return false;
716
717 if (TREE_CODE (base) == MEM_REF
718 && !param_type_may_change_p (current_function_decl,
719 TREE_OPERAND (base, 0),
720 call))
721 return false;
722 return detect_type_change_from_memory_writes (arg, base, comp_type,
723 call, jfunc, offset);
724 }
725
726 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
727 SSA name (its dereference will become the base and the offset is assumed to
728 be zero). */
729
730 static bool
731 detect_type_change_ssa (tree arg, tree comp_type,
732 gcall *call, struct ipa_jump_func *jfunc)
733 {
734 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
735 if (!flag_devirtualize
736 || !POINTER_TYPE_P (TREE_TYPE (arg)))
737 return false;
738
739 if (!param_type_may_change_p (current_function_decl, arg, call))
740 return false;
741
742 arg = build2 (MEM_REF, ptr_type_node, arg,
743 build_int_cst (ptr_type_node, 0));
744
745 return detect_type_change_from_memory_writes (arg, arg, comp_type,
746 call, jfunc, 0);
747 }
748
749 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
750 boolean variable pointed to by DATA. */
751
752 static bool
753 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
754 void *data)
755 {
756 bool *b = (bool *) data;
757 *b = true;
758 return true;
759 }
760
761 /* Return true if we have already walked so many statements in AA that we
762 should really just start giving up. */
763
764 static bool
765 aa_overwalked (struct ipa_func_body_info *fbi)
766 {
767 gcc_checking_assert (fbi);
768 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
769 }
770
771 /* Find the nearest valid aa status for parameter specified by INDEX that
772 dominates BB. */
773
774 static struct ipa_param_aa_status *
775 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
776 int index)
777 {
778 while (true)
779 {
780 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
781 if (!bb)
782 return NULL;
783 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
784 if (!bi->param_aa_statuses.is_empty ()
785 && bi->param_aa_statuses[index].valid)
786 return &bi->param_aa_statuses[index];
787 }
788 }
789
790 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
791 structures and/or initialize the result with a dominating description as
792 necessary. */
793
794 static struct ipa_param_aa_status *
795 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
796 int index)
797 {
798 gcc_checking_assert (fbi);
799 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
800 if (bi->param_aa_statuses.is_empty ())
801 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
802 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
803 if (!paa->valid)
804 {
805 gcc_checking_assert (!paa->parm_modified
806 && !paa->ref_modified
807 && !paa->pt_modified);
808 struct ipa_param_aa_status *dom_paa;
809 dom_paa = find_dominating_aa_status (fbi, bb, index);
810 if (dom_paa)
811 *paa = *dom_paa;
812 else
813 paa->valid = true;
814 }
815
816 return paa;
817 }
818
819 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
820 a value known not to be modified in this function before reaching the
821 statement STMT.  FBI holds information about the function gathered so far
822 but it does not survive the summary building stage. */
823
824 static bool
825 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
826 gimple *stmt, tree parm_load)
827 {
828 struct ipa_param_aa_status *paa;
829 bool modified = false;
830 ao_ref refd;
831
832 tree base = get_base_address (parm_load);
833 gcc_assert (TREE_CODE (base) == PARM_DECL);
834 if (TREE_READONLY (base))
835 return true;
836
837 /* FIXME: FBI can be NULL if we are being called from outside
838 ipa_node_analysis or ipcp_transform_function, which currently happens
839 during inlining analysis. It would be great to extend fbi's lifetime and
840 always have it. Currently, we are just not afraid of too much walking in
841 that case. */
842 if (fbi)
843 {
844 if (aa_overwalked (fbi))
845 return false;
846 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
847 if (paa->parm_modified)
848 return false;
849 }
850 else
851 paa = NULL;
852
853 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
854 ao_ref_init (&refd, parm_load);
855 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
856 &modified, NULL);
857 if (fbi)
858 fbi->aa_walked += walked;
859 if (paa && modified)
860 paa->parm_modified = true;
861 return !modified;
862 }
863
864 /* Main worker for load_from_unmodified_param and load_from_param.
865 If STMT is an assignment that loads a value from a parameter declaration,
866 return the index of the parameter in ipa_node_params. Otherwise return -1. */
867
868 static int
869 load_from_param_1 (struct ipa_func_body_info *fbi,
870 vec<ipa_param_descriptor> descriptors,
871 gimple *stmt)
872 {
873 int index;
874 tree op1;
875
876 gcc_checking_assert (is_gimple_assign (stmt));
877 op1 = gimple_assign_rhs1 (stmt);
878 if (TREE_CODE (op1) != PARM_DECL)
879 return -1;
880
881 index = ipa_get_param_decl_index_1 (descriptors, op1);
882 if (index < 0
883 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
884 return -1;
885
886 return index;
887 }
888
889 /* If STMT is an assignment that loads a value from a parameter declaration,
890 return the index of the parameter in ipa_node_params which has not been
891 modified. Otherwise return -1. */
892
893 static int
894 load_from_unmodified_param (struct ipa_func_body_info *fbi,
895 vec<ipa_param_descriptor> descriptors,
896 gimple *stmt)
897 {
898 if (!gimple_assign_single_p (stmt))
899 return -1;
900
901 return load_from_param_1 (fbi, descriptors, stmt);
902 }
903
904 /* If STMT is an assignment that loads a value from a parameter declaration,
905 return the index of the parameter in ipa_node_params. Otherwise return -1. */
906
907 static int
908 load_from_param (struct ipa_func_body_info *fbi,
909 vec<ipa_param_descriptor> descriptors,
910 gimple *stmt)
911 {
912 if (!is_gimple_assign (stmt))
913 return -1;
914
915 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
916 if ((get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
917 && (get_gimple_rhs_class (rhs_code) != GIMPLE_UNARY_RHS))
918 return -1;
919
920 return load_from_param_1 (fbi, descriptors, stmt);
921 }
922
923 /* Return true if memory reference REF (which must be a load through parameter
924 with INDEX) loads data that are known to be unmodified in this function
925 before reaching statement STMT. */
926
927 static bool
928 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
929 int index, gimple *stmt, tree ref)
930 {
931 struct ipa_param_aa_status *paa;
932 bool modified = false;
933 ao_ref refd;
934
935 /* FIXME: FBI can be NULL if we are being called from outside
936 ipa_node_analysis or ipcp_transform_function, which currently happens
937 during inlining analysis. It would be great to extend fbi's lifetime and
938 always have it. Currently, we are just not afraid of too much walking in
939 that case. */
940 if (fbi)
941 {
942 if (aa_overwalked (fbi))
943 return false;
944 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
945 if (paa->ref_modified)
946 return false;
947 }
948 else
949 paa = NULL;
950
951 gcc_checking_assert (gimple_vuse (stmt));
952 ao_ref_init (&refd, ref);
953 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
954 &modified, NULL);
955 if (fbi)
956 fbi->aa_walked += walked;
957 if (paa && modified)
958 paa->ref_modified = true;
959 return !modified;
960 }
961
962 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
963 is known to be unmodified in this function before reaching call statement
964 CALL into which it is passed. FBI describes the function body. */
965
966 static bool
967 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
968 gimple *call, tree parm)
969 {
970 bool modified = false;
971 ao_ref refd;
972
973 /* It's unnecessary to calculate anything about memory contents for a const
974 function because it is not going to use it.  But do not cache the result
975 either. Also, no such calculations for non-pointers. */
976 if (!gimple_vuse (call)
977 || !POINTER_TYPE_P (TREE_TYPE (parm))
978 || aa_overwalked (fbi))
979 return false;
980
981 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
982 gimple_bb (call),
983 index);
984 if (paa->pt_modified)
985 return false;
986
987 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
988 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
989 &modified, NULL);
990 fbi->aa_walked += walked;
991 if (modified)
992 paa->pt_modified = true;
993 return !modified;
994 }
995
996 /* Return true if we can prove that OP is a memory reference loading
997 data from an aggregate passed as a parameter.
998
999 The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it returns
1000 false if it cannot prove that the value has not been modified before the
1001 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1002 if it cannot prove the value has not been modified, in that case it will
1003 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1004
1005 FBI and DESCRIPTORS describe parameters of the current function (but FBI
1006 can be NULL), STMT is the load statement.  If the function returns true,
1007 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
1008 within the aggregate and whether it is a load from a value passed by
1009 reference respectively. */
1010
1011 bool
1012 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1013 vec<ipa_param_descriptor> descriptors,
1014 gimple *stmt, tree op, int *index_p,
1015 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1016 bool *by_ref_p, bool *guaranteed_unmodified)
1017 {
1018 int index;
1019 HOST_WIDE_INT size, max_size;
1020 bool reverse;
1021 tree base
1022 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
1023
1024 if (max_size == -1 || max_size != size || *offset_p < 0)
1025 return false;
1026
1027 if (DECL_P (base))
1028 {
1029 int index = ipa_get_param_decl_index_1 (descriptors, base);
1030 if (index >= 0
1031 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1032 {
1033 *index_p = index;
1034 *by_ref_p = false;
1035 if (size_p)
1036 *size_p = size;
1037 if (guaranteed_unmodified)
1038 *guaranteed_unmodified = true;
1039 return true;
1040 }
1041 return false;
1042 }
1043
1044 if (TREE_CODE (base) != MEM_REF
1045 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1046 || !integer_zerop (TREE_OPERAND (base, 1)))
1047 return false;
1048
1049 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1050 {
1051 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1052 index = ipa_get_param_decl_index_1 (descriptors, parm);
1053 }
1054 else
1055 {
1056 /* This branch catches situations where a pointer parameter is not a
1057 gimple register, for example:
1058
1059 void hip7(S*) (struct S * p)
1060 {
1061 void (*<T2e4>) (struct S *) D.1867;
1062 struct S * p.1;
1063
1064 <bb 2>:
1065 p.1_1 = p;
1066 D.1867_2 = p.1_1->f;
1067 D.1867_2 ();
1068 gdp = &p;
1069 */
1070
1071 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1072 index = load_from_unmodified_param (fbi, descriptors, def);
1073 }
1074
1075 if (index >= 0)
1076 {
1077 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1078 if (!data_preserved && !guaranteed_unmodified)
1079 return false;
1080
1081 *index_p = index;
1082 *by_ref_p = true;
1083 if (size_p)
1084 *size_p = size;
1085 if (guaranteed_unmodified)
1086 *guaranteed_unmodified = data_preserved;
1087 return true;
1088 }
1089 return false;
1090 }
1091
1092 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1093 of an assignment statement STMT, try to determine whether we are actually
1094 handling any of the following cases and construct an appropriate jump
1095 function into JFUNC if so:
1096
1097 1) The passed value is loaded from a formal parameter which is not a gimple
1098 register (most probably because it is addressable, the value has to be
1099 scalar) and we can guarantee the value has not changed. This case can
1100 therefore be described by a simple pass-through jump function. For example:
1101
1102 foo (int a)
1103 {
1104 int a.0;
1105
1106 a.0_2 = a;
1107 bar (a.0_2);
1108
1109 2) The passed value can be described by a simple arithmetic pass-through
1110 jump function. E.g.
1111
1112 foo (int a)
1113 {
1114 int D.2064;
1115
1116 D.2064_4 = a.1(D) + 4;
1117 bar (D.2064_4);
1118
1119 This case can also occur in combination of the previous one, e.g.:
1120
1121 foo (int a, int z)
1122 {
1123 int a.0;
1124 int D.2064;
1125
1126 a.0_3 = a;
1127 D.2064_4 = a.0_3 + 4;
1128 foo (D.2064_4);
1129
1130 3) The passed value is an address of an object within another one (which
1131 also passed by reference). Such situations are described by an ancestor
1132 jump function and describe situations such as:
1133
1134 B::foo() (struct B * const this)
1135 {
1136 struct A * D.1845;
1137
1138 D.1845_2 = &this_1(D)->D.1748;
1139 A::bar (D.1845_2);
1140
1141 INFO is the structure describing individual parameters at different
1142 stages of IPA optimizations.  PARMS_AINFO contains the information that is
1143 only needed for intraprocedural analysis. */
1144
1145 static void
1146 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1147 struct ipa_node_params *info,
1148 struct ipa_jump_func *jfunc,
1149 gcall *call, gimple *stmt, tree name,
1150 tree param_type)
1151 {
1152 HOST_WIDE_INT offset, size, max_size;
1153 tree op1, tc_ssa, base, ssa;
1154 bool reverse;
1155 int index;
1156 gimple *stmt2 = stmt;
1157
1158 op1 = gimple_assign_rhs1 (stmt);
1159
1160 if (TREE_CODE (op1) == SSA_NAME)
1161 {
1162 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1163 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1164 else
1165 {
1166 index = load_from_param (fbi, info->descriptors,
1167 SSA_NAME_DEF_STMT (op1));
1168 stmt2 = SSA_NAME_DEF_STMT (op1);
1169 }
1170 tc_ssa = op1;
1171 }
1172 else
1173 {
1174 index = load_from_param (fbi, info->descriptors, stmt);
1175 tc_ssa = gimple_assign_lhs (stmt);
1176 }
1177
1178 if (index >= 0)
1179 {
1180 tree op2 = gimple_assign_rhs2 (stmt);
1181
1182 if (op2)
1183 {
1184 if (!is_gimple_ip_invariant (op2)
1185 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1186 && !useless_type_conversion_p (TREE_TYPE (name),
1187 TREE_TYPE (op1))))
1188 return;
1189
1190 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1191 gimple_assign_rhs_code (stmt));
1192 }
1193 else if (gimple_assign_single_p (stmt))
1194 {
1195 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1196 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1197 }
1198 else if (is_gimple_assign (stmt2)
1199 && (gimple_expr_code (stmt2) != NOP_EXPR)
1200 && (TREE_CODE_CLASS (gimple_expr_code (stmt2)) == tcc_unary))
1201 ipa_set_jf_unary_pass_through (jfunc, index,
1202 gimple_assign_rhs_code (stmt2));
1203 return;
1204 }
1205
1206 if (TREE_CODE (op1) != ADDR_EXPR)
1207 return;
1208 op1 = TREE_OPERAND (op1, 0);
1209 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1210 return;
1211 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1212 if (TREE_CODE (base) != MEM_REF
1213 /* If this is a varying address, punt. */
1214 || max_size == -1
1215 || max_size != size)
1216 return;
1217 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1218 ssa = TREE_OPERAND (base, 0);
1219 if (TREE_CODE (ssa) != SSA_NAME
1220 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1221 || offset < 0)
1222 return;
1223
1224 /* Dynamic types are changed in constructors and destructors. */
1225 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1226 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1227 ipa_set_ancestor_jf (jfunc, offset, index,
1228 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1229 }
1230
1231 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1232 it looks like:
1233
1234 iftmp.1_3 = &obj_2(D)->D.1762;
1235
1236 The base of the MEM_REF must be a default definition SSA NAME of a
1237 parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1238 whole MEM_REF expression is returned and the offset calculated from any
1239 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1240 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1241
1242 static tree
1243 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1244 {
1245 HOST_WIDE_INT size, max_size;
1246 tree expr, parm, obj;
1247 bool reverse;
1248
1249 if (!gimple_assign_single_p (assign))
1250 return NULL_TREE;
1251 expr = gimple_assign_rhs1 (assign);
1252
1253 if (TREE_CODE (expr) != ADDR_EXPR)
1254 return NULL_TREE;
1255 expr = TREE_OPERAND (expr, 0);
1256 obj = expr;
1257 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1258
1259 if (TREE_CODE (expr) != MEM_REF
1260 /* If this is a varying address, punt. */
1261 || max_size == -1
1262 || max_size != size
1263 || *offset < 0)
1264 return NULL_TREE;
1265 parm = TREE_OPERAND (expr, 0);
1266 if (TREE_CODE (parm) != SSA_NAME
1267 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1268 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1269 return NULL_TREE;
1270
1271 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1272 *obj_p = obj;
1273 return expr;
1274 }
1275
1276
1277 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1278 statement PHI, try to find out whether NAME is in fact a
1279 multiple-inheritance typecast from a descendant into an ancestor of a formal
1280 parameter and thus can be described by an ancestor jump function and if so,
1281 write the appropriate function into JFUNC.
1282
1283 Essentially we want to match the following pattern:
1284
1285 if (obj_2(D) != 0B)
1286 goto <bb 3>;
1287 else
1288 goto <bb 4>;
1289
1290 <bb 3>:
1291 iftmp.1_3 = &obj_2(D)->D.1762;
1292
1293 <bb 4>:
1294 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1295 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1296 return D.1879_6; */
1297
1298 static void
1299 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1300 struct ipa_node_params *info,
1301 struct ipa_jump_func *jfunc,
1302 gcall *call, gphi *phi)
1303 {
1304 HOST_WIDE_INT offset;
1305 gimple *assign, *cond;
1306 basic_block phi_bb, assign_bb, cond_bb;
1307 tree tmp, parm, expr, obj;
1308 int index, i;
1309
1310 if (gimple_phi_num_args (phi) != 2)
1311 return;
1312
1313 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1314 tmp = PHI_ARG_DEF (phi, 0);
1315 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1316 tmp = PHI_ARG_DEF (phi, 1);
1317 else
1318 return;
1319 if (TREE_CODE (tmp) != SSA_NAME
1320 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1321 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1322 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1323 return;
1324
1325 assign = SSA_NAME_DEF_STMT (tmp);
1326 assign_bb = gimple_bb (assign);
1327 if (!single_pred_p (assign_bb))
1328 return;
1329 expr = get_ancestor_addr_info (assign, &obj, &offset);
1330 if (!expr)
1331 return;
1332 parm = TREE_OPERAND (expr, 0);
1333 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1334 if (index < 0)
1335 return;
1336
1337 cond_bb = single_pred (assign_bb);
1338 cond = last_stmt (cond_bb);
1339 if (!cond
1340 || gimple_code (cond) != GIMPLE_COND
1341 || gimple_cond_code (cond) != NE_EXPR
1342 || gimple_cond_lhs (cond) != parm
1343 || !integer_zerop (gimple_cond_rhs (cond)))
1344 return;
1345
1346 phi_bb = gimple_bb (phi);
1347 for (i = 0; i < 2; i++)
1348 {
1349 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1350 if (pred != assign_bb && pred != cond_bb)
1351 return;
1352 }
1353
1354 ipa_set_ancestor_jf (jfunc, offset, index,
1355 parm_ref_data_pass_through_p (fbi, index, call, parm));
1356 }
1357
1358 /* Inspect the given TYPE and return true iff it has the same structure (the
1359 same number of fields of the same types) as a C++ member pointer. If
1360 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1361 corresponding fields there. */
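/* For illustration, a C++ pointer to member function is typically lowered
   to a record equivalent to the following sketch (field names are
   hypothetical):

     struct
     {
       void (*__pfn) (struct S *);    the method-pointer field
       long __delta;                  the this-pointer adjustment
     };
*/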
1362
1363 static bool
1364 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1365 {
1366 tree fld;
1367
1368 if (TREE_CODE (type) != RECORD_TYPE)
1369 return false;
1370
1371 fld = TYPE_FIELDS (type);
1372 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1373 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1374 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1375 return false;
1376
1377 if (method_ptr)
1378 *method_ptr = fld;
1379
1380 fld = DECL_CHAIN (fld);
1381 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1382 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1383 return false;
1384 if (delta)
1385 *delta = fld;
1386
1387 if (DECL_CHAIN (fld))
1388 return false;
1389
1390 return true;
1391 }
1392
1393 /* If RHS is an SSA_NAME defined by a simple copy assignment statement,
1394 return the rhs of its defining statement, following any chain of such
1395 copies.  Otherwise return RHS as it is. */
1396
1397 static inline tree
1398 get_ssa_def_if_simple_copy (tree rhs)
1399 {
1400 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1401 {
1402 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1403
1404 if (gimple_assign_single_p (def_stmt))
1405 rhs = gimple_assign_rhs1 (def_stmt);
1406 else
1407 break;
1408 }
1409 return rhs;
1410 }
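/* Illustrative (hypothetical) GIMPLE: given

     b_2 = c;
     a_3 = b_2;

   get_ssa_def_if_simple_copy (a_3) follows the copy chain and returns c.  */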
1411
1412 /* Simple linked list, describing known contents of an aggregate before a
1413 call. */
1414
1415 struct ipa_known_agg_contents_list
1416 {
1417 /* Offset and size of the described part of the aggregate. */
1418 HOST_WIDE_INT offset, size;
1419 /* Known constant value or NULL if the contents are known to be unknown. */
1420 tree constant;
1421 /* Pointer to the next structure in the list. */
1422 struct ipa_known_agg_contents_list *next;
1423 };
1424
1425 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1426 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1427 unless there is a partial overlap, in which case return NULL, or such
1428 element is already there, in which case set *ALREADY_THERE to true. */
1429
1430 static struct ipa_known_agg_contents_list **
1431 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1432 HOST_WIDE_INT lhs_offset,
1433 HOST_WIDE_INT lhs_size,
1434 bool *already_there)
1435 {
1436 struct ipa_known_agg_contents_list **p = list;
1437 while (*p && (*p)->offset < lhs_offset)
1438 {
1439 if ((*p)->offset + (*p)->size > lhs_offset)
1440 return NULL;
1441 p = &(*p)->next;
1442 }
1443
1444 if (*p && (*p)->offset < lhs_offset + lhs_size)
1445 {
1446 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1447 /* We already know this value is subsequently overwritten with
1448 something else. */
1449 *already_there = true;
1450 else
1451 /* Otherwise this is a partial overlap which we cannot
1452 represent. */
1453 return NULL;
1454 }
1455 return p;
1456 }
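/* For illustration (hypothetical offsets, in bits): with existing entries
   covering [0, 32) and [64, 96), a new entry at offset 32 with size 32 is
   inserted between them; one at offset 16 with size 32 partially overlaps
   the first entry and makes the walk give up (NULL); one at offset 0 with
   size 32 matches an existing entry exactly and sets *ALREADY_THERE.  */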
1457
1458 /* Build aggregate jump function from LIST, assuming there are exactly
1459 CONST_COUNT constant entries there and that the offset of the passed argument
1460 is ARG_OFFSET and store it into JFUNC. */
1461
1462 static void
1463 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1464 int const_count, HOST_WIDE_INT arg_offset,
1465 struct ipa_jump_func *jfunc)
1466 {
1467 vec_alloc (jfunc->agg.items, const_count);
1468 while (list)
1469 {
1470 if (list->constant)
1471 {
1472 struct ipa_agg_jf_item item;
1473 item.offset = list->offset - arg_offset;
1474 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1475 item.value = unshare_expr_without_location (list->constant);
1476 jfunc->agg.items->quick_push (item);
1477 }
1478 list = list->next;
1479 }
1480 }
1481
1482 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1483 in ARG is filled in with constant values. ARG can either be an aggregate
1484 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1485 aggregate. JFUNC is the jump function into which the constants are
1486 subsequently stored. */
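/* A purely illustrative caller this analysis is meant to handle:

     struct S s;
     s.a = 1;
     s.b = 2;
     callee (&s);

   Scanning backwards from the call finds both constant stores, so the
   resulting jump function records items at the offsets of a and b with the
   values 1 and 2.  */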
1487
1488 static void
1489 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1490 tree arg_type,
1491 struct ipa_jump_func *jfunc)
1492 {
1493 struct ipa_known_agg_contents_list *list = NULL;
1494 int item_count = 0, const_count = 0;
1495 HOST_WIDE_INT arg_offset, arg_size;
1496 gimple_stmt_iterator gsi;
1497 tree arg_base;
1498 bool check_ref, by_ref;
1499 ao_ref r;
1500
1501 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1502 return;
1503
1504 /* The function operates in three stages. First, we prepare check_ref, r,
1505 arg_base and arg_offset based on what is actually passed as an actual
1506 argument. */
1507
1508 if (POINTER_TYPE_P (arg_type))
1509 {
1510 by_ref = true;
1511 if (TREE_CODE (arg) == SSA_NAME)
1512 {
1513 tree type_size;
1514 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1515 return;
1516 check_ref = true;
1517 arg_base = arg;
1518 arg_offset = 0;
1519 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1520 arg_size = tree_to_uhwi (type_size);
1521 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1522 }
1523 else if (TREE_CODE (arg) == ADDR_EXPR)
1524 {
1525 HOST_WIDE_INT arg_max_size;
1526 bool reverse;
1527
1528 arg = TREE_OPERAND (arg, 0);
1529 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1530 &arg_max_size, &reverse);
1531 if (arg_max_size == -1
1532 || arg_max_size != arg_size
1533 || arg_offset < 0)
1534 return;
1535 if (DECL_P (arg_base))
1536 {
1537 check_ref = false;
1538 ao_ref_init (&r, arg_base);
1539 }
1540 else
1541 return;
1542 }
1543 else
1544 return;
1545 }
1546 else
1547 {
1548 HOST_WIDE_INT arg_max_size;
1549 bool reverse;
1550
1551 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1552
1553 by_ref = false;
1554 check_ref = false;
1555 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1556 &arg_max_size, &reverse);
1557 if (arg_max_size == -1
1558 || arg_max_size != arg_size
1559 || arg_offset < 0)
1560 return;
1561
1562 ao_ref_init (&r, arg);
1563 }
1564
1565 /* Second stage walks back the BB, looks at individual statements and as long
1566 as it is confident of how the statements affect contents of the
1567 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1568 structures describing it. */
1569 gsi = gsi_for_stmt (call);
1570 gsi_prev (&gsi);
1571 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1572 {
1573 struct ipa_known_agg_contents_list *n, **p;
1574 gimple *stmt = gsi_stmt (gsi);
1575 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1576 tree lhs, rhs, lhs_base;
1577 bool reverse;
1578
1579 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1580 continue;
1581 if (!gimple_assign_single_p (stmt))
1582 break;
1583
1584 lhs = gimple_assign_lhs (stmt);
1585 rhs = gimple_assign_rhs1 (stmt);
1586 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1587 || TREE_CODE (lhs) == BIT_FIELD_REF
1588 || contains_bitfld_component_ref_p (lhs))
1589 break;
1590
1591 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1592 &lhs_max_size, &reverse);
1593 if (lhs_max_size == -1
1594 || lhs_max_size != lhs_size)
1595 break;
1596
1597 if (check_ref)
1598 {
1599 if (TREE_CODE (lhs_base) != MEM_REF
1600 || TREE_OPERAND (lhs_base, 0) != arg_base
1601 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1602 break;
1603 }
1604 else if (lhs_base != arg_base)
1605 {
1606 if (DECL_P (lhs_base))
1607 continue;
1608 else
1609 break;
1610 }
1611
1612 bool already_there = false;
1613 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1614 &already_there);
1615 if (!p)
1616 break;
1617 if (already_there)
1618 continue;
1619
1620 rhs = get_ssa_def_if_simple_copy (rhs);
1621 n = XALLOCA (struct ipa_known_agg_contents_list);
1622 n->size = lhs_size;
1623 n->offset = lhs_offset;
1624 if (is_gimple_ip_invariant (rhs))
1625 {
1626 n->constant = rhs;
1627 const_count++;
1628 }
1629 else
1630 n->constant = NULL_TREE;
1631 n->next = *p;
1632 *p = n;
1633
1634 item_count++;
1635 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1636 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1637 break;
1638 }
1639
1640 /* Third stage just goes over the list and creates an appropriate vector of
1641 ipa_agg_jf_item structures out of it, of course only if there are
1642 any known constants to begin with. */
1643
1644 if (const_count)
1645 {
1646 jfunc->agg.by_ref = by_ref;
1647 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1648 }
1649 }
1650
1651 /* Return the Ith param type of callee associated with call graph
1652 edge E. */
1653
1654 tree
1655 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1656 {
1657 int n;
1658 tree type = (e->callee
1659 ? TREE_TYPE (e->callee->decl)
1660 : gimple_call_fntype (e->call_stmt));
1661 tree t = TYPE_ARG_TYPES (type);
1662
1663 for (n = 0; n < i; n++)
1664 {
1665 if (!t)
1666 break;
1667 t = TREE_CHAIN (t);
1668 }
1669 if (t)
1670 return TREE_VALUE (t);
1671 if (!e->callee)
1672 return NULL;
1673 t = DECL_ARGUMENTS (e->callee->decl);
1674 for (n = 0; n < i; n++)
1675 {
1676 if (!t)
1677 return NULL;
1678 t = TREE_CHAIN (t);
1679 }
1680 if (t)
1681 return TREE_TYPE (t);
1682 return NULL;
1683 }
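/* For illustration: given a callee declared as

     void callee (int n, char *p);

   ipa_get_callee_param_type (e, 1) walks TYPE_ARG_TYPES and returns the
   type char *.  When no prototype is available (e.g. K&R declarations),
   the fall-back walk over DECL_ARGUMENTS is used instead.  */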
1684
1685 /* Compute jump functions for all arguments of callsite CS and insert the
1686 information in the jump_functions array in the ipa_edge_args corresponding
1687 to this callsite. */
1688
1689 static void
1690 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1691 struct cgraph_edge *cs)
1692 {
1693 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1694 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1695 gcall *call = cs->call_stmt;
1696 int n, arg_num = gimple_call_num_args (call);
1697 bool useful_context = false;
1698
1699 if (arg_num == 0 || args->jump_functions)
1700 return;
1701 vec_safe_grow_cleared (args->jump_functions, arg_num);
1702 if (flag_devirtualize)
1703 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1704
1705 if (gimple_call_internal_p (call))
1706 return;
1707 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1708 return;
1709
1710 for (n = 0; n < arg_num; n++)
1711 {
1712 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1713 tree arg = gimple_call_arg (call, n);
1714 tree param_type = ipa_get_callee_param_type (cs, n);
1715 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1716 {
1717 tree instance;
1718 struct ipa_polymorphic_call_context context (cs->caller->decl,
1719 arg, cs->call_stmt,
1720 &instance);
1721 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1722 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1723 if (!context.useless_p ())
1724 useful_context = true;
1725 }
1726
1727 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1728 {
1729 bool addr_nonzero = false;
1730 bool strict_overflow = false;
1731
1732 if (TREE_CODE (arg) == SSA_NAME
1733 && param_type
1734 && get_ptr_nonnull (arg))
1735 addr_nonzero = true;
1736 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1737 addr_nonzero = true;
1738
1739 if (addr_nonzero)
1740 {
1741 jfunc->vr_known = true;
1742 jfunc->m_vr.type = VR_ANTI_RANGE;
1743 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1744 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1745 jfunc->m_vr.equiv = NULL;
1746 }
1747 else
1748 gcc_assert (!jfunc->vr_known);
1749 }
1750 else
1751 {
1752 wide_int min, max;
1753 value_range_type type;
1754 if (TREE_CODE (arg) == SSA_NAME
1755 && param_type
1756 && (type = get_range_info (arg, &min, &max))
1757 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1758 {
1759 value_range vr;
1760
1761 vr.type = type;
1762 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1763 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1764 vr.equiv = NULL;
1765 extract_range_from_unary_expr (&jfunc->m_vr,
1766 NOP_EXPR,
1767 param_type,
1768 &vr, TREE_TYPE (arg));
1769 if (jfunc->m_vr.type == VR_RANGE
1770 || jfunc->m_vr.type == VR_ANTI_RANGE)
1771 jfunc->vr_known = true;
1772 else
1773 jfunc->vr_known = false;
1774 }
1775 else
1776 gcc_assert (!jfunc->vr_known);
1777 }
1778
1779 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1780 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1781 {
1782 jfunc->bits.known = true;
1783
1784 if (TREE_CODE (arg) == SSA_NAME)
1785 {
1786 jfunc->bits.value = 0;
1787 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1788 TYPE_SIGN (TREE_TYPE (arg)));
1789 }
1790 else
1791 {
1792 jfunc->bits.value = wi::to_widest (arg);
1793 jfunc->bits.mask = 0;
1794 }
1795 }
1796 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1797 {
1798 unsigned HOST_WIDE_INT bitpos;
1799 unsigned align;
1800
1801 jfunc->bits.known = true;
1802 get_pointer_alignment_1 (arg, &align, &bitpos);
1803 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1804 .and_not (align / BITS_PER_UNIT - 1);
1805 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1806 }
1807 else
1808 gcc_assert (!jfunc->bits.known);
1809
1810 if (is_gimple_ip_invariant (arg)
1811 || (VAR_P (arg)
1812 && is_global_var (arg)
1813 && TREE_READONLY (arg)))
1814 ipa_set_jf_constant (jfunc, arg, cs);
1815 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1816 && TREE_CODE (arg) == PARM_DECL)
1817 {
1818 int index = ipa_get_param_decl_index (info, arg);
1819
1820 gcc_assert (index >= 0);
1821 /* Aggregate passed by value, check for pass-through, otherwise we
1822 will attempt to fill in aggregate contents later in this
1823 loop. */
1824 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1825 {
1826 ipa_set_jf_simple_pass_through (jfunc, index, false);
1827 continue;
1828 }
1829 }
1830 else if (TREE_CODE (arg) == SSA_NAME)
1831 {
1832 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1833 {
1834 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1835 if (index >= 0)
1836 {
1837 bool agg_p;
1838 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1839 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1840 }
1841 }
1842 else
1843 {
1844 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1845 if (is_gimple_assign (stmt))
1846 compute_complex_assign_jump_func (fbi, info, jfunc,
1847 call, stmt, arg, param_type);
1848 else if (gimple_code (stmt) == GIMPLE_PHI)
1849 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1850 call,
1851 as_a <gphi *> (stmt));
1852 }
1853 }
1854
1855 /* If ARG is a pointer, we cannot use its type to determine the type of the
1856 aggregate passed (because type conversions are ignored in gimple). Usually
1857 we can safely get the type from the function declaration, but for K&R
1858 prototypes or variadic functions we can try our luck with the type of the
1859 pointer passed. TODO: Since we look for actual initialization of the memory
1860 object, we may be better off working out the type from the stores we find. */
1861 if (!param_type)
1862 param_type = TREE_TYPE (arg);
1863
1864 if ((jfunc->type != IPA_JF_PASS_THROUGH
1865 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1866 && (jfunc->type != IPA_JF_ANCESTOR
1867 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1868 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1869 || POINTER_TYPE_P (param_type)))
1870 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1871 }
1872 if (!useful_context)
1873 vec_free (args->polymorphic_call_contexts);
1874 }
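/* For illustration (a sketch of typical output, not an exhaustive
   description of the cases above): in a caller such as

     void caller (int x)
     {
       callee (x, 7);
     }

   the first argument yields a simple pass-through jump function (the
   caller's parameter 0 arrives unchanged) and the second a constant
   jump function holding 7.  */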
1875
1876 /* Compute jump functions for all edges - both direct and indirect - outgoing
1877 from BB. */
1878
1879 static void
1880 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1881 {
1882 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1883 int i;
1884 struct cgraph_edge *cs;
1885
1886 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1887 {
1888 struct cgraph_node *callee = cs->callee;
1889
1890 if (callee)
1891 {
1892 callee = callee->ultimate_alias_target ();
1893 /* We do not need to bother analyzing calls to unknown functions
1894 unless they may become known during lto/whopr. */
1895 if (!callee->definition && !flag_lto)
1896 continue;
1897 }
1898 ipa_compute_jump_functions_for_edge (fbi, cs);
1899 }
1900 }
1901
1902 /* If STMT looks like a statement loading a value from a member pointer formal
1903 parameter, return that parameter and store the offset of the field to
1904 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1905 might be clobbered). If USE_DELTA, then we look for a use of the delta
1906 field rather than the pfn. */
1907
1908 static tree
1909 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1910 HOST_WIDE_INT *offset_p)
1911 {
1912 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1913
1914 if (!gimple_assign_single_p (stmt))
1915 return NULL_TREE;
1916
1917 rhs = gimple_assign_rhs1 (stmt);
1918 if (TREE_CODE (rhs) == COMPONENT_REF)
1919 {
1920 ref_field = TREE_OPERAND (rhs, 1);
1921 rhs = TREE_OPERAND (rhs, 0);
1922 }
1923 else
1924 ref_field = NULL_TREE;
1925 if (TREE_CODE (rhs) != MEM_REF)
1926 return NULL_TREE;
1927 rec = TREE_OPERAND (rhs, 0);
1928 if (TREE_CODE (rec) != ADDR_EXPR)
1929 return NULL_TREE;
1930 rec = TREE_OPERAND (rec, 0);
1931 if (TREE_CODE (rec) != PARM_DECL
1932 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1933 return NULL_TREE;
1934 ref_offset = TREE_OPERAND (rhs, 1);
1935
1936 if (use_delta)
1937 fld = delta_field;
1938 else
1939 fld = ptr_field;
1940 if (offset_p)
1941 *offset_p = int_bit_position (fld);
1942
1943 if (ref_field)
1944 {
1945 if (integer_nonzerop (ref_offset))
1946 return NULL_TREE;
1947 return ref_field == fld ? rec : NULL_TREE;
1948 }
1949 else
1950 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1951 : NULL_TREE;
1952 }
1953
1954 /* Returns true iff T is an SSA_NAME defined by a statement. */
1955
1956 static bool
1957 ipa_is_ssa_with_stmt_def (tree t)
1958 {
1959 if (TREE_CODE (t) == SSA_NAME
1960 && !SSA_NAME_IS_DEFAULT_DEF (t))
1961 return true;
1962 else
1963 return false;
1964 }
1965
1966 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1967 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1968 indirect call graph edge. */
1969
1970 static struct cgraph_edge *
1971 ipa_note_param_call (struct cgraph_node *node, int param_index,
1972 gcall *stmt)
1973 {
1974 struct cgraph_edge *cs;
1975
1976 cs = node->get_edge (stmt);
1977 cs->indirect_info->param_index = param_index;
1978 cs->indirect_info->agg_contents = 0;
1979 cs->indirect_info->member_ptr = 0;
1980 cs->indirect_info->guaranteed_unmodified = 0;
1981 return cs;
1982 }
1983
1984 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1985 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1986 intermediate information about each formal parameter. Currently it checks
1987 whether the call calls a pointer that is a formal parameter and if so, the
1988 parameter is marked with the called flag and an indirect call graph edge
1989 describing the call is created. This is very simple for ordinary pointers
1990 represented in SSA but not-so-nice when it comes to member pointers. The
1991 ugly part of this function does nothing more than trying to match the
1992 pattern of such a call. An example of such a pattern is the gimple dump
1993 below, the call is on the last line:
1994
1995 <bb 2>:
1996 f$__delta_5 = f.__delta;
1997 f$__pfn_24 = f.__pfn;
1998
1999 or
2000 <bb 2>:
2001 f$__delta_5 = MEM[(struct *)&f];
2002 f$__pfn_24 = MEM[(struct *)&f + 4B];
2003
2004 and a few lines below:
2005
2006 <bb 5>:
2007 D.2496_3 = (int) f$__pfn_24;
2008 D.2497_4 = D.2496_3 & 1;
2009 if (D.2497_4 != 0)
2010 goto <bb 3>;
2011 else
2012 goto <bb 4>;
2013
2014 <bb 6>:
2015 D.2500_7 = (unsigned int) f$__delta_5;
2016 D.2501_8 = &S + D.2500_7;
2017 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2018 D.2503_10 = *D.2502_9;
2019 D.2504_12 = f$__pfn_24 + -1;
2020 D.2505_13 = (unsigned int) D.2504_12;
2021 D.2506_14 = D.2503_10 + D.2505_13;
2022 D.2507_15 = *D.2506_14;
2023 iftmp.11_16 = (String:: *) D.2507_15;
2024
2025 <bb 7>:
2026 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2027 D.2500_19 = (unsigned int) f$__delta_5;
2028 D.2508_20 = &S + D.2500_19;
2029 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2030
2031 Such patterns are the result of simple calls through a member pointer:
2032
2033 int doprinting (int (MyString::* f)(int) const)
2034 {
2035 MyString S ("somestring");
2036
2037 return (S.*f)(4);
2038 }
2039
2040 Moreover, the function also looks for called pointers loaded from aggregates
2041 passed by value or reference. */
2042
2043 static void
2044 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2045 tree target)
2046 {
2047 struct ipa_node_params *info = fbi->info;
2048 HOST_WIDE_INT offset;
2049 bool by_ref;
2050
2051 if (SSA_NAME_IS_DEFAULT_DEF (target))
2052 {
2053 tree var = SSA_NAME_VAR (target);
2054 int index = ipa_get_param_decl_index (info, var);
2055 if (index >= 0)
2056 ipa_note_param_call (fbi->node, index, call);
2057 return;
2058 }
2059
2060 int index;
2061 gimple *def = SSA_NAME_DEF_STMT (target);
2062 bool guaranteed_unmodified;
2063 if (gimple_assign_single_p (def)
2064 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2065 gimple_assign_rhs1 (def), &index, &offset,
2066 NULL, &by_ref, &guaranteed_unmodified))
2067 {
2068 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2069 cs->indirect_info->offset = offset;
2070 cs->indirect_info->agg_contents = 1;
2071 cs->indirect_info->by_ref = by_ref;
2072 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2073 return;
2074 }
2075
2076 /* Now we need to try to match the complex pattern of calling a member
2077 pointer. */
2078 if (gimple_code (def) != GIMPLE_PHI
2079 || gimple_phi_num_args (def) != 2
2080 || !POINTER_TYPE_P (TREE_TYPE (target))
2081 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2082 return;
2083
2084 /* First, we need to check whether one of these is a load from a member
2085 pointer that is a parameter to this function. */
2086 tree n1 = PHI_ARG_DEF (def, 0);
2087 tree n2 = PHI_ARG_DEF (def, 1);
2088 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2089 return;
2090 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2091 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2092
2093 tree rec;
2094 basic_block bb, virt_bb;
2095 basic_block join = gimple_bb (def);
2096 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2097 {
2098 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2099 return;
2100
2101 bb = EDGE_PRED (join, 0)->src;
2102 virt_bb = gimple_bb (d2);
2103 }
2104 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2105 {
2106 bb = EDGE_PRED (join, 1)->src;
2107 virt_bb = gimple_bb (d1);
2108 }
2109 else
2110 return;
2111
2112 /* Second, we need to check that the basic blocks are laid out in the way
2113 corresponding to the pattern. */
2114
2115 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2116 || single_pred (virt_bb) != bb
2117 || single_succ (virt_bb) != join)
2118 return;
2119
2120 /* Third, let's see that the branching is done depending on the least
2121 significant bit of the pfn. */
2122
2123 gimple *branch = last_stmt (bb);
2124 if (!branch || gimple_code (branch) != GIMPLE_COND)
2125 return;
2126
2127 if ((gimple_cond_code (branch) != NE_EXPR
2128 && gimple_cond_code (branch) != EQ_EXPR)
2129 || !integer_zerop (gimple_cond_rhs (branch)))
2130 return;
2131
2132 tree cond = gimple_cond_lhs (branch);
2133 if (!ipa_is_ssa_with_stmt_def (cond))
2134 return;
2135
2136 def = SSA_NAME_DEF_STMT (cond);
2137 if (!is_gimple_assign (def)
2138 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2139 || !integer_onep (gimple_assign_rhs2 (def)))
2140 return;
2141
2142 cond = gimple_assign_rhs1 (def);
2143 if (!ipa_is_ssa_with_stmt_def (cond))
2144 return;
2145
2146 def = SSA_NAME_DEF_STMT (cond);
2147
2148 if (is_gimple_assign (def)
2149 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2150 {
2151 cond = gimple_assign_rhs1 (def);
2152 if (!ipa_is_ssa_with_stmt_def (cond))
2153 return;
2154 def = SSA_NAME_DEF_STMT (cond);
2155 }
2156
2157 tree rec2;
2158 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2159 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2160 == ptrmemfunc_vbit_in_delta),
2161 NULL);
2162 if (rec != rec2)
2163 return;
2164
2165 index = ipa_get_param_decl_index (info, rec);
2166 if (index >= 0
2167 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2168 {
2169 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2170 cs->indirect_info->offset = offset;
2171 cs->indirect_info->agg_contents = 1;
2172 cs->indirect_info->member_ptr = 1;
2173 cs->indirect_info->guaranteed_unmodified = 1;
2174 }
2175
2176 return;
2177 }
2178
2179 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2180 object referenced in the expression is a formal parameter of the caller
2181 FBI->node (described by FBI->info), create a call note for the
2182 statement. */
2183
2184 static void
2185 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2186 gcall *call, tree target)
2187 {
2188 tree obj = OBJ_TYPE_REF_OBJECT (target);
2189 int index;
2190 HOST_WIDE_INT anc_offset;
2191
2192 if (!flag_devirtualize)
2193 return;
2194
2195 if (TREE_CODE (obj) != SSA_NAME)
2196 return;
2197
2198 struct ipa_node_params *info = fbi->info;
2199 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2200 {
2201 struct ipa_jump_func jfunc;
2202 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2203 return;
2204
2205 anc_offset = 0;
2206 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2207 gcc_assert (index >= 0);
2208 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2209 call, &jfunc))
2210 return;
2211 }
2212 else
2213 {
2214 struct ipa_jump_func jfunc;
2215 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2216 tree expr;
2217
2218 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2219 if (!expr)
2220 return;
2221 index = ipa_get_param_decl_index (info,
2222 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2223 gcc_assert (index >= 0);
2224 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2225 call, &jfunc, anc_offset))
2226 return;
2227 }
2228
2229 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2230 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2231 ii->offset = anc_offset;
2232 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2233 ii->otr_type = obj_type_ref_class (target);
2234 ii->polymorphic = 1;
2235 }
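/* For example, a method body containing "this->foo ()" yields an
   OBJ_TYPE_REF whose object is the default definition of the this
   parameter, so the code above notes a polymorphic indirect call based
   on formal parameter 0.  */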
2236
2237 /* Analyze the call statement CALL to determine whether and how it uses formal
2238 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2239 vector containing intermediate information about each formal parameter. */
2240
2241 static void
2242 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2243 {
2244 tree target = gimple_call_fn (call);
2245
2246 if (!target
2247 || (TREE_CODE (target) != SSA_NAME
2248 && !virtual_method_call_p (target)))
2249 return;
2250
2251 struct cgraph_edge *cs = fbi->node->get_edge (call);
2252 /* If we previously turned the call into a direct call, there is
2253 no need to analyze. */
2254 if (cs && !cs->indirect_unknown_callee)
2255 return;
2256
2257 if (cs->indirect_info->polymorphic && flag_devirtualize)
2258 {
2259 tree instance;
2260 tree target = gimple_call_fn (call);
2261 ipa_polymorphic_call_context context (current_function_decl,
2262 target, call, &instance);
2263
2264 gcc_checking_assert (cs->indirect_info->otr_type
2265 == obj_type_ref_class (target));
2266 gcc_checking_assert (cs->indirect_info->otr_token
2267 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2268
2269 cs->indirect_info->vptr_changed
2270 = !context.get_dynamic_type (instance,
2271 OBJ_TYPE_REF_OBJECT (target),
2272 obj_type_ref_class (target), call);
2273 cs->indirect_info->context = context;
2274 }
2275
2276 if (TREE_CODE (target) == SSA_NAME)
2277 ipa_analyze_indirect_call_uses (fbi, call, target);
2278 else if (virtual_method_call_p (target))
2279 ipa_analyze_virtual_call_uses (fbi, call, target);
2280 }
2281
2282
2283 /* Analyze the call statement STMT with respect to formal parameters (described
2284 in INFO) of the caller given by FBI->NODE. Currently it only checks whether
2285 formal parameters are called. */
2286
2287 static void
2288 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2289 {
2290 if (is_gimple_call (stmt))
2291 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2292 }
2293
2294 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2295 If OP is a parameter declaration, mark it as used in the info structure
2296 passed in DATA. */
2297
2298 static bool
2299 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2300 {
2301 struct ipa_node_params *info = (struct ipa_node_params *) data;
2302
2303 op = get_base_address (op);
2304 if (op
2305 && TREE_CODE (op) == PARM_DECL)
2306 {
2307 int index = ipa_get_param_decl_index (info, op);
2308 gcc_assert (index >= 0);
2309 ipa_set_param_used (info, index, true);
2310 }
2311
2312 return false;
2313 }
2314
2315 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2316 the findings in various structures of the associated ipa_node_params
2317 structure, such as parameter flags, notes etc. FBI holds various data about
2318 the function being analyzed. */
2319
2320 static void
2321 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2322 {
2323 gimple_stmt_iterator gsi;
2324 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2325 {
2326 gimple *stmt = gsi_stmt (gsi);
2327
2328 if (is_gimple_debug (stmt))
2329 continue;
2330
2331 ipa_analyze_stmt_uses (fbi, stmt);
2332 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2333 visit_ref_for_mod_analysis,
2334 visit_ref_for_mod_analysis,
2335 visit_ref_for_mod_analysis);
2336 }
2337 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2338 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2339 visit_ref_for_mod_analysis,
2340 visit_ref_for_mod_analysis,
2341 visit_ref_for_mod_analysis);
2342 }
2343
2344 /* Calculate controlled uses of parameters of NODE. */
2345
2346 static void
2347 ipa_analyze_controlled_uses (struct cgraph_node *node)
2348 {
2349 struct ipa_node_params *info = IPA_NODE_REF (node);
2350
2351 for (int i = 0; i < ipa_get_param_count (info); i++)
2352 {
2353 tree parm = ipa_get_param (info, i);
2354 int controlled_uses = 0;
2355
2356 /* For SSA regs see if parameter is used. For non-SSA we compute
2357 the flag during modification analysis. */
2358 if (is_gimple_reg (parm))
2359 {
2360 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2361 parm);
2362 if (ddef && !has_zero_uses (ddef))
2363 {
2364 imm_use_iterator imm_iter;
2365 use_operand_p use_p;
2366
2367 ipa_set_param_used (info, i, true);
2368 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2369 if (!is_gimple_call (USE_STMT (use_p)))
2370 {
2371 if (!is_gimple_debug (USE_STMT (use_p)))
2372 {
2373 controlled_uses = IPA_UNDESCRIBED_USE;
2374 break;
2375 }
2376 }
2377 else
2378 controlled_uses++;
2379 }
2380 else
2381 controlled_uses = 0;
2382 }
2383 else
2384 controlled_uses = IPA_UNDESCRIBED_USE;
2385 ipa_set_controlled_uses (info, i, controlled_uses);
2386 }
2387 }
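/* For instance, given

     void foo (int *p)
     {
       bar (p);
       bar (p);
     }

   the default definition of p has exactly two uses, both of them call
   arguments, so p gets a controlled-uses count of 2; any other
   non-debug use would force IPA_UNDESCRIBED_USE instead.  */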
2388
2389 /* Free stuff in BI. */
2390
2391 static void
2392 free_ipa_bb_info (struct ipa_bb_info *bi)
2393 {
2394 bi->cg_edges.release ();
2395 bi->param_aa_statuses.release ();
2396 }
2397
2398 /* Dominator walker driving the analysis. */
2399
2400 class analysis_dom_walker : public dom_walker
2401 {
2402 public:
2403 analysis_dom_walker (struct ipa_func_body_info *fbi)
2404 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2405
2406 virtual edge before_dom_children (basic_block);
2407
2408 private:
2409 struct ipa_func_body_info *m_fbi;
2410 };
2411
2412 edge
2413 analysis_dom_walker::before_dom_children (basic_block bb)
2414 {
2415 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2416 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2417 return NULL;
2418 }
2419
2420 /* Release body info FBI. */
2421
2422 void
2423 ipa_release_body_info (struct ipa_func_body_info *fbi)
2424 {
2425 int i;
2426 struct ipa_bb_info *bi;
2427
2428 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2429 free_ipa_bb_info (bi);
2430 fbi->bb_infos.release ();
2431 }
2432
2433 /* Initialize the array describing properties of formal parameters
2434 of NODE, analyze their uses and compute jump functions associated
2435 with actual arguments of calls from within NODE. */
2436
2437 void
2438 ipa_analyze_node (struct cgraph_node *node)
2439 {
2440 struct ipa_func_body_info fbi;
2441 struct ipa_node_params *info;
2442
2443 ipa_check_create_node_params ();
2444 ipa_check_create_edge_args ();
2445 info = IPA_NODE_REF (node);
2446
2447 if (info->analysis_done)
2448 return;
2449 info->analysis_done = 1;
2450
2451 if (ipa_func_spec_opts_forbid_analysis_p (node))
2452 {
2453 for (int i = 0; i < ipa_get_param_count (info); i++)
2454 {
2455 ipa_set_param_used (info, i, true);
2456 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2457 }
2458 return;
2459 }
2460
2461 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2462 push_cfun (func);
2463 calculate_dominance_info (CDI_DOMINATORS);
2464 ipa_initialize_node_params (node);
2465 ipa_analyze_controlled_uses (node);
2466
2467 fbi.node = node;
2468 fbi.info = IPA_NODE_REF (node);
2469 fbi.bb_infos = vNULL;
2470 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2471 fbi.param_count = ipa_get_param_count (info);
2472 fbi.aa_walked = 0;
2473
2474 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2475 {
2476 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2477 bi->cg_edges.safe_push (cs);
2478 }
2479
2480 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2481 {
2482 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2483 bi->cg_edges.safe_push (cs);
2484 }
2485
2486 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2487
2488 ipa_release_body_info (&fbi);
2489 free_dominance_info (CDI_DOMINATORS);
2490 pop_cfun ();
2491 }
2492
2493 /* Update the jump functions associated with call graph edge E when the call
2494 graph edge CS is being inlined, assuming that E->caller is already (possibly
2495 indirectly) inlined into CS->callee and that E has not been inlined. */
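/* An illustrative sketch: if E carries an ancestor jump function
   referring to formal 1 of the inlined callee with offset 32, and CS
   passed its own parameter 0 unchanged into that formal (a simple
   pass-through), the combined jump function refers directly to
   parameter 0 of the new root, still with offset 32.  */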
2496
2497 static void
2498 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2499 struct cgraph_edge *e)
2500 {
2501 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2502 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2503 int count = ipa_get_cs_argument_count (args);
2504 int i;
2505
2506 for (i = 0; i < count; i++)
2507 {
2508 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2509 struct ipa_polymorphic_call_context *dst_ctx
2510 = ipa_get_ith_polymorhic_call_context (args, i);
2511
2512 if (dst->type == IPA_JF_ANCESTOR)
2513 {
2514 struct ipa_jump_func *src;
2515 int dst_fid = dst->value.ancestor.formal_id;
2516 struct ipa_polymorphic_call_context *src_ctx
2517 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2518
2519 /* Variable number of arguments can cause havoc if we try to access
2520 one that does not exist in the inlined edge. So make sure we
2521 don't. */
2522 if (dst_fid >= ipa_get_cs_argument_count (top))
2523 {
2524 ipa_set_jf_unknown (dst);
2525 continue;
2526 }
2527
2528 src = ipa_get_ith_jump_func (top, dst_fid);
2529
2530 if (src_ctx && !src_ctx->useless_p ())
2531 {
2532 struct ipa_polymorphic_call_context ctx = *src_ctx;
2533
2534 /* TODO: Make type preserved safe WRT contexts. */
2535 if (!ipa_get_jf_ancestor_type_preserved (dst))
2536 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2537 ctx.offset_by (dst->value.ancestor.offset);
2538 if (!ctx.useless_p ())
2539 {
2540 if (!dst_ctx)
2541 {
2542 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2543 count);
2544 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2545 }
2546
2547 dst_ctx->combine_with (ctx);
2548 }
2549 }
2550
2551 if (src->agg.items
2552 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2553 {
2554 struct ipa_agg_jf_item *item;
2555 int j;
2556
2557 /* Currently we do not produce clobber aggregate jump functions,
2558 replace with merging when we do. */
2559 gcc_assert (!dst->agg.items);
2560
2561 dst->agg.items = vec_safe_copy (src->agg.items);
2562 dst->agg.by_ref = src->agg.by_ref;
2563 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2564 item->offset -= dst->value.ancestor.offset;
2565 }
2566
2567 if (src->type == IPA_JF_PASS_THROUGH
2568 && src->value.pass_through.operation == NOP_EXPR)
2569 {
2570 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2571 dst->value.ancestor.agg_preserved &=
2572 src->value.pass_through.agg_preserved;
2573 }
2574 else if (src->type == IPA_JF_PASS_THROUGH
2575 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2576 {
2577 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2578 dst->value.ancestor.agg_preserved = false;
2579 }
2580 else if (src->type == IPA_JF_ANCESTOR)
2581 {
2582 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2583 dst->value.ancestor.offset += src->value.ancestor.offset;
2584 dst->value.ancestor.agg_preserved &=
2585 src->value.ancestor.agg_preserved;
2586 }
2587 else
2588 ipa_set_jf_unknown (dst);
2589 }
2590 else if (dst->type == IPA_JF_PASS_THROUGH)
2591 {
2592 struct ipa_jump_func *src;
2593 /* We must check range due to calls with variable number of arguments
2594 and we cannot combine jump functions with operations. */
2595 if (dst->value.pass_through.operation == NOP_EXPR
2596 && (dst->value.pass_through.formal_id
2597 < ipa_get_cs_argument_count (top)))
2598 {
2599 int dst_fid = dst->value.pass_through.formal_id;
2600 src = ipa_get_ith_jump_func (top, dst_fid);
2601 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2602 struct ipa_polymorphic_call_context *src_ctx
2603 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2604
2605 if (src_ctx && !src_ctx->useless_p ())
2606 {
2607 struct ipa_polymorphic_call_context ctx = *src_ctx;
2608
2609 /* TODO: Make type preserved safe WRT contexts. */
2610 if (!ipa_get_jf_pass_through_type_preserved (dst))
2611 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2612 if (!ctx.useless_p ())
2613 {
2614 if (!dst_ctx)
2615 {
2616 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2617 count);
2618 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2619 }
2620 dst_ctx->combine_with (ctx);
2621 }
2622 }
2623 switch (src->type)
2624 {
2625 case IPA_JF_UNKNOWN:
2626 ipa_set_jf_unknown (dst);
2627 break;
2628 case IPA_JF_CONST:
2629 ipa_set_jf_cst_copy (dst, src);
2630 break;
2631
2632 case IPA_JF_PASS_THROUGH:
2633 {
2634 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2635 enum tree_code operation;
2636 operation = ipa_get_jf_pass_through_operation (src);
2637
2638 if (operation == NOP_EXPR)
2639 {
2640 bool agg_p;
2641 agg_p = dst_agg_p
2642 && ipa_get_jf_pass_through_agg_preserved (src);
2643 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2644 }
2645 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2646 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2647 else
2648 {
2649 tree operand = ipa_get_jf_pass_through_operand (src);
2650 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2651 operation);
2652 }
2653 break;
2654 }
2655 case IPA_JF_ANCESTOR:
2656 {
2657 bool agg_p;
2658 agg_p = dst_agg_p
2659 && ipa_get_jf_ancestor_agg_preserved (src);
2660 ipa_set_ancestor_jf (dst,
2661 ipa_get_jf_ancestor_offset (src),
2662 ipa_get_jf_ancestor_formal_id (src),
2663 agg_p);
2664 break;
2665 }
2666 default:
2667 gcc_unreachable ();
2668 }
2669
2670 if (src->agg.items
2671 && (dst_agg_p || !src->agg.by_ref))
2672 {
2673 /* Currently we do not produce clobber aggregate jump
2674 functions, replace with merging when we do. */
2675 gcc_assert (!dst->agg.items);
2676
2677 dst->agg.by_ref = src->agg.by_ref;
2678 dst->agg.items = vec_safe_copy (src->agg.items);
2679 }
2680 }
2681 else
2682 ipa_set_jf_unknown (dst);
2683 }
2684 }
2685 }
2686
2687 /* If TARGET is an addr_expr of a function declaration, make it the
2688 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2689 Otherwise, return NULL. */
2690
2691 struct cgraph_edge *
2692 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2693 bool speculative)
2694 {
2695 struct cgraph_node *callee;
2696 struct inline_edge_summary *es = inline_edge_summary (ie);
2697 bool unreachable = false;
2698
2699 if (TREE_CODE (target) == ADDR_EXPR)
2700 target = TREE_OPERAND (target, 0);
2701 if (TREE_CODE (target) != FUNCTION_DECL)
2702 {
2703 target = canonicalize_constructor_val (target, NULL);
2704 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2705 {
2706 /* Member pointer call that goes through a VMT lookup. */
2707 if (ie->indirect_info->member_ptr
2708 /* Or if target is not an invariant expression and we do not
2709 know if it will evaluate to a function at runtime.
2710 This can happen when folding through &VAR, where &VAR
2711 is IP invariant, but VAR itself is not.
2712
2713 TODO: Revisit this when GCC 5 is branched. It seems that
2714 member_ptr check is not needed and that we may try to fold
2715 the expression and see if VAR is readonly. */
2716 || !is_gimple_ip_invariant (target))
2717 {
2718 if (dump_enabled_p ())
2719 {
2720 location_t loc = gimple_location_safe (ie->call_stmt);
2721 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2722 "discovered direct call non-invariant "
2723 "%s/%i\n",
2724 ie->caller->name (), ie->caller->order);
2725 }
2726 return NULL;
2727 }
2728
2729
2730 if (dump_enabled_p ())
2731 {
2732 location_t loc = gimple_location_safe (ie->call_stmt);
2733 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2734 "discovered direct call to non-function in %s/%i, "
2735 "making it __builtin_unreachable\n",
2736 ie->caller->name (), ie->caller->order);
2737 }
2738
2739 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2740 callee = cgraph_node::get_create (target);
2741 unreachable = true;
2742 }
2743 else
2744 callee = cgraph_node::get (target);
2745 }
2746 else
2747 callee = cgraph_node::get (target);
2748
2749 /* Because may-edges are not explicitly represented and the vtable may be external,
2750 we may create the first reference to the object in the unit. */
2751 if (!callee || callee->global.inlined_to)
2752 {
2753
2754 /* We had better ensure we can refer to it.
2755 In the case of static functions we are out of luck, since we already
2756 removed its body. In the case of public functions we may or may
2757 not introduce the reference. */
2758 if (!canonicalize_constructor_val (target, NULL)
2759 || !TREE_PUBLIC (target))
2760 {
2761 if (dump_file)
2762 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2763 "(%s/%i -> %s/%i) but cannot refer to it. Giving up.\n",
2764 xstrdup_for_dump (ie->caller->name ()),
2765 ie->caller->order,
2766 xstrdup_for_dump (ie->callee->name ()),
2767 ie->callee->order);
2768 return NULL;
2769 }
2770 callee = cgraph_node::get_create (target);
2771 }
2772
2773 /* If the edge is already speculative, check that the target agrees. */
2774 if (speculative && ie->speculative)
2775 {
2776 struct cgraph_edge *e2;
2777 struct ipa_ref *ref;
2778 ie->speculative_call_info (e2, ie, ref);
2779 if (e2->callee->ultimate_alias_target ()
2780 != callee->ultimate_alias_target ())
2781 {
2782 if (dump_file)
2783 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2784 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2785 xstrdup_for_dump (ie->caller->name ()),
2786 ie->caller->order,
2787 xstrdup_for_dump (callee->name ()),
2788 callee->order,
2789 xstrdup_for_dump (e2->callee->name ()),
2790 e2->callee->order);
2791 }
2792 else
2793 {
2794 if (dump_file)
2795 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2796 "(%s/%i -> %s/%i), which agrees with the previous speculation.\n",
2797 xstrdup_for_dump (ie->caller->name ()),
2798 ie->caller->order,
2799 xstrdup_for_dump (callee->name ()),
2800 callee->order);
2801 }
2802 return NULL;
2803 }
2804
2805 if (!dbg_cnt (devirt))
2806 return NULL;
2807
2808 ipa_check_create_node_params ();
2809
2810 /* We cannot make edges to inline clones. It is a bug if someone removed
2811 the cgraph node too early. */
2812 gcc_assert (!callee->global.inlined_to);
2813
2814 if (dump_file && !unreachable)
2815 {
2816 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2817 "(%s/%i -> %s/%i), for stmt ",
2818 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2819 speculative ? "speculative" : "known",
2820 xstrdup_for_dump (ie->caller->name ()),
2821 ie->caller->order,
2822 xstrdup_for_dump (callee->name ()),
2823 callee->order);
2824 if (ie->call_stmt)
2825 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2826 else
2827 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2828 }
2829 if (dump_enabled_p ())
2830 {
2831 location_t loc = gimple_location_safe (ie->call_stmt);
2832
2833 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2834 "converting indirect call in %s to direct call to %s\n",
2835 ie->caller->name (), callee->name ());
2836 }
2837 if (!speculative)
2838 {
2839 struct cgraph_edge *orig = ie;
2840 ie = ie->make_direct (callee);
2841 /* If we resolved a speculative edge, the cost is already up to date
2842 for the direct call (adjusted by inline_edge_duplication_hook). */
2843 if (ie == orig)
2844 {
2845 es = inline_edge_summary (ie);
2846 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2847 - eni_size_weights.call_cost);
2848 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2849 - eni_time_weights.call_cost);
2850 }
2851 }
2852 else
2853 {
2854 if (!callee->can_be_discarded_p ())
2855 {
2856 cgraph_node *alias;
2857 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2858 if (alias)
2859 callee = alias;
2860 }
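/* The speculative call is assumed to be taken roughly 8 times out of
   10, hence count and frequency are scaled by 8/10 below.  */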
2861 /* make_speculative will update ie's cost to direct call cost. */
2862 ie = ie->make_speculative
2863 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2864 }
2865
2866 return ie;
2867 }
2868
2869 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2870 CONSTRUCTOR and return it. Return NULL if the search fails for some
2871 reason. */
2872
2873 static tree
2874 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2875 {
2876 tree type = TREE_TYPE (constructor);
2877 if (TREE_CODE (type) != ARRAY_TYPE
2878 && TREE_CODE (type) != RECORD_TYPE)
2879 return NULL;
2880
2881 unsigned ix;
2882 tree index, val;
2883 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2884 {
2885 HOST_WIDE_INT elt_offset;
2886 if (TREE_CODE (type) == ARRAY_TYPE)
2887 {
2888 offset_int off;
2889 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2890 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2891
2892 if (index)
2893 {
2894 off = wi::to_offset (index);
2895 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2896 {
2897 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2898 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2899 off = wi::sext (off - wi::to_offset (low_bound),
2900 TYPE_PRECISION (TREE_TYPE (index)));
2901 }
2902 off *= wi::to_offset (unit_size);
2903 }
2904 else
2905 off = wi::to_offset (unit_size) * ix;
2906
2907 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2908 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2909 continue;
2910 elt_offset = off.to_shwi ();
2911 }
2912 else if (TREE_CODE (type) == RECORD_TYPE)
2913 {
2914 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2915 if (DECL_BIT_FIELD (index))
2916 continue;
2917 elt_offset = int_bit_position (index);
2918 }
2919 else
2920 gcc_unreachable ();
2921
2922 if (elt_offset > req_offset)
2923 return NULL;
2924
2925 if (TREE_CODE (val) == CONSTRUCTOR)
2926 return find_constructor_constant_at_offset (val,
2927 req_offset - elt_offset);
2928
2929 if (elt_offset == req_offset
2930 && is_gimple_reg_type (TREE_TYPE (val))
2931 && is_gimple_ip_invariant (val))
2932 return val;
2933 }
2934 return NULL;
2935 }
2936
2937 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2938 invariant from a static constructor and if so, return it. Otherwise return
2939 NULL. */
2940
2941 static tree
2942 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2943 {
2944 if (by_ref)
2945 {
2946 if (TREE_CODE (scalar) != ADDR_EXPR)
2947 return NULL;
2948 scalar = TREE_OPERAND (scalar, 0);
2949 }
2950
2951 if (!VAR_P (scalar)
2952 || !is_global_var (scalar)
2953 || !TREE_READONLY (scalar)
2954 || !DECL_INITIAL (scalar)
2955 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2956 return NULL;
2957
2958 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2959 }
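/* A sketch of the by-reference case: given

     struct S { int a, b; };
     static const struct S s = { 7, 9 };
     ...
     bar (&s);

   a lookup with OFFSET equal to the bit position of field b returns
   the constant 9 via find_constructor_constant_at_offset.  */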
2960
2961 /* Retrieve value from aggregate jump function AGG or static initializer of
2962 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2963 none. BY_REF specifies whether the value has to be passed by reference or
2964 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2965 to is set to true if the value comes from an initializer of a constant. */
2966
2967 tree
2968 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2969 HOST_WIDE_INT offset, bool by_ref,
2970 bool *from_global_constant)
2971 {
2972 struct ipa_agg_jf_item *item;
2973 int i;
2974
2975 if (scalar)
2976 {
2977 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2978 if (res)
2979 {
2980 if (from_global_constant)
2981 *from_global_constant = true;
2982 return res;
2983 }
2984 }
2985
2986 if (!agg
2987 || by_ref != agg->by_ref)
2988 return NULL;
2989
2990 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2991 if (item->offset == offset)
2992 {
2993 /* Currently we do not have clobber values, return NULL for them once
2994 we do. */
2995 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2996 if (from_global_constant)
2997 *from_global_constant = false;
2998 return item->value;
2999 }
3000 return NULL;
3001 }
3002
3003 /* Remove a reference to SYMBOL from the list of references of a node given by
3004 reference description RDESC. Return true if the reference has been
3005 successfully found and removed. */
3006
3007 static bool
3008 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3009 {
3010 struct ipa_ref *to_del;
3011 struct cgraph_edge *origin;
3012
3013 origin = rdesc->cs;
3014 if (!origin)
3015 return false;
3016 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3017 origin->lto_stmt_uid);
3018 if (!to_del)
3019 return false;
3020
3021 to_del->remove_reference ();
3022 if (dump_file)
3023 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3024 xstrdup_for_dump (origin->caller->name ()),
3025 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3026 return true;
3027 }
3028
3029 /* If JFUNC has a reference description with refcount different from
3030 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3031 NULL. JFUNC must be a constant jump function. */
3032
3033 static struct ipa_cst_ref_desc *
3034 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3035 {
3036 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3037 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3038 return rdesc;
3039 else
3040 return NULL;
3041 }
3042
3043 /* If the value of constant jump function JFUNC is an address of a function
3044 declaration, return the associated call graph node. Otherwise return
3045 NULL. */
3046
3047 static cgraph_node *
3048 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3049 {
3050 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3051 tree cst = ipa_get_jf_constant (jfunc);
3052 if (TREE_CODE (cst) != ADDR_EXPR
3053 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3054 return NULL;
3055
3056 return cgraph_node::get (TREE_OPERAND (cst, 0));
3057 }
3058
3059
3060 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3061 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3062 the edge specified in the rdesc. Return false if either the symbol or the
3063 reference could not be found, otherwise return true. */
3064
3065 static bool
3066 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3067 {
3068 struct ipa_cst_ref_desc *rdesc;
3069 if (jfunc->type == IPA_JF_CONST
3070 && (rdesc = jfunc_rdesc_usable (jfunc))
3071 && --rdesc->refcount == 0)
3072 {
3073 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3074 if (!symbol)
3075 return false;
3076
3077 return remove_described_reference (symbol, rdesc);
3078 }
3079 return true;
3080 }
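/* For example, once an indirect call that was fed the constant &f has
   been made direct, the jump function describing that use is dead;
   dropping the last refcount here also removes the IPA reference that
   recorded the address-taking of f.  */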
3081
3082 /* Try to find a destination for indirect edge IE that corresponds to a simple
3083 call or a call of a member function pointer and where the destination is a
3084 pointer formal parameter described by jump function JFUNC. If it can be
3085 determined, return the newly direct edge, otherwise return NULL.
3086 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3087
3088 static struct cgraph_edge *
3089 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3090 struct ipa_jump_func *jfunc,
3091 struct ipa_node_params *new_root_info)
3092 {
3093 struct cgraph_edge *cs;
3094 tree target;
3095 bool agg_contents = ie->indirect_info->agg_contents;
3096 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3097 if (agg_contents)
3098 {
3099 bool from_global_constant;
3100 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3101 ie->indirect_info->offset,
3102 ie->indirect_info->by_ref,
3103 &from_global_constant);
3104 if (target
3105 && !from_global_constant
3106 && !ie->indirect_info->guaranteed_unmodified)
3107 return NULL;
3108 }
3109 else
3110 target = scalar;
3111 if (!target)
3112 return NULL;
3113 cs = ipa_make_edge_direct_to_target (ie, target);
3114
3115 if (cs && !agg_contents)
3116 {
3117 bool ok;
3118 gcc_checking_assert (cs->callee
3119 && (cs != ie
3120 || jfunc->type != IPA_JF_CONST
3121 || !cgraph_node_for_jfunc (jfunc)
3122 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3123 ok = try_decrement_rdesc_refcount (jfunc);
3124 gcc_checking_assert (ok);
3125 }
3126
3127 return cs;
3128 }
3129
3130 /* Return the target to be used in cases of impossible devirtualization. IE
3131 and target (the latter can be NULL) are dumped when dumping is enabled. */
3132
3133 tree
3134 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3135 {
3136 if (dump_file)
3137 {
3138 if (target)
3139 fprintf (dump_file,
3140 "Type inconsistent devirtualization: %s/%i->%s\n",
3141 ie->caller->name (), ie->caller->order,
3142 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3143 else
3144 fprintf (dump_file,
3145 "No devirtualization target in %s/%i\n",
3146 ie->caller->name (), ie->caller->order);
3147 }
3148 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3149 cgraph_node::get_create (new_target);
3150 return new_target;
3151 }
3152
3153 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3154 call based on a formal parameter which is described by jump function JFUNC
3155 and if it can be determined, make it direct and return the direct edge.
3156 Otherwise, return NULL. CTX describes the polymorphic context carried
3157 by the parameter on which the call is based. */
3158
3159 static struct cgraph_edge *
3160 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3161 struct ipa_jump_func *jfunc,
3162 struct ipa_polymorphic_call_context ctx)
3163 {
3164 tree target = NULL;
3165 bool speculative = false;
3166
3167 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3168 return NULL;
3169
3170 gcc_assert (!ie->indirect_info->by_ref);
3171
3172 /* Try to do lookup via known virtual table pointer value. */
3173 if (!ie->indirect_info->vptr_changed
3174 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3175 {
3176 tree vtable;
3177 unsigned HOST_WIDE_INT offset;
3178 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3179 : NULL;
3180 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3181 ie->indirect_info->offset,
3182 true);
3183 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3184 {
3185 bool can_refer;
3186 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3187 vtable, offset, &can_refer);
3188 if (can_refer)
3189 {
3190 if (!t
3191 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3192 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3193 || !possible_polymorphic_call_target_p
3194 (ie, cgraph_node::get (t)))
3195 {
3196 /* Do not speculate builtin_unreachable, it is stupid! */
3197 if (!ie->indirect_info->vptr_changed)
3198 target = ipa_impossible_devirt_target (ie, target);
3199 else
3200 target = NULL;
3201 }
3202 else
3203 {
3204 target = t;
3205 speculative = ie->indirect_info->vptr_changed;
3206 }
3207 }
3208 }
3209 }
3210
3211 ipa_polymorphic_call_context ie_context (ie);
3212 vec <cgraph_node *> targets;
3213 bool final;
3214
3215 ctx.offset_by (ie->indirect_info->offset);
3216 if (ie->indirect_info->vptr_changed)
3217 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3218 ie->indirect_info->otr_type);
3219 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3220 targets = possible_polymorphic_call_targets
3221 (ie->indirect_info->otr_type,
3222 ie->indirect_info->otr_token,
3223 ctx, &final);
3224 if (final && targets.length () <= 1)
3225 {
3226 speculative = false;
3227 if (targets.length () == 1)
3228 target = targets[0]->decl;
3229 else
3230 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3231 }
3232 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3233 && !ie->speculative && ie->maybe_hot_p ())
3234 {
3235 cgraph_node *n;
3236 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3237 ie->indirect_info->otr_token,
3238 ie->indirect_info->context);
3239 if (n)
3240 {
3241 target = n->decl;
3242 speculative = true;
3243 }
3244 }
3245
3246 if (target)
3247 {
3248 if (!possible_polymorphic_call_target_p
3249 (ie, cgraph_node::get_create (target)))
3250 {
3251 if (speculative)
3252 return NULL;
3253 target = ipa_impossible_devirt_target (ie, target);
3254 }
3255 return ipa_make_edge_direct_to_target (ie, target, speculative);
3256 }
3257 else
3258 return NULL;
3259 }
3260
3261 /* Update the param called notes associated with NODE when CS is being inlined,
3262 assuming NODE is (potentially indirectly) inlined into CS->callee.
3263 Moreover, if the callee is discovered to be constant, create a new cgraph
3264 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3265 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3266
3267 static bool
3268 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3269 struct cgraph_node *node,
3270 vec<cgraph_edge *> *new_edges)
3271 {
3272 struct ipa_edge_args *top;
3273 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3274 struct ipa_node_params *new_root_info;
3275 bool res = false;
3276
3277 ipa_check_create_edge_args ();
3278 top = IPA_EDGE_REF (cs);
3279 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3280 ? cs->caller->global.inlined_to
3281 : cs->caller);
3282
3283 for (ie = node->indirect_calls; ie; ie = next_ie)
3284 {
3285 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3286 struct ipa_jump_func *jfunc;
3287 int param_index;
3288 cgraph_node *spec_target = NULL;
3289
3290 next_ie = ie->next_callee;
3291
3292 if (ici->param_index == -1)
3293 continue;
3294
3295 /* We must check range due to calls with variable number of arguments: */
3296 if (ici->param_index >= ipa_get_cs_argument_count (top))
3297 {
3298 ici->param_index = -1;
3299 continue;
3300 }
3301
3302 param_index = ici->param_index;
3303 jfunc = ipa_get_ith_jump_func (top, param_index);
3304
3305 if (ie->speculative)
3306 {
3307 struct cgraph_edge *de;
3308 struct ipa_ref *ref;
3309 ie->speculative_call_info (de, ie, ref);
3310 spec_target = de->callee;
3311 }
3312
3313 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3314 new_direct_edge = NULL;
3315 else if (ici->polymorphic)
3316 {
3317 ipa_polymorphic_call_context ctx;
3318 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3319 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3320 }
3321 else
3322 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3323 new_root_info);
3324 /* If speculation was removed, then we need to do nothing. */
3325 if (new_direct_edge && new_direct_edge != ie
3326 && new_direct_edge->callee == spec_target)
3327 {
3328 new_direct_edge->indirect_inlining_edge = 1;
3329 top = IPA_EDGE_REF (cs);
3330 res = true;
3331 if (!new_direct_edge->speculative)
3332 continue;
3333 }
3334 else if (new_direct_edge)
3335 {
3336 new_direct_edge->indirect_inlining_edge = 1;
3337 if (new_direct_edge->call_stmt)
3338 new_direct_edge->call_stmt_cannot_inline_p
3339 = !gimple_check_call_matching_types (
3340 new_direct_edge->call_stmt,
3341 new_direct_edge->callee->decl, false);
3342 if (new_edges)
3343 {
3344 new_edges->safe_push (new_direct_edge);
3345 res = true;
3346 }
3347 top = IPA_EDGE_REF (cs);
3348 /* If a speculative edge was introduced, we still need to update
3349 the call info of the indirect edge. */
3350 if (!new_direct_edge->speculative)
3351 continue;
3352 }
3353 if (jfunc->type == IPA_JF_PASS_THROUGH
3354 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3355 {
3356 if (ici->agg_contents
3357 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3358 && !ici->polymorphic)
3359 ici->param_index = -1;
3360 else
3361 {
3362 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3363 if (ici->polymorphic
3364 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3365 ici->vptr_changed = true;
3366 }
3367 }
3368 else if (jfunc->type == IPA_JF_ANCESTOR)
3369 {
3370 if (ici->agg_contents
3371 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3372 && !ici->polymorphic)
3373 ici->param_index = -1;
3374 else
3375 {
3376 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3377 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3378 if (ici->polymorphic
3379 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3380 ici->vptr_changed = true;
3381 }
3382 }
3383 else
3384 /* Either we can find a destination for this edge now or never. */
3385 ici->param_index = -1;
3386 }
3387
3388 return res;
3389 }
3390
3391 /* Recursively traverse subtree of NODE (including node) made of inlined
3392 cgraph_edges when CS has been inlined and invoke
3393 update_indirect_edges_after_inlining on all nodes and
3394 update_jump_functions_after_inlining on all non-inlined edges that lead out
3395 of this subtree. Newly discovered indirect edges will be added to
3396 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3397 created. */
3398
3399 static bool
3400 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3401 struct cgraph_node *node,
3402 vec<cgraph_edge *> *new_edges)
3403 {
3404 struct cgraph_edge *e;
3405 bool res;
3406
3407 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3408
3409 for (e = node->callees; e; e = e->next_callee)
3410 if (!e->inline_failed)
3411 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3412 else
3413 update_jump_functions_after_inlining (cs, e);
3414 for (e = node->indirect_calls; e; e = e->next_callee)
3415 update_jump_functions_after_inlining (cs, e);
3416
3417 return res;
3418 }
3419
3420 /* Combine two controlled uses counts as done during inlining. */
3421
3422 static int
3423 combine_controlled_uses_counters (int c, int d)
3424 {
3425 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3426 return IPA_UNDESCRIBED_USE;
3427 else
3428 return c + d - 1;
3429 }
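/* For instance, combining a count of 2 in the new root with 3 in the
   inlined callee yields 2 + 3 - 1 = 4: the call argument that carried
   the value into the callee disappears with inlining, while the
   callee's own uses now apply to the caller's value.  */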
3430
3431 /* Propagate number of controlled users from CS->callee to the new root of the
3432 tree of inlined nodes. */
3433
3434 static void
3435 propagate_controlled_uses (struct cgraph_edge *cs)
3436 {
3437 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3438 struct cgraph_node *new_root = cs->caller->global.inlined_to
3439 ? cs->caller->global.inlined_to : cs->caller;
3440 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3441 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3442 int count, i;
3443
3444 count = MIN (ipa_get_cs_argument_count (args),
3445 ipa_get_param_count (old_root_info));
3446 for (i = 0; i < count; i++)
3447 {
3448 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3449 struct ipa_cst_ref_desc *rdesc;
3450
3451 if (jf->type == IPA_JF_PASS_THROUGH)
3452 {
3453 int src_idx, c, d;
3454 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3455 c = ipa_get_controlled_uses (new_root_info, src_idx);
3456 d = ipa_get_controlled_uses (old_root_info, i);
3457
3458 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3459 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3460 c = combine_controlled_uses_counters (c, d);
3461 ipa_set_controlled_uses (new_root_info, src_idx, c);
3462 if (c == 0 && new_root_info->ipcp_orig_node)
3463 {
3464 struct cgraph_node *n;
3465 struct ipa_ref *ref;
3466 tree t = new_root_info->known_csts[src_idx];
3467
3468 if (t && TREE_CODE (t) == ADDR_EXPR
3469 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3470 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3471 && (ref = new_root->find_reference (n, NULL, 0)))
3472 {
3473 if (dump_file)
3474 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3475 "reference from %s/%i to %s/%i.\n",
3476 xstrdup_for_dump (new_root->name ()),
3477 new_root->order,
3478 xstrdup_for_dump (n->name ()), n->order);
3479 ref->remove_reference ();
3480 }
3481 }
3482 }
3483 else if (jf->type == IPA_JF_CONST
3484 && (rdesc = jfunc_rdesc_usable (jf)))
3485 {
3486 int d = ipa_get_controlled_uses (old_root_info, i);
3487 int c = rdesc->refcount;
3488 rdesc->refcount = combine_controlled_uses_counters (c, d);
3489 if (rdesc->refcount == 0)
3490 {
3491 tree cst = ipa_get_jf_constant (jf);
3492 struct cgraph_node *n;
3493 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3494 && TREE_CODE (TREE_OPERAND (cst, 0))
3495 == FUNCTION_DECL);
3496 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3497 if (n)
3498 {
3499 struct cgraph_node *clone;
3500 bool ok;
3501 ok = remove_described_reference (n, rdesc);
3502 gcc_checking_assert (ok);
3503
3504 clone = cs->caller;
3505 while (clone->global.inlined_to
3506 && clone != rdesc->cs->caller
3507 && IPA_NODE_REF (clone)->ipcp_orig_node)
3508 {
3509 struct ipa_ref *ref;
3510 ref = clone->find_reference (n, NULL, 0);
3511 if (ref)
3512 {
3513 if (dump_file)
3514 fprintf (dump_file, "ipa-prop: Removing "
3515 "cloning-created reference "
3516 "from %s/%i to %s/%i.\n",
3517 xstrdup_for_dump (clone->name ()),
3518 clone->order,
3519 xstrdup_for_dump (n->name ()),
3520 n->order);
3521 ref->remove_reference ();
3522 }
3523 clone = clone->callers->caller;
3524 }
3525 }
3526 }
3527 }
3528 }
3529
3530 for (i = ipa_get_param_count (old_root_info);
3531 i < ipa_get_cs_argument_count (args);
3532 i++)
3533 {
3534 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3535
3536 if (jf->type == IPA_JF_CONST)
3537 {
3538 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3539 if (rdesc)
3540 rdesc->refcount = IPA_UNDESCRIBED_USE;
3541 }
3542 else if (jf->type == IPA_JF_PASS_THROUGH)
3543 ipa_set_controlled_uses (new_root_info,
3544 jf->value.pass_through.formal_id,
3545 IPA_UNDESCRIBED_USE);
3546 }
3547 }
3548
3549 /* Update jump functions and call note functions on inlining the call site CS.
3550 CS is expected to lead to a node already cloned by
3551 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3552 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3553 created. */
3554
3555 bool
3556 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3557 vec<cgraph_edge *> *new_edges)
3558 {
3559 bool changed;
3560 /* Do nothing if the preparation phase has not been carried out yet
3561 (i.e. during early inlining). */
3562 if (!ipa_node_params_sum)
3563 return false;
3564 gcc_assert (ipa_edge_args_vector);
3565
3566 propagate_controlled_uses (cs);
3567 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3568
3569 return changed;
3570 }
3571
3572 /* Frees all dynamically allocated structures that the argument info points
3573 to. */
3574
3575 void
3576 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3577 {
3578 vec_free (args->jump_functions);
3579 memset (args, 0, sizeof (*args));
3580 }
3581
3582 /* Free all ipa_edge_args structures. */
3583
3584 void
3585 ipa_free_all_edge_args (void)
3586 {
3587 int i;
3588 struct ipa_edge_args *args;
3589
3590 if (!ipa_edge_args_vector)
3591 return;
3592
3593 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3594 ipa_free_edge_args_substructures (args);
3595
3596 vec_free (ipa_edge_args_vector);
3597 }
3598
3599 /* Frees all dynamically allocated structures that the param info points
3600 to. */
3601
3602 ipa_node_params::~ipa_node_params ()
3603 {
3604 descriptors.release ();
3605 free (lattices);
3606 /* Lattice values and their sources are deallocated with their allocation
3607 pool. */
3608 known_csts.release ();
3609 known_contexts.release ();
3610
3611 lattices = NULL;
3612 ipcp_orig_node = NULL;
3613 analysis_done = 0;
3614 node_enqueued = 0;
3615 do_clone_for_all_contexts = 0;
3616 is_all_contexts_clone = 0;
3617 node_dead = 0;
3618 }
3619
3620 /* Free all ipa_node_params structures. */
3621
3622 void
3623 ipa_free_all_node_params (void)
3624 {
3625 delete ipa_node_params_sum;
3626 ipa_node_params_sum = NULL;
3627 }
3628
3629 /* Grow ipcp_transformations if necessary. */
3630
3631 void
3632 ipcp_grow_transformations_if_necessary (void)
3633 {
3634 if (vec_safe_length (ipcp_transformations)
3635 <= (unsigned) symtab->cgraph_max_uid)
3636 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3637 }
3638
3639 /* Set the aggregate replacements of NODE to be AGGVALS. */
3640
3641 void
3642 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3643 struct ipa_agg_replacement_value *aggvals)
3644 {
3645 ipcp_grow_transformations_if_necessary ();
3646 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3647 }
3648
3649 /* Hook that is called by cgraph.c when an edge is removed. */
3650
3651 static void
3652 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3653 {
3654 struct ipa_edge_args *args;
3655
3656 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3657 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3658 return;
3659
3660 args = IPA_EDGE_REF (cs);
3661 if (args->jump_functions)
3662 {
3663 struct ipa_jump_func *jf;
3664 int i;
3665 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3666 {
3667 struct ipa_cst_ref_desc *rdesc;
3668 try_decrement_rdesc_refcount (jf);
3669 if (jf->type == IPA_JF_CONST
3670 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3671 && rdesc->cs == cs)
3672 rdesc->cs = NULL;
3673 }
3674 }
3675
3676 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3677 }
3678
3679 /* Hook that is called by cgraph.c when an edge is duplicated. */
3680
3681 static void
3682 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3683 void *)
3684 {
3685 struct ipa_edge_args *old_args, *new_args;
3686 unsigned int i;
3687
3688 ipa_check_create_edge_args ();
3689
3690 old_args = IPA_EDGE_REF (src);
3691 new_args = IPA_EDGE_REF (dst);
3692
3693 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3694 if (old_args->polymorphic_call_contexts)
3695 new_args->polymorphic_call_contexts
3696 = vec_safe_copy (old_args->polymorphic_call_contexts);
3697
3698 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3699 {
3700 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3701 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3702
3703 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3704
3705 if (src_jf->type == IPA_JF_CONST)
3706 {
3707 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3708
3709 if (!src_rdesc)
3710 dst_jf->value.constant.rdesc = NULL;
3711 else if (src->caller == dst->caller)
3712 {
3713 struct ipa_ref *ref;
3714 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3715 gcc_checking_assert (n);
3716 ref = src->caller->find_reference (n, src->call_stmt,
3717 src->lto_stmt_uid);
3718 gcc_checking_assert (ref);
3719 dst->caller->clone_reference (ref, ref->stmt);
3720
3721 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3722 dst_rdesc->cs = dst;
3723 dst_rdesc->refcount = src_rdesc->refcount;
3724 dst_rdesc->next_duplicate = NULL;
3725 dst_jf->value.constant.rdesc = dst_rdesc;
3726 }
3727 else if (src_rdesc->cs == src)
3728 {
3729 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3730 dst_rdesc->cs = dst;
3731 dst_rdesc->refcount = src_rdesc->refcount;
3732 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3733 src_rdesc->next_duplicate = dst_rdesc;
3734 dst_jf->value.constant.rdesc = dst_rdesc;
3735 }
3736 else
3737 {
3738 struct ipa_cst_ref_desc *dst_rdesc;
3739 /* This can happen during inlining, when a JFUNC can refer to a
3740 reference taken in a function up in the tree of inline clones.
3741 We need to find the duplicate that refers to our tree of
3742 inline clones. */
3743
3744 gcc_assert (dst->caller->global.inlined_to);
3745 for (dst_rdesc = src_rdesc->next_duplicate;
3746 dst_rdesc;
3747 dst_rdesc = dst_rdesc->next_duplicate)
3748 {
3749 struct cgraph_node *top;
3750 top = dst_rdesc->cs->caller->global.inlined_to
3751 ? dst_rdesc->cs->caller->global.inlined_to
3752 : dst_rdesc->cs->caller;
3753 if (dst->caller->global.inlined_to == top)
3754 break;
3755 }
3756 gcc_assert (dst_rdesc);
3757 dst_jf->value.constant.rdesc = dst_rdesc;
3758 }
3759 }
3760 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3761 && src->caller == dst->caller)
3762 {
3763 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3764 ? dst->caller->global.inlined_to : dst->caller;
3765 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3766 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3767
3768 int c = ipa_get_controlled_uses (root_info, idx);
3769 if (c != IPA_UNDESCRIBED_USE)
3770 {
3771 c++;
3772 ipa_set_controlled_uses (root_info, idx, c);
3773 }
3774 }
3775 }
3776 }
3777
3778 /* Analyze a function newly added to the callgraph. */
3779
3780 static void
3781 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3782 {
3783 if (node->has_gimple_body_p ())
3784 ipa_analyze_node (node);
3785 }
3786
3787 /* Hook that is called by summary when a node is duplicated. */
3788
3789 void
3790 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3791 ipa_node_params *old_info,
3792 ipa_node_params *new_info)
3793 {
3794 ipa_agg_replacement_value *old_av, *new_av;
3795
3796 new_info->descriptors = old_info->descriptors.copy ();
3797 new_info->lattices = NULL;
3798 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3799
3800 new_info->analysis_done = old_info->analysis_done;
3801 new_info->node_enqueued = old_info->node_enqueued;
3802 new_info->versionable = old_info->versionable;
3803
3804 old_av = ipa_get_agg_replacements_for_node (src);
3805 if (old_av)
3806 {
3807 new_av = NULL;
3808 while (old_av)
3809 {
3810 struct ipa_agg_replacement_value *v;
3811
3812 v = ggc_alloc<ipa_agg_replacement_value> ();
3813 memcpy (v, old_av, sizeof (*v));
3814 v->next = new_av;
3815 new_av = v;
3816 old_av = old_av->next;
3817 }
3818 ipa_set_node_agg_value_chain (dst, new_av);
3819 }
3820
3821 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3822
3823 if (src_trans)
3824 {
3825 ipcp_grow_transformations_if_necessary ();
3826 src_trans = ipcp_get_transformation_summary (src);
3827 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3828 vec<ipa_vr, va_gc> *&dst_vr
3829 = ipcp_get_transformation_summary (dst)->m_vr;
3830 if (vec_safe_length (src_trans->m_vr) > 0)
3831 {
3832 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3833 for (unsigned i = 0; i < src_vr->length (); ++i)
3834 dst_vr->quick_push ((*src_vr)[i]);
3835 }
3836 }
3837
3838 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3839 {
3840 ipcp_grow_transformations_if_necessary ();
3841 src_trans = ipcp_get_transformation_summary (src);
3842 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3843 vec<ipa_bits, va_gc> *&dst_bits
3844 = ipcp_get_transformation_summary (dst)->bits;
3845 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3846 for (unsigned i = 0; i < src_bits->length (); ++i)
3847 dst_bits->quick_push ((*src_bits)[i]);
3848 }
3849 }
3850
3851 /* Register our cgraph hooks if they are not already there. */
3852
3853 void
3854 ipa_register_cgraph_hooks (void)
3855 {
3856 ipa_check_create_node_params ();
3857
3858 if (!edge_removal_hook_holder)
3859 edge_removal_hook_holder =
3860 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3861 if (!edge_duplication_hook_holder)
3862 edge_duplication_hook_holder =
3863 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3864 function_insertion_hook_holder =
3865 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3866 }
3867
3868 /* Unregister our cgraph hooks. */
3869
3870 static void
3871 ipa_unregister_cgraph_hooks (void)
3872 {
3873 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3874 edge_removal_hook_holder = NULL;
3875 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3876 edge_duplication_hook_holder = NULL;
3877 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3878 function_insertion_hook_holder = NULL;
3879 }
3880
3881 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3882 longer needed after ipa-cp. */
3883
3884 void
3885 ipa_free_all_structures_after_ipa_cp (void)
3886 {
3887 if (!optimize && !in_lto_p)
3888 {
3889 ipa_free_all_edge_args ();
3890 ipa_free_all_node_params ();
3891 ipcp_sources_pool.release ();
3892 ipcp_cst_values_pool.release ();
3893 ipcp_poly_ctx_values_pool.release ();
3894 ipcp_agg_lattice_pool.release ();
3895 ipa_unregister_cgraph_hooks ();
3896 ipa_refdesc_pool.release ();
3897 }
3898 }
3899
3900 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3901 longer needed after indirect inlining. */
3902
3903 void
3904 ipa_free_all_structures_after_iinln (void)
3905 {
3906 ipa_free_all_edge_args ();
3907 ipa_free_all_node_params ();
3908 ipa_unregister_cgraph_hooks ();
3909 ipcp_sources_pool.release ();
3910 ipcp_cst_values_pool.release ();
3911 ipcp_poly_ctx_values_pool.release ();
3912 ipcp_agg_lattice_pool.release ();
3913 ipa_refdesc_pool.release ();
3914 }
3915
3916 /* Print ipa_tree_map data structures (the parameter descriptors) of
3917 function NODE to F. */
3918
3919 void
3920 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3921 {
3922 int i, count;
3923 struct ipa_node_params *info;
3924
3925 if (!node->definition)
3926 return;
3927 info = IPA_NODE_REF (node);
3928 fprintf (f, " function %s/%i parameter descriptors:\n",
3929 node->name (), node->order);
3930 count = ipa_get_param_count (info);
3931 for (i = 0; i < count; i++)
3932 {
3933 int c;
3934
3935 fprintf (f, " ");
3936 ipa_dump_param (f, info, i);
3937 if (ipa_is_param_used (info, i))
3938 fprintf (f, " used");
3939 c = ipa_get_controlled_uses (info, i);
3940 if (c == IPA_UNDESCRIBED_USE)
3941 fprintf (f, " undescribed_use");
3942 else
3943 fprintf (f, " controlled_uses=%i", c);
3944 fprintf (f, "\n");
3945 }
3946 }
3947
3948 /* Print ipa_tree_map data structures of all functions in the
3949 callgraph to F. */
3950
3951 void
3952 ipa_print_all_params (FILE * f)
3953 {
3954 struct cgraph_node *node;
3955
3956 fprintf (f, "\nFunction parameters:\n");
3957 FOR_EACH_FUNCTION (node)
3958 ipa_print_node_params (f, node);
3959 }
3960
3961 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3962
3963 vec<tree>
3964 ipa_get_vector_of_formal_parms (tree fndecl)
3965 {
3966 vec<tree> args;
3967 int count;
3968 tree parm;
3969
3970 gcc_assert (!flag_wpa);
3971 count = count_formal_params (fndecl);
3972 args.create (count);
3973 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3974 args.quick_push (parm);
3975
3976 return args;
3977 }
3978
3979 /* Return a heap allocated vector containing types of formal parameters of
3980 function type FNTYPE. */
3981
3982 vec<tree>
3983 ipa_get_vector_of_formal_parm_types (tree fntype)
3984 {
3985 vec<tree> types;
3986 int count = 0;
3987 tree t;
3988
3989 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3990 count++;
3991
3992 types.create (count);
3993 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3994 types.quick_push (TREE_VALUE (t));
3995
3996 return types;
3997 }
3998
3999 /* Modify the function declaration FNDECL and its type according to the plan in
4000 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4001 to reflect the actual parameters being modified, which are determined by the
4002 base_index field. */
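/* An illustrative sketch (hypothetical plan, synthesized name invented):
for

int f (int a, struct S *s);

an ADJUSTMENTS vector whose first entry copies base_index 0
(IPA_PARM_OP_COPY) and whose second creates a new scalar parameter in
place of the pointer would rewrite the declaration roughly into

int f (int a, int SYNTH.2);

where the actual name of the synthesized parameter depends on the
adjustment's arg_prefix. */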
4003
4004 void
4005 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4006 {
4007 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4008 tree orig_type = TREE_TYPE (fndecl);
4009 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4010
4011 /* The following test is an ugly hack: some functions simply don't have any
4012 arguments in their type. This is probably a bug but we need to cope with it. */
4013 bool care_for_types = (old_arg_types != NULL_TREE);
4014 bool last_parm_void;
4015 vec<tree> otypes;
4016 if (care_for_types)
4017 {
4018 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4019 == void_type_node);
4020 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4021 if (last_parm_void)
4022 gcc_assert (oparms.length () + 1 == otypes.length ());
4023 else
4024 gcc_assert (oparms.length () == otypes.length ());
4025 }
4026 else
4027 {
4028 last_parm_void = false;
4029 otypes.create (0);
4030 }
4031
4032 int len = adjustments.length ();
4033 tree *link = &DECL_ARGUMENTS (fndecl);
4034 tree new_arg_types = NULL;
4035 for (int i = 0; i < len; i++)
4036 {
4037 struct ipa_parm_adjustment *adj;
4038 gcc_assert (link);
4039
4040 adj = &adjustments[i];
4041 tree parm;
4042 if (adj->op == IPA_PARM_OP_NEW)
4043 parm = NULL;
4044 else
4045 parm = oparms[adj->base_index];
4046 adj->base = parm;
4047
4048 if (adj->op == IPA_PARM_OP_COPY)
4049 {
4050 if (care_for_types)
4051 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4052 new_arg_types);
4053 *link = parm;
4054 link = &DECL_CHAIN (parm);
4055 }
4056 else if (adj->op != IPA_PARM_OP_REMOVE)
4057 {
4058 tree new_parm;
4059 tree ptype;
4060
4061 if (adj->by_ref)
4062 ptype = build_pointer_type (adj->type);
4063 else
4064 {
4065 ptype = adj->type;
4066 if (is_gimple_reg_type (ptype))
4067 {
4068 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4069 if (TYPE_ALIGN (ptype) != malign)
4070 ptype = build_aligned_type (ptype, malign);
4071 }
4072 }
4073
4074 if (care_for_types)
4075 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4076
4077 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4078 ptype);
4079 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4080 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4081 DECL_ARTIFICIAL (new_parm) = 1;
4082 DECL_ARG_TYPE (new_parm) = ptype;
4083 DECL_CONTEXT (new_parm) = fndecl;
4084 TREE_USED (new_parm) = 1;
4085 DECL_IGNORED_P (new_parm) = 1;
4086 layout_decl (new_parm, 0);
4087
4088 if (adj->op == IPA_PARM_OP_NEW)
4089 adj->base = NULL;
4090 else
4091 adj->base = parm;
4092 adj->new_decl = new_parm;
4093
4094 *link = new_parm;
4095 link = &DECL_CHAIN (new_parm);
4096 }
4097 }
4098
4099 *link = NULL_TREE;
4100
4101 tree new_reversed = NULL;
4102 if (care_for_types)
4103 {
4104 new_reversed = nreverse (new_arg_types);
4105 if (last_parm_void)
4106 {
4107 if (new_reversed)
4108 TREE_CHAIN (new_arg_types) = void_list_node;
4109 else
4110 new_reversed = void_list_node;
4111 }
4112 }
4113
4114 /* Use copy_node to preserve as much as possible from original type
4115 (debug info, attribute lists etc.)
4116 The exception is METHOD_TYPEs, which must have a THIS argument;
4117 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4118 instead. */
4119 tree new_type = NULL;
4120 if (TREE_CODE (orig_type) != METHOD_TYPE
4121 || (adjustments[0].op == IPA_PARM_OP_COPY
4122 && adjustments[0].base_index == 0))
4123 {
4124 new_type = build_distinct_type_copy (orig_type);
4125 TYPE_ARG_TYPES (new_type) = new_reversed;
4126 }
4127 else
4128 {
4129 new_type
4130 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4131 new_reversed));
4132 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4133 DECL_VINDEX (fndecl) = NULL_TREE;
4134 }
4135
4136 /* When signature changes, we need to clear builtin info. */
4137 if (DECL_BUILT_IN (fndecl))
4138 {
4139 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4140 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4141 }
4142
4143 TREE_TYPE (fndecl) = new_type;
4144 DECL_VIRTUAL_P (fndecl) = 0;
4145 DECL_LANG_SPECIFIC (fndecl) = NULL;
4146 otypes.release ();
4147 oparms.release ();
4148 }
4149
4150 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4151 If this is a directly recursive call, CS must be NULL. Otherwise it must
4152 contain the corresponding call graph edge. */
4153
4154 void
4155 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4156 ipa_parm_adjustment_vec adjustments)
4157 {
4158 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4159 vec<tree> vargs;
4160 vec<tree, va_gc> **debug_args = NULL;
4161 gcall *new_stmt;
4162 gimple_stmt_iterator gsi, prev_gsi;
4163 tree callee_decl;
4164 int i, len;
4165
4166 len = adjustments.length ();
4167 vargs.create (len);
4168 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4169 current_node->remove_stmt_references (stmt);
4170
4171 gsi = gsi_for_stmt (stmt);
4172 prev_gsi = gsi;
4173 gsi_prev (&prev_gsi);
4174 for (i = 0; i < len; i++)
4175 {
4176 struct ipa_parm_adjustment *adj;
4177
4178 adj = &adjustments[i];
4179
4180 if (adj->op == IPA_PARM_OP_COPY)
4181 {
4182 tree arg = gimple_call_arg (stmt, adj->base_index);
4183
4184 vargs.quick_push (arg);
4185 }
4186 else if (adj->op != IPA_PARM_OP_REMOVE)
4187 {
4188 tree expr, base, off;
4189 location_t loc;
4190 unsigned int deref_align = 0;
4191 bool deref_base = false;
4192
4193 /* Since we create a new parameter out of the value of the old one, we
4194 can do the following kinds of transformations:
4195
4196 - A scalar passed by reference is converted to a scalar passed by
4197 value. (adj->by_ref is false and the type of the original
4198 actual argument is a pointer to a scalar).
4199
4200 - A part of an aggregate is passed instead of the whole aggregate.
4201 The part can be passed either by value or by reference, this is
4202 determined by value of adj->by_ref. Moreover, the code below
4203 handles both situations when the original aggregate is passed by
4204 value (its type is not a pointer) and when it is passed by
4205 reference (it is a pointer to an aggregate).
4206
4207 When the new argument is passed by reference (adj->by_ref is true)
4208 it must be a part of an aggregate and therefore we form it by
4209 simply taking the address of a reference inside the original
4210 aggregate. */
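/* A hypothetical illustration: if the original call is bar (&s) and only
the scalar member of s at byte offset 4 is needed, the code below builds
the new argument as MEM_REF (&s, 4) when the member is passed by value,
or as &MEM_REF (&s, 4) when adj->by_ref is set; in source terms the call
becomes bar (s.f) or bar (&s.f), with the member name f invented for the
example. */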
4211
4212 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4213 base = gimple_call_arg (stmt, adj->base_index);
4214 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4215 : EXPR_LOCATION (base);
4216
4217 if (TREE_CODE (base) != ADDR_EXPR
4218 && POINTER_TYPE_P (TREE_TYPE (base)))
4219 off = build_int_cst (adj->alias_ptr_type,
4220 adj->offset / BITS_PER_UNIT);
4221 else
4222 {
4223 HOST_WIDE_INT base_offset;
4224 tree prev_base;
4225 bool addrof;
4226
4227 if (TREE_CODE (base) == ADDR_EXPR)
4228 {
4229 base = TREE_OPERAND (base, 0);
4230 addrof = true;
4231 }
4232 else
4233 addrof = false;
4234 prev_base = base;
4235 base = get_addr_base_and_unit_offset (base, &base_offset);
4236 /* Aggregate arguments can have non-invariant addresses. */
4237 if (!base)
4238 {
4239 base = build_fold_addr_expr (prev_base);
4240 off = build_int_cst (adj->alias_ptr_type,
4241 adj->offset / BITS_PER_UNIT);
4242 }
4243 else if (TREE_CODE (base) == MEM_REF)
4244 {
4245 if (!addrof)
4246 {
4247 deref_base = true;
4248 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4249 }
4250 off = build_int_cst (adj->alias_ptr_type,
4251 base_offset
4252 + adj->offset / BITS_PER_UNIT);
4253 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4254 off);
4255 base = TREE_OPERAND (base, 0);
4256 }
4257 else
4258 {
4259 off = build_int_cst (adj->alias_ptr_type,
4260 base_offset
4261 + adj->offset / BITS_PER_UNIT);
4262 base = build_fold_addr_expr (base);
4263 }
4264 }
4265
4266 if (!adj->by_ref)
4267 {
4268 tree type = adj->type;
4269 unsigned int align;
4270 unsigned HOST_WIDE_INT misalign;
4271
4272 if (deref_base)
4273 {
4274 align = deref_align;
4275 misalign = 0;
4276 }
4277 else
4278 {
4279 get_pointer_alignment_1 (base, &align, &misalign);
4280 if (TYPE_ALIGN (type) > align)
4281 align = TYPE_ALIGN (type);
4282 }
4283 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4284 * BITS_PER_UNIT);
4285 misalign = misalign & (align - 1);
4286 if (misalign != 0)
4287 align = least_bit_hwi (misalign);
4288 if (align < TYPE_ALIGN (type))
4289 type = build_aligned_type (type, align);
4290 base = force_gimple_operand_gsi (&gsi, base,
4291 true, NULL, true, GSI_SAME_STMT);
4292 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4293 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4294 /* If expr is not a valid gimple call argument, emit
4295 a load into a temporary. */
4296 if (is_gimple_reg_type (TREE_TYPE (expr)))
4297 {
4298 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4299 if (gimple_in_ssa_p (cfun))
4300 {
4301 gimple_set_vuse (tem, gimple_vuse (stmt));
4302 expr = make_ssa_name (TREE_TYPE (expr), tem);
4303 }
4304 else
4305 expr = create_tmp_reg (TREE_TYPE (expr));
4306 gimple_assign_set_lhs (tem, expr);
4307 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4308 }
4309 }
4310 else
4311 {
4312 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4313 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4314 expr = build_fold_addr_expr (expr);
4315 expr = force_gimple_operand_gsi (&gsi, expr,
4316 true, NULL, true, GSI_SAME_STMT);
4317 }
4318 vargs.quick_push (expr);
4319 }
4320 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4321 {
4322 unsigned int ix;
4323 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4324 gimple *def_temp;
4325
4326 arg = gimple_call_arg (stmt, adj->base_index);
4327 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4328 {
4329 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4330 continue;
4331 arg = fold_convert_loc (gimple_location (stmt),
4332 TREE_TYPE (origin), arg);
4333 }
4334 if (debug_args == NULL)
4335 debug_args = decl_debug_args_insert (callee_decl);
4336 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4337 if (ddecl == origin)
4338 {
4339 ddecl = (**debug_args)[ix + 1];
4340 break;
4341 }
4342 if (ddecl == NULL)
4343 {
4344 ddecl = make_node (DEBUG_EXPR_DECL);
4345 DECL_ARTIFICIAL (ddecl) = 1;
4346 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4347 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4348
4349 vec_safe_push (*debug_args, origin);
4350 vec_safe_push (*debug_args, ddecl);
4351 }
4352 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4353 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4354 }
4355 }
4356
4357 if (dump_file && (dump_flags & TDF_DETAILS))
4358 {
4359 fprintf (dump_file, "replacing stmt:");
4360 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4361 }
4362
4363 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4364 vargs.release ();
4365 if (gimple_call_lhs (stmt))
4366 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4367
4368 gimple_set_block (new_stmt, gimple_block (stmt));
4369 if (gimple_has_location (stmt))
4370 gimple_set_location (new_stmt, gimple_location (stmt));
4371 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4372 gimple_call_copy_flags (new_stmt, stmt);
4373 if (gimple_in_ssa_p (cfun))
4374 {
4375 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4376 if (gimple_vdef (stmt))
4377 {
4378 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4379 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4380 }
4381 }
4382
4383 if (dump_file && (dump_flags & TDF_DETAILS))
4384 {
4385 fprintf (dump_file, "with stmt:");
4386 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4387 fprintf (dump_file, "\n");
4388 }
4389 gsi_replace (&gsi, new_stmt, true);
4390 if (cs)
4391 cs->set_call_stmt (new_stmt);
4392 do
4393 {
4394 current_node->record_stmt_references (gsi_stmt (gsi));
4395 gsi_prev (&gsi);
4396 }
4397 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4398 }
4399
4400 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4401 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4402 specifies whether the function should care about type incompatibility between
4403 the current and new expressions. If it is false, the function will leave
4404 incompatibility issues to the caller. Return true iff the expression
4405 was modified. */
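/* Illustratively (hypothetical names): if a use of parameter p matches an
adjustment whose new_decl is ISRA.3, the expression is replaced by ISRA.3
itself, or by *ISRA.3 when the adjustment is by_ref; when CONVERT is set
and the types differ, the replacement is additionally wrapped in a
VIEW_CONVERT_EXPR. */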
4406
4407 bool
4408 ipa_modify_expr (tree *expr, bool convert,
4409 ipa_parm_adjustment_vec adjustments)
4410 {
4411 struct ipa_parm_adjustment *cand
4412 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4413 if (!cand)
4414 return false;
4415
4416 tree src;
4417 if (cand->by_ref)
4418 {
4419 src = build_simple_mem_ref (cand->new_decl);
4420 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4421 }
4422 else
4423 src = cand->new_decl;
4424
4425 if (dump_file && (dump_flags & TDF_DETAILS))
4426 {
4427 fprintf (dump_file, "About to replace expr ");
4428 print_generic_expr (dump_file, *expr, 0);
4429 fprintf (dump_file, " with ");
4430 print_generic_expr (dump_file, src, 0);
4431 fprintf (dump_file, "\n");
4432 }
4433
4434 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4435 {
4436 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4437 *expr = vce;
4438 }
4439 else
4440 *expr = src;
4441 return true;
4442 }
4443
4444 /* If T is an SSA_NAME, return NULL if it is not a default def or
4445 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4446 the base variable is always returned, regardless if it is a default
4447 def. Return T if it is not an SSA_NAME. */
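/* For instance (illustrative): given the default definition a_1(D) of a
PARM_DECL a, this returns a; for a non-default SSA_NAME a_2 it returns
NULL_TREE, unless IGNORE_DEFAULT_DEF is true, in which case it still
returns a. */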
4448
4449 static tree
4450 get_ssa_base_param (tree t, bool ignore_default_def)
4451 {
4452 if (TREE_CODE (t) == SSA_NAME)
4453 {
4454 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4455 return SSA_NAME_VAR (t);
4456 else
4457 return NULL_TREE;
4458 }
4459 return t;
4460 }
4461
4462 /* Given an expression, return an adjustment entry specifying the
4463 transformation to be done on EXPR. If no suitable adjustment entry
4464 was found, returns NULL.
4465
4466 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4467 default def, otherwise bail on them.
4468
4469 If CONVERT is non-NULL, this function will set *CONVERT if the
4470 expression provided is a component reference. ADJUSTMENTS is the
4471 adjustments vector. */
4472
4473 ipa_parm_adjustment *
4474 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4475 ipa_parm_adjustment_vec adjustments,
4476 bool ignore_default_def)
4477 {
4478 if (TREE_CODE (**expr) == BIT_FIELD_REF
4479 || TREE_CODE (**expr) == IMAGPART_EXPR
4480 || TREE_CODE (**expr) == REALPART_EXPR)
4481 {
4482 *expr = &TREE_OPERAND (**expr, 0);
4483 if (convert)
4484 *convert = true;
4485 }
4486
4487 HOST_WIDE_INT offset, size, max_size;
4488 bool reverse;
4489 tree base
4490 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4491 if (!base || size == -1 || max_size == -1)
4492 return NULL;
4493
4494 if (TREE_CODE (base) == MEM_REF)
4495 {
4496 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4497 base = TREE_OPERAND (base, 0);
4498 }
4499
4500 base = get_ssa_base_param (base, ignore_default_def);
4501 if (!base || TREE_CODE (base) != PARM_DECL)
4502 return NULL;
4503
4504 struct ipa_parm_adjustment *cand = NULL;
4505 unsigned int len = adjustments.length ();
4506 for (unsigned i = 0; i < len; i++)
4507 {
4508 struct ipa_parm_adjustment *adj = &adjustments[i];
4509
4510 if (adj->base == base
4511 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4512 {
4513 cand = adj;
4514 break;
4515 }
4516 }
4517
4518 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4519 return NULL;
4520 return cand;
4521 }
4522
4523 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4524
4525 static bool
4526 index_in_adjustments_multiple_times_p (int base_index,
4527 ipa_parm_adjustment_vec adjustments)
4528 {
4529 int i, len = adjustments.length ();
4530 bool one = false;
4531
4532 for (i = 0; i < len; i++)
4533 {
4534 struct ipa_parm_adjustment *adj;
4535 adj = &adjustments[i];
4536
4537 if (adj->base_index == base_index)
4538 {
4539 if (one)
4540 return true;
4541 else
4542 one = true;
4543 }
4544 }
4545 return false;
4546 }
4547
4548
4549 /* Return adjustments that should have the same effect on function parameters
4550 and call arguments as if they were first changed according to adjustments in
4551 INNER and then by adjustments in OUTER. */
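/* A hypothetical worked example: with original parameters (a, b, c), let
INNER remove b, yielding (a, c), and let OUTER, expressed against (a, c),
copy index 0 and remove index 1. The combined vector then copies a and
removes both b and c, which is the same net effect as applying INNER
first and OUTER second. */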
4552
4553 ipa_parm_adjustment_vec
4554 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4555 ipa_parm_adjustment_vec outer)
4556 {
4557 int i, outlen = outer.length ();
4558 int inlen = inner.length ();
4559 int removals = 0;
4560 ipa_parm_adjustment_vec adjustments, tmp;
4561
4562 tmp.create (inlen);
4563 for (i = 0; i < inlen; i++)
4564 {
4565 struct ipa_parm_adjustment *n;
4566 n = &inner[i];
4567
4568 if (n->op == IPA_PARM_OP_REMOVE)
4569 removals++;
4570 else
4571 {
4572 /* FIXME: Handling of new arguments is not implemented yet. */
4573 gcc_assert (n->op != IPA_PARM_OP_NEW);
4574 tmp.quick_push (*n);
4575 }
4576 }
4577
4578 adjustments.create (outlen + removals);
4579 for (i = 0; i < outlen; i++)
4580 {
4581 struct ipa_parm_adjustment r;
4582 struct ipa_parm_adjustment *out = &outer[i];
4583 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4584
4585 memset (&r, 0, sizeof (r));
4586 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4587 if (out->op == IPA_PARM_OP_REMOVE)
4588 {
4589 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4590 {
4591 r.op = IPA_PARM_OP_REMOVE;
4592 adjustments.quick_push (r);
4593 }
4594 continue;
4595 }
4596 else
4597 {
4598 /* FIXME: Handling of new arguments is not implemented yet. */
4599 gcc_assert (out->op != IPA_PARM_OP_NEW);
4600 }
4601
4602 r.base_index = in->base_index;
4603 r.type = out->type;
4604
4605 /* FIXME: Create nonlocal value too. */
4606
4607 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4608 r.op = IPA_PARM_OP_COPY;
4609 else if (in->op == IPA_PARM_OP_COPY)
4610 r.offset = out->offset;
4611 else if (out->op == IPA_PARM_OP_COPY)
4612 r.offset = in->offset;
4613 else
4614 r.offset = in->offset + out->offset;
4615 adjustments.quick_push (r);
4616 }
4617
4618 for (i = 0; i < inlen; i++)
4619 {
4620 struct ipa_parm_adjustment *n = &inner[i];
4621
4622 if (n->op == IPA_PARM_OP_REMOVE)
4623 adjustments.quick_push (*n);
4624 }
4625
4626 tmp.release ();
4627 return adjustments;
4628 }
4629
4630 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4631 friendly way, assuming they are meant to be applied to FNDECL. */
4632
4633 void
4634 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4635 tree fndecl)
4636 {
4637 int i, len = adjustments.length ();
4638 bool first = true;
4639 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4640
4641 fprintf (file, "IPA param adjustments: ");
4642 for (i = 0; i < len; i++)
4643 {
4644 struct ipa_parm_adjustment *adj;
4645 adj = &adjustments[i];
4646
4647 if (!first)
4648 fprintf (file, " ");
4649 else
4650 first = false;
4651
4652 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4653 print_generic_expr (file, parms[adj->base_index], 0);
4654 if (adj->base)
4655 {
4656 fprintf (file, ", base: ");
4657 print_generic_expr (file, adj->base, 0);
4658 }
4659 if (adj->new_decl)
4660 {
4661 fprintf (file, ", new_decl: ");
4662 print_generic_expr (file, adj->new_decl, 0);
4663 }
4664 if (adj->new_ssa_base)
4665 {
4666 fprintf (file, ", new_ssa_base: ");
4667 print_generic_expr (file, adj->new_ssa_base, 0);
4668 }
4669
4670 if (adj->op == IPA_PARM_OP_COPY)
4671 fprintf (file, ", copy_param");
4672 else if (adj->op == IPA_PARM_OP_REMOVE)
4673 fprintf (file, ", remove_param");
4674 else
4675 fprintf (file, ", offset %li", (long) adj->offset);
4676 if (adj->by_ref)
4677 fprintf (file, ", by_ref");
4678 print_node_brief (file, ", type: ", adj->type, 0);
4679 fprintf (file, "\n");
4680 }
4681 parms.release ();
4682 }
4683
4684 /* Dump the AV linked list. */
4685
4686 void
4687 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4688 {
4689 bool comma = false;
4690 fprintf (f, " Aggregate replacements:");
4691 for (; av; av = av->next)
4692 {
4693 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4694 av->index, av->offset);
4695 print_generic_expr (f, av->value, 0);
4696 comma = true;
4697 }
4698 fprintf (f, "\n");
4699 }
4700
4701 /* Stream out jump function JUMP_FUNC to OB. */
4702
4703 static void
4704 ipa_write_jump_function (struct output_block *ob,
4705 struct ipa_jump_func *jump_func)
4706 {
4707 struct ipa_agg_jf_item *item;
4708 struct bitpack_d bp;
4709 int i, count;
4710
4711 streamer_write_uhwi (ob, jump_func->type);
4712 switch (jump_func->type)
4713 {
4714 case IPA_JF_UNKNOWN:
4715 break;
4716 case IPA_JF_CONST:
4717 gcc_assert (
4718 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4719 stream_write_tree (ob, jump_func->value.constant.value, true);
4720 break;
4721 case IPA_JF_PASS_THROUGH:
4722 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4723 if (jump_func->value.pass_through.operation == NOP_EXPR)
4724 {
4725 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4726 bp = bitpack_create (ob->main_stream);
4727 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4728 streamer_write_bitpack (&bp);
4729 }
4730 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4731 == tcc_unary)
4732 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4733 else
4734 {
4735 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4736 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4737 }
4738 break;
4739 case IPA_JF_ANCESTOR:
4740 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4741 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4742 bp = bitpack_create (ob->main_stream);
4743 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4744 streamer_write_bitpack (&bp);
4745 break;
4746 }
4747
4748 count = vec_safe_length (jump_func->agg.items);
4749 streamer_write_uhwi (ob, count);
4750 if (count)
4751 {
4752 bp = bitpack_create (ob->main_stream);
4753 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4754 streamer_write_bitpack (&bp);
4755 }
4756
4757 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4758 {
4759 streamer_write_uhwi (ob, item->offset);
4760 stream_write_tree (ob, item->value, true);
4761 }
4762
4763 bp = bitpack_create (ob->main_stream);
4764 bp_pack_value (&bp, jump_func->bits.known, 1);
4765 streamer_write_bitpack (&bp);
4766 if (jump_func->bits.known)
4767 {
4768 streamer_write_widest_int (ob, jump_func->bits.value);
4769 streamer_write_widest_int (ob, jump_func->bits.mask);
4770 }
4771 bp_pack_value (&bp, jump_func->vr_known, 1);
4772 streamer_write_bitpack (&bp);
4773 if (jump_func->vr_known)
4774 {
4775 streamer_write_enum (ob->main_stream, value_range_type,
4776 VR_LAST, jump_func->m_vr.type);
4777 stream_write_tree (ob, jump_func->m_vr.min, true);
4778 stream_write_tree (ob, jump_func->m_vr.max, true);
4779 }
4780 }
4781
4782 /* Read in jump function JUMP_FUNC from IB. */
4783
4784 static void
4785 ipa_read_jump_function (struct lto_input_block *ib,
4786 struct ipa_jump_func *jump_func,
4787 struct cgraph_edge *cs,
4788 struct data_in *data_in)
4789 {
4790 enum jump_func_type jftype;
4791 enum tree_code operation;
4792 int i, count;
4793
4794 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4795 switch (jftype)
4796 {
4797 case IPA_JF_UNKNOWN:
4798 ipa_set_jf_unknown (jump_func);
4799 break;
4800 case IPA_JF_CONST:
4801 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4802 break;
4803 case IPA_JF_PASS_THROUGH:
4804 operation = (enum tree_code) streamer_read_uhwi (ib);
4805 if (operation == NOP_EXPR)
4806 {
4807 int formal_id = streamer_read_uhwi (ib);
4808 struct bitpack_d bp = streamer_read_bitpack (ib);
4809 bool agg_preserved = bp_unpack_value (&bp, 1);
4810 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4811 }
4812 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4813 {
4814 int formal_id = streamer_read_uhwi (ib);
4815 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4816 }
4817 else
4818 {
4819 tree operand = stream_read_tree (ib, data_in);
4820 int formal_id = streamer_read_uhwi (ib);
4821 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4822 operation);
4823 }
4824 break;
4825 case IPA_JF_ANCESTOR:
4826 {
4827 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4828 int formal_id = streamer_read_uhwi (ib);
4829 struct bitpack_d bp = streamer_read_bitpack (ib);
4830 bool agg_preserved = bp_unpack_value (&bp, 1);
4831 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4832 break;
4833 }
4834 }
4835
4836 count = streamer_read_uhwi (ib);
4837 vec_alloc (jump_func->agg.items, count);
4838 if (count)
4839 {
4840 struct bitpack_d bp = streamer_read_bitpack (ib);
4841 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4842 }
4843 for (i = 0; i < count; i++)
4844 {
4845 struct ipa_agg_jf_item item;
4846 item.offset = streamer_read_uhwi (ib);
4847 item.value = stream_read_tree (ib, data_in);
4848 jump_func->agg.items->quick_push (item);
4849 }
4850
4851 struct bitpack_d bp = streamer_read_bitpack (ib);
4852 bool bits_known = bp_unpack_value (&bp, 1);
4853 if (bits_known)
4854 {
4855 jump_func->bits.known = true;
4856 jump_func->bits.value = streamer_read_widest_int (ib);
4857 jump_func->bits.mask = streamer_read_widest_int (ib);
4858 }
4859 else
4860 jump_func->bits.known = false;
4861
4862 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4863 bool vr_known = bp_unpack_value (&vr_bp, 1);
4864 if (vr_known)
4865 {
4866 jump_func->vr_known = true;
4867 jump_func->m_vr.type = streamer_read_enum (ib,
4868 value_range_type,
4869 VR_LAST);
4870 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4871 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4872 }
4873 else
4874 jump_func->vr_known = false;
4875 }
4876
4877 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4878 relevant to indirect inlining to OB. */
4879
4880 static void
4881 ipa_write_indirect_edge_info (struct output_block *ob,
4882 struct cgraph_edge *cs)
4883 {
4884 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4885 struct bitpack_d bp;
4886
4887 streamer_write_hwi (ob, ii->param_index);
4888 bp = bitpack_create (ob->main_stream);
4889 bp_pack_value (&bp, ii->polymorphic, 1);
4890 bp_pack_value (&bp, ii->agg_contents, 1);
4891 bp_pack_value (&bp, ii->member_ptr, 1);
4892 bp_pack_value (&bp, ii->by_ref, 1);
4893 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4894 bp_pack_value (&bp, ii->vptr_changed, 1);
4895 streamer_write_bitpack (&bp);
4896 if (ii->agg_contents || ii->polymorphic)
4897 streamer_write_hwi (ob, ii->offset);
4898 else
4899 gcc_assert (ii->offset == 0);
4900
4901 if (ii->polymorphic)
4902 {
4903 streamer_write_hwi (ob, ii->otr_token);
4904 stream_write_tree (ob, ii->otr_type, true);
4905 ii->context.stream_out (ob);
4906 }
4907 }
4908
4909 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4910 relevant to indirect inlining from IB. */
4911
4912 static void
4913 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4914 struct data_in *data_in,
4915 struct cgraph_edge *cs)
4916 {
4917 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4918 struct bitpack_d bp;
4919
4920 ii->param_index = (int) streamer_read_hwi (ib);
4921 bp = streamer_read_bitpack (ib);
4922 ii->polymorphic = bp_unpack_value (&bp, 1);
4923 ii->agg_contents = bp_unpack_value (&bp, 1);
4924 ii->member_ptr = bp_unpack_value (&bp, 1);
4925 ii->by_ref = bp_unpack_value (&bp, 1);
4926 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4927 ii->vptr_changed = bp_unpack_value (&bp, 1);
4928 if (ii->agg_contents || ii->polymorphic)
4929 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4930 else
4931 ii->offset = 0;
4932 if (ii->polymorphic)
4933 {
4934 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4935 ii->otr_type = stream_read_tree (ib, data_in);
4936 ii->context.stream_in (ib, data_in);
4937 }
4938 }
4939
4940 /* Stream out NODE info to OB. */
4941
4942 static void
4943 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4944 {
4945 int node_ref;
4946 lto_symtab_encoder_t encoder;
4947 struct ipa_node_params *info = IPA_NODE_REF (node);
4948 int j;
4949 struct cgraph_edge *e;
4950 struct bitpack_d bp;
4951
4952 encoder = ob->decl_state->symtab_node_encoder;
4953 node_ref = lto_symtab_encoder_encode (encoder, node);
4954 streamer_write_uhwi (ob, node_ref);
4955
4956 streamer_write_uhwi (ob, ipa_get_param_count (info));
4957 for (j = 0; j < ipa_get_param_count (info); j++)
4958 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4959 bp = bitpack_create (ob->main_stream);
4960 gcc_assert (info->analysis_done
4961 || ipa_get_param_count (info) == 0);
4962 gcc_assert (!info->node_enqueued);
4963 gcc_assert (!info->ipcp_orig_node);
4964 for (j = 0; j < ipa_get_param_count (info); j++)
4965 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4966 streamer_write_bitpack (&bp);
4967 for (j = 0; j < ipa_get_param_count (info); j++)
4968 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4969 for (e = node->callees; e; e = e->next_callee)
4970 {
4971 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4972
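/* The streamed count encodes two things at once: twice the number of
arguments, with the low bit set when polymorphic call contexts follow the
jump functions (the reader recovers these as COUNT & 1 and COUNT / 2). */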
4973 streamer_write_uhwi (ob,
4974 ipa_get_cs_argument_count (args) * 2
4975 + (args->polymorphic_call_contexts != NULL));
4976 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4977 {
4978 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4979 if (args->polymorphic_call_contexts != NULL)
4980 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4981 }
4982 }
4983 for (e = node->indirect_calls; e; e = e->next_callee)
4984 {
4985 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4986
4987 streamer_write_uhwi (ob,
4988 ipa_get_cs_argument_count (args) * 2
4989 + (args->polymorphic_call_contexts != NULL));
4990 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4991 {
4992 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4993 if (args->polymorphic_call_contexts != NULL)
4994 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4995 }
4996 ipa_write_indirect_edge_info (ob, e);
4997 }
4998 }
4999
5000 /* Stream in NODE info from IB. */
5001
5002 static void
5003 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5004 struct data_in *data_in)
5005 {
5006 struct ipa_node_params *info = IPA_NODE_REF (node);
5007 int k;
5008 struct cgraph_edge *e;
5009 struct bitpack_d bp;
5010
5011 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5012
5013 for (k = 0; k < ipa_get_param_count (info); k++)
5014 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
5015
5016 bp = streamer_read_bitpack (ib);
5017 if (ipa_get_param_count (info) != 0)
5018 info->analysis_done = true;
5019 info->node_enqueued = false;
5020 for (k = 0; k < ipa_get_param_count (info); k++)
5021 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5022 for (k = 0; k < ipa_get_param_count (info); k++)
5023 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5024 for (e = node->callees; e; e = e->next_callee)
5025 {
5026 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5027 int count = streamer_read_uhwi (ib);
5028 bool contexts_computed = count & 1;
5029 count /= 2;
5030
5031 if (!count)
5032 continue;
5033 vec_safe_grow_cleared (args->jump_functions, count);
5034 if (contexts_computed)
5035 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5036
5037 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5038 {
5039 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5040 data_in);
5041 if (contexts_computed)
5042 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5043 }
5044 }
5045 for (e = node->indirect_calls; e; e = e->next_callee)
5046 {
5047 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5048 int count = streamer_read_uhwi (ib);
5049 bool contexts_computed = count & 1;
5050 count /= 2;
5051
5052 if (count)
5053 {
5054 vec_safe_grow_cleared (args->jump_functions, count);
5055 if (contexts_computed)
5056 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5057 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5058 {
5059 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5060 data_in);
5061 if (contexts_computed)
5062 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5063 }
5064 }
5065 ipa_read_indirect_edge_info (ib, data_in, e);
5066 }
5067 }
5068
5069 /* Write jump functions for the functions in the current LTO partition. */
5070
5071 void
5072 ipa_prop_write_jump_functions (void)
5073 {
5074 struct cgraph_node *node;
5075 struct output_block *ob;
5076 unsigned int count = 0;
5077 lto_symtab_encoder_iterator lsei;
5078 lto_symtab_encoder_t encoder;
5079
5080 if (!ipa_node_params_sum)
5081 return;
5082
5083 ob = create_output_block (LTO_section_jump_functions);
5084 encoder = ob->decl_state->symtab_node_encoder;
5085 ob->symbol = NULL;
5086 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5087 lsei_next_function_in_partition (&lsei))
5088 {
5089 node = lsei_cgraph_node (lsei);
5090 if (node->has_gimple_body_p ()
5091 && IPA_NODE_REF (node) != NULL)
5092 count++;
5093 }
5094
5095 streamer_write_uhwi (ob, count);
5096
5097 /* Process all of the functions. */
5098 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5099 lsei_next_function_in_partition (&lsei))
5100 {
5101 node = lsei_cgraph_node (lsei);
5102 if (node->has_gimple_body_p ()
5103 && IPA_NODE_REF (node) != NULL)
5104 ipa_write_node_info (ob, node);
5105 }
5106 streamer_write_char_stream (ob->main_stream, 0);
5107 produce_asm (ob, NULL);
5108 destroy_output_block (ob);
5109 }
5110
5111 /* Read section in file FILE_DATA of length LEN with data DATA. */
5112
5113 static void
5114 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5115 size_t len)
5116 {
5117 const struct lto_function_header *header =
5118 (const struct lto_function_header *) data;
5119 const int cfg_offset = sizeof (struct lto_function_header);
5120 const int main_offset = cfg_offset + header->cfg_size;
5121 const int string_offset = main_offset + header->main_size;
5122 struct data_in *data_in;
5123 unsigned int i;
5124 unsigned int count;
5125
5126 lto_input_block ib_main ((const char *) data + main_offset,
5127 header->main_size, file_data->mode_table);
5128
5129 data_in =
5130 lto_data_in_create (file_data, (const char *) data + string_offset,
5131 header->string_size, vNULL);
5132 count = streamer_read_uhwi (&ib_main);
5133
5134 for (i = 0; i < count; i++)
5135 {
5136 unsigned int index;
5137 struct cgraph_node *node;
5138 lto_symtab_encoder_t encoder;
5139
5140 index = streamer_read_uhwi (&ib_main);
5141 encoder = file_data->symtab_node_encoder;
5142 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5143 index));
5144 gcc_assert (node->definition);
5145 ipa_read_node_info (&ib_main, node, data_in);
5146 }
5147 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5148 len);
5149 lto_data_in_delete (data_in);
5150 }
5151
5152 /* Read ipcp jump functions. */
5153
5154 void
5155 ipa_prop_read_jump_functions (void)
5156 {
5157 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5158 struct lto_file_decl_data *file_data;
5159 unsigned int j = 0;
5160
5161 ipa_check_create_node_params ();
5162 ipa_check_create_edge_args ();
5163 ipa_register_cgraph_hooks ();
5164
5165 while ((file_data = file_data_vec[j++]))
5166 {
5167 size_t len;
5168 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5169
5170 if (data)
5171 ipa_prop_read_section (file_data, data, len);
5172 }
5173 }
5174
5175 /* After merging units, we can get mismatches in argument counts, and
5176 decl merging might have rendered parameter lists obsolete. Also
5177 compute called_with_variable_arg info. */
5178
5179 void
5180 ipa_update_after_lto_read (void)
5181 {
5182 ipa_check_create_node_params ();
5183 ipa_check_create_edge_args ();
5184 }
5185
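/* Stream out the aggregate value replacement chain, known value ranges and
known bits of the formal parameters of NODE to OB. */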
5186 void
5187 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5188 {
5189 int node_ref;
5190 unsigned int count = 0;
5191 lto_symtab_encoder_t encoder;
5192 struct ipa_agg_replacement_value *aggvals, *av;
5193
5194 aggvals = ipa_get_agg_replacements_for_node (node);
5195 encoder = ob->decl_state->symtab_node_encoder;
5196 node_ref = lto_symtab_encoder_encode (encoder, node);
5197 streamer_write_uhwi (ob, node_ref);
5198
5199 for (av = aggvals; av; av = av->next)
5200 count++;
5201 streamer_write_uhwi (ob, count);
5202
5203 for (av = aggvals; av; av = av->next)
5204 {
5205 struct bitpack_d bp;
5206
5207 streamer_write_uhwi (ob, av->offset);
5208 streamer_write_uhwi (ob, av->index);
5209 stream_write_tree (ob, av->value, true);
5210
5211 bp = bitpack_create (ob->main_stream);
5212 bp_pack_value (&bp, av->by_ref, 1);
5213 streamer_write_bitpack (&bp);
5214 }
5215
5216 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5217 if (ts && vec_safe_length (ts->m_vr) > 0)
5218 {
5219 count = ts->m_vr->length ();
5220 streamer_write_uhwi (ob, count);
5221 for (unsigned i = 0; i < count; ++i)
5222 {
5223 struct bitpack_d bp;
5224 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5225 bp = bitpack_create (ob->main_stream);
5226 bp_pack_value (&bp, parm_vr->known, 1);
5227 streamer_write_bitpack (&bp);
5228 if (parm_vr->known)
5229 {
5230 streamer_write_enum (ob->main_stream, value_range_type,
5231 VR_LAST, parm_vr->type);
5232 streamer_write_wide_int (ob, parm_vr->min);
5233 streamer_write_wide_int (ob, parm_vr->max);
5234 }
5235 }
5236 }
5237 else
5238 streamer_write_uhwi (ob, 0);
5239
5240 if (ts && vec_safe_length (ts->bits) > 0)
5241 {
5242 count = ts->bits->length ();
5243 streamer_write_uhwi (ob, count);
5244
5245 for (unsigned i = 0; i < count; ++i)
5246 {
5247 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5248 struct bitpack_d bp = bitpack_create (ob->main_stream);
5249 bp_pack_value (&bp, bits_jfunc.known, 1);
5250 streamer_write_bitpack (&bp);
5251 if (bits_jfunc.known)
5252 {
5253 streamer_write_widest_int (ob, bits_jfunc.value);
5254 streamer_write_widest_int (ob, bits_jfunc.mask);
5255 }
5256 }
5257 }
5258 else
5259 streamer_write_uhwi (ob, 0);
5260 }
5261
5262 /* Stream in the aggregate value replacement chain and other transformation data (value ranges, known bits) for NODE from IB. */
5263
5264 static void
5265 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5266 data_in *data_in)
5267 {
5268 struct ipa_agg_replacement_value *aggvals = NULL;
5269 unsigned int count, i;
5270
5271 count = streamer_read_uhwi (ib);
5272 for (i = 0; i < count; i++)
5273 {
5274 struct ipa_agg_replacement_value *av;
5275 struct bitpack_d bp;
5276
5277 av = ggc_alloc<ipa_agg_replacement_value> ();
5278 av->offset = streamer_read_uhwi (ib);
5279 av->index = streamer_read_uhwi (ib);
5280 av->value = stream_read_tree (ib, data_in);
5281 bp = streamer_read_bitpack (ib);
5282 av->by_ref = bp_unpack_value (&bp, 1);
5283 av->next = aggvals;
5284 aggvals = av;
5285 }
5286 ipa_set_node_agg_value_chain (node, aggvals);
5287
5288 count = streamer_read_uhwi (ib);
5289 if (count > 0)
5290 {
5291 ipcp_grow_transformations_if_necessary ();
5292
5293 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5294 vec_safe_grow_cleared (ts->m_vr, count);
5295 for (i = 0; i < count; i++)
5296 {
5297 ipa_vr *parm_vr;
5298 parm_vr = &(*ts->m_vr)[i];
5299 struct bitpack_d bp;
5300 bp = streamer_read_bitpack (ib);
5301 parm_vr->known = bp_unpack_value (&bp, 1);
5302 if (parm_vr->known)
5303 {
5304 parm_vr->type = streamer_read_enum (ib, value_range_type,
5305 VR_LAST);
5306 parm_vr->min = streamer_read_wide_int (ib);
5307 parm_vr->max = streamer_read_wide_int (ib);
5308 }
5309 }
5310 }
5311 count = streamer_read_uhwi (ib);
5312 if (count > 0)
5313 {
5314 ipcp_grow_transformations_if_necessary ();
5315
5316 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5317 vec_safe_grow_cleared (ts->bits, count);
5318
5319 for (i = 0; i < count; i++)
5320 {
5321 ipa_bits& bits_jfunc = (*ts->bits)[i];
5322 struct bitpack_d bp = streamer_read_bitpack (ib);
5323 bits_jfunc.known = bp_unpack_value (&bp, 1);
5324 if (bits_jfunc.known)
5325 {
5326 bits_jfunc.value = streamer_read_widest_int (ib);
5327 bits_jfunc.mask = streamer_read_widest_int (ib);
5328 }
5329 }
5330 }
5331 }
5332
5333 /* Write all aggregate replacements for nodes in the current LTO partition. */
5334
5335 void
5336 ipcp_write_transformation_summaries (void)
5337 {
5338 struct cgraph_node *node;
5339 struct output_block *ob;
5340 unsigned int count = 0;
5341 lto_symtab_encoder_iterator lsei;
5342 lto_symtab_encoder_t encoder;
5343
5344 ob = create_output_block (LTO_section_ipcp_transform);
5345 encoder = ob->decl_state->symtab_node_encoder;
5346 ob->symbol = NULL;
5347 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5348 lsei_next_function_in_partition (&lsei))
5349 {
5350 node = lsei_cgraph_node (lsei);
5351 if (node->has_gimple_body_p ())
5352 count++;
5353 }
5354
5355 streamer_write_uhwi (ob, count);
5356
5357 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5358 lsei_next_function_in_partition (&lsei))
5359 {
5360 node = lsei_cgraph_node (lsei);
5361 if (node->has_gimple_body_p ())
5362 write_ipcp_transformation_info (ob, node);
5363 }
5364 streamer_write_char_stream (ob->main_stream, 0);
5365 produce_asm (ob, NULL);
5366 destroy_output_block (ob);
5367 }
5368
5369 /* Read replacements section in file FILE_DATA of length LEN with data
5370 DATA. */
5371
5372 static void
5373 read_replacements_section (struct lto_file_decl_data *file_data,
5374 const char *data,
5375 size_t len)
5376 {
5377 const struct lto_function_header *header =
5378 (const struct lto_function_header *) data;
5379 const int cfg_offset = sizeof (struct lto_function_header);
5380 const int main_offset = cfg_offset + header->cfg_size;
5381 const int string_offset = main_offset + header->main_size;
5382 struct data_in *data_in;
5383 unsigned int i;
5384 unsigned int count;
5385
5386 lto_input_block ib_main ((const char *) data + main_offset,
5387 header->main_size, file_data->mode_table);
5388
5389 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5390 header->string_size, vNULL);
5391 count = streamer_read_uhwi (&ib_main);
5392
5393 for (i = 0; i < count; i++)
5394 {
5395 unsigned int index;
5396 struct cgraph_node *node;
5397 lto_symtab_encoder_t encoder;
5398
5399 index = streamer_read_uhwi (&ib_main);
5400 encoder = file_data->symtab_node_encoder;
5401 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5402 index));
5403 gcc_assert (node->definition);
5404 read_ipcp_transformation_info (&ib_main, node, data_in);
5405 }
5406 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5407 len);
5408 lto_data_in_delete (data_in);
5409 }
5410
5411 /* Read IPA-CP aggregate replacements. */
5412
5413 void
5414 ipcp_read_transformation_summaries (void)
5415 {
5416 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5417 struct lto_file_decl_data *file_data;
5418 unsigned int j = 0;
5419
5420 while ((file_data = file_data_vec[j++]))
5421 {
5422 size_t len;
5423 const char *data = lto_get_section_data (file_data,
5424 LTO_section_ipcp_transform,
5425 NULL, &len);
5426 if (data)
5427 read_replacements_section (file_data, data, len);
5428 }
5429 }
5430
5431 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5432 NODE. */
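/* For instance (hypothetical): with four original parameters and
combined_args_to_skip containing only bit 1, the mapping built below is
adj = { 0, -1, 1, 2 }, so a replacement for original parameter 2 is
renumbered to 1 in the clone. */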
5433
5434 static void
5435 adjust_agg_replacement_values (struct cgraph_node *node,
5436 struct ipa_agg_replacement_value *aggval)
5437 {
5438 struct ipa_agg_replacement_value *v;
5439 int i, c = 0, d = 0, *adj;
5440
5441 if (!node->clone.combined_args_to_skip)
5442 return;
5443
5444 for (v = aggval; v; v = v->next)
5445 {
5446 gcc_assert (v->index >= 0);
5447 if (c < v->index)
5448 c = v->index;
5449 }
5450 c++;
5451
5452 adj = XALLOCAVEC (int, c);
5453 for (i = 0; i < c; i++)
5454 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5455 {
5456 adj[i] = -1;
5457 d++;
5458 }
5459 else
5460 adj[i] = i - d;
5461
5462 for (v = aggval; v; v = v->next)
5463 v->index = adj[v->index];
5464 }
5465
5466 /* Dominator walker driving the ipcp modification phase. */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor> descs,
                         struct ipa_agg_replacement_value *av,
                         bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
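          /* E.g. (illustrative) a VIEW_CONVERT_EXPR<long>(s.arr) reading an
             int[2] field as a single long.  */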
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

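      /* Determine whether RHS loads a tracked piece of an aggregate passed
         to this function, and if so at which offset and size.  */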
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &offset, &size, &by_ref))
        continue;
      for (v = m_aggval; v; v = v->next)
        if (v->index == index
            && v->offset == offset)
          break;
      if (!v
          || v->by_ref != by_ref
          || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v->value))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v->value)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v->value, 0);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs, 0);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        *m_cfg_changed = true;
    }
  return NULL;
}

/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  */
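/* E.g. (illustrative): if every caller passes an even value for a
   parameter, its lowest bit is known to be zero, and below this is turned
   into nonzero-bits information for integral parameters or alignment
   information for pointer parameters.  */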

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i].known
          || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
               || POINTER_TYPE_P (TREE_TYPE (parm)))
          || !is_gimple_reg (parm))
        continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
        continue;

      if (dump_file)
        {
          fprintf (dump_file, "Adjusting mask for param %u to ", i);
          print_hex (bits[i].mask, dump_file);
          fprintf (dump_file, "\n");
        }

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
        {
          unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
          signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

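          /* Bits set in the mask are unknown, so treat them as possibly
             nonzero alongside the bits known to be one.  */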
          wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
                                  | wide_int::from (bits[i].value, prec, sgn);
          set_nonzero_bits (ddef, nonzero_bits);
        }
      else
        {
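          /* TEM & -TEM isolates the lowest set (i.e. unknown) bit of the
             mask; all bits below it are known, so the pointer is aligned to
             that power of two and the known value bits within it give the
             misalignment.  E.g. (illustrative) a mask of 0x...f8 with value
             0x2 yields align 8 and misalign 2.  */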
          unsigned tem = bits[i].mask.to_uhwi ();
          unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
          unsigned align = tem & -tem;
          unsigned misalign = bitpos & (align - 1);

          if (align > 1)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
                         align, misalign);

              unsigned old_align, old_misalign;
              struct ptr_info_def *pi = get_ptr_info (ddef);
              bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                       &old_misalign);

              if (old_known
                  && old_align > align)
                {
                  if (dump_file)
                    {
                      fprintf (dump_file, "But alignment was already %u.\n",
                               old_align);
                      if ((old_misalign & (align - 1)) != misalign)
                        fprintf (dump_file,
                                 "old_misalign (%u) and misalign (%u) "
                                 "mismatch\n", old_misalign, misalign);
                    }
                  continue;
                }

              if (old_known
                  && ((misalign & (old_align - 1)) != old_misalign)
                  && dump_file)
                fprintf (dump_file,
                         "old_misalign (%u) and misalign (%u) mismatch\n",
                         old_misalign, misalign);

              set_ptr_info_alignment (pi, align, misalign);
            }
        }
    }
}

/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known
          && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
        {
          tree type = TREE_TYPE (ddef);
          unsigned prec = TYPE_PRECISION (type);
          if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u ", i);
                  fprintf (dump_file, "%s[",
                           (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
                  print_decs (vr[i].min, dump_file);
                  fprintf (dump_file, ", ");
                  print_decs (vr[i].max, dump_file);
                  fprintf (dump_file, "]\n");
                }
              set_range_info (ddef, vr[i].type,
                              wide_int_storage::from (vr[i].min, prec,
                                                      TYPE_SIGN (type)),
                              wide_int_storage::from (vr[i].max, prec,
                                                      TYPE_SIGN (type)));
            }
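          /* An anti-range ~[0, 0] on a pointer excludes exactly the null
             value, i.e. the parameter is known to be non-NULL.  */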
          else if (POINTER_TYPE_P (TREE_TYPE (ddef))
                   && vr[i].type == VR_ANTI_RANGE
                   && wi::eq_p (vr[i].min, 0)
                   && wi::eq_p (vr[i].max, 0))
            {
              if (dump_file)
                fprintf (dump_file, "Setting nonnull for %u\n", i);
              set_ptr_nonnull (ddef);
            }
        }
    }
}

/* IPCP transformation phase: update known bits and value range information
   of formal parameters and replace loads from aggregates passed to the
   function with the constants IPA-CP determined they must contain.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  /* Drop the applied summaries so the transformation is not performed
     again.  */
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}