1 /* Interprocedural analyses.
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Vector where the edge argument information is actually stored.  */
61 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
62
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
67
68 /* Description of a reference to an IPA constant. */
69 struct ipa_cst_ref_desc
70 {
71 /* Edge that corresponds to the statement which took the reference. */
72 struct cgraph_edge *cs;
73 /* Linked list of duplicates created when call graph edges are cloned. */
74 struct ipa_cst_ref_desc *next_duplicate;
75 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
76      is out of control.  */
77 int refcount;
78 };
79
80 /* Allocation pool for reference descriptions. */
81
82 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
83 ("IPA-PROP ref descriptions");
84
85 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88 static bool
89 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90 {
91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
92
93 if (!fs_opts)
94 return false;
95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
96 }
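/* Editor's illustration (not from the GCC sources): a function carrying
   function-specific optimization options that disable optimization or
   IPA-CP is skipped by the check above, e.g.

     __attribute__ ((optimize ("O0")))
     int keep_unoptimized (int x) { return x + 1; }

   Here DECL_FUNCTION_SPECIFIC_OPTIMIZATION is non-NULL and
   opt_for_fn (decl, optimize) is 0, so the predicate returns true.  */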
97
98 /* Return index of the formal whose tree is PTREE in the function whose
99    parameters are described by DESCRIPTORS.  */
100
101 static int
102 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
103 tree ptree)
104 {
105 int i, count;
106
107 count = vec_safe_length (descriptors);
108 for (i = 0; i < count; i++)
109 if ((*descriptors)[i].decl_or_type == ptree)
110 return i;
111
112 return -1;
113 }
114
115 /* Return index of the formal whose tree is PTREE in function which corresponds
116 to INFO. */
117
118 int
119 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
120 {
121 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
122 }
123
124 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
125    to NODE.  */
126
127 static void
128 ipa_populate_param_decls (struct cgraph_node *node,
129 vec<ipa_param_descriptor, va_gc> &descriptors)
130 {
131 tree fndecl;
132 tree fnargs;
133 tree parm;
134 int param_num;
135
136 fndecl = node->decl;
137 gcc_assert (gimple_has_body_p (fndecl));
138 fnargs = DECL_ARGUMENTS (fndecl);
139 param_num = 0;
140 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
141 {
142 descriptors[param_num].decl_or_type = parm;
143 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
144 true);
145 param_num++;
146 }
147 }
148
149 /* Return how many formal parameters FNDECL has. */
150
151 int
152 count_formal_params (tree fndecl)
153 {
154 tree parm;
155 int count = 0;
156 gcc_assert (gimple_has_body_p (fndecl));
157
158 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
159 count++;
160
161 return count;
162 }
163
164 /* Dump information about the Ith formal parameter of the function
165    corresponding to INFO (its index and, if available, its declaration or
166    type) to FILE.  */
167
168 void
169 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
170 {
171 fprintf (file, "param #%i", i);
172 if ((*info->descriptors)[i].decl_or_type)
173 {
174 fprintf (file, " ");
175 print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
176 }
177 }
178
179 /* Initialize the ipa_node_params structure associated with NODE
180 to hold PARAM_COUNT parameters. */
181
182 void
183 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
184 {
185 struct ipa_node_params *info = IPA_NODE_REF (node);
186
187 if (!info->descriptors && param_count)
188 vec_safe_grow_cleared (info->descriptors, param_count);
189 }
190
191 /* Initialize the ipa_node_params structure associated with NODE by counting
192 the function parameters, creating the descriptors and populating their
193 param_decls. */
194
195 void
196 ipa_initialize_node_params (struct cgraph_node *node)
197 {
198 struct ipa_node_params *info = IPA_NODE_REF (node);
199
200 if (!info->descriptors)
201 {
202 ipa_alloc_node_params (node, count_formal_params (node->decl));
203 ipa_populate_param_decls (node, *info->descriptors);
204 }
205 }
206
207 /* Print the jump functions associated with call graph edge CS to file F. */
208
209 static void
210 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
211 {
212 int i, count;
213
214 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
215 for (i = 0; i < count; i++)
216 {
217 struct ipa_jump_func *jump_func;
218 enum jump_func_type type;
219
220 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
221 type = jump_func->type;
222
223 fprintf (f, " param %d: ", i);
224 if (type == IPA_JF_UNKNOWN)
225 fprintf (f, "UNKNOWN\n");
226 else if (type == IPA_JF_CONST)
227 {
228 tree val = jump_func->value.constant.value;
229 fprintf (f, "CONST: ");
230 print_generic_expr (f, val, 0);
231 if (TREE_CODE (val) == ADDR_EXPR
232 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
233 {
234 fprintf (f, " -> ");
235 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
236 0);
237 }
238 fprintf (f, "\n");
239 }
240 else if (type == IPA_JF_PASS_THROUGH)
241 {
242 fprintf (f, "PASS THROUGH: ");
243 fprintf (f, "%d, op %s",
244 jump_func->value.pass_through.formal_id,
245                  get_tree_code_name (jump_func->value.pass_through.operation));
246 if (jump_func->value.pass_through.operation != NOP_EXPR)
247 {
248 fprintf (f, " ");
249 print_generic_expr (f,
250 jump_func->value.pass_through.operand, 0);
251 }
252 if (jump_func->value.pass_through.agg_preserved)
253 fprintf (f, ", agg_preserved");
254 fprintf (f, "\n");
255 }
256 else if (type == IPA_JF_ANCESTOR)
257 {
258 fprintf (f, "ANCESTOR: ");
259 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
260 jump_func->value.ancestor.formal_id,
261 jump_func->value.ancestor.offset);
262 if (jump_func->value.ancestor.agg_preserved)
263 fprintf (f, ", agg_preserved");
264 fprintf (f, "\n");
265 }
266
267 if (jump_func->agg.items)
268 {
269 struct ipa_agg_jf_item *item;
270 int j;
271
272 fprintf (f, " Aggregate passed by %s:\n",
273 jump_func->agg.by_ref ? "reference" : "value");
274 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
275 {
276 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
277 item->offset);
278 if (TYPE_P (item->value))
279 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
280 tree_to_uhwi (TYPE_SIZE (item->value)));
281 else
282 {
283 fprintf (f, "cst: ");
284 print_generic_expr (f, item->value, 0);
285 }
286 fprintf (f, "\n");
287 }
288 }
289
290 struct ipa_polymorphic_call_context *ctx
291 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
292 if (ctx && !ctx->useless_p ())
293 {
294 fprintf (f, " Context: ");
295 ctx->dump (dump_file);
296 }
297
298 if (jump_func->bits.known)
299 {
300 fprintf (f, " value: "); print_hex (jump_func->bits.value, f);
301 fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
302 fprintf (f, "\n");
303 }
304 else
305 fprintf (f, " Unknown bits\n");
306
307 if (jump_func->vr_known)
308 {
309 fprintf (f, " VR ");
310 fprintf (f, "%s[",
311 (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
312 print_decs (jump_func->m_vr.min, f);
313 fprintf (f, ", ");
314 print_decs (jump_func->m_vr.max, f);
315 fprintf (f, "]\n");
316 }
317 else
318 fprintf (f, " Unknown VR\n");
319 }
320 }
321
322
323 /* Print the jump functions of all arguments on all call graph edges going from
324 NODE to file F. */
325
326 void
327 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
328 {
329 struct cgraph_edge *cs;
330
331 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
332 node->order);
333 for (cs = node->callees; cs; cs = cs->next_callee)
334 {
335 if (!ipa_edge_args_info_available_for_edge_p (cs))
336 continue;
337
338 fprintf (f, " callsite %s/%i -> %s/%i : \n",
339 xstrdup_for_dump (node->name ()), node->order,
340 xstrdup_for_dump (cs->callee->name ()),
341 cs->callee->order);
342 ipa_print_node_jump_functions_for_edge (f, cs);
343 }
344
345 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
346 {
347 struct cgraph_indirect_call_info *ii;
348 if (!ipa_edge_args_info_available_for_edge_p (cs))
349 continue;
350
351 ii = cs->indirect_info;
352 if (ii->agg_contents)
353 fprintf (f, " indirect %s callsite, calling param %i, "
354 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
355 ii->member_ptr ? "member ptr" : "aggregate",
356 ii->param_index, ii->offset,
357                 ii->by_ref ? "by reference" : "by value");
358 else
359 fprintf (f, " indirect %s callsite, calling param %i, "
360 "offset " HOST_WIDE_INT_PRINT_DEC,
361 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
362 ii->offset);
363
364 if (cs->call_stmt)
365 {
366 fprintf (f, ", for stmt ");
367 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
368 }
369 else
370 fprintf (f, "\n");
371 if (ii->polymorphic)
372 ii->context.dump (f);
373 ipa_print_node_jump_functions_for_edge (f, cs);
374 }
375 }
376
377 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
378
379 void
380 ipa_print_all_jump_functions (FILE *f)
381 {
382 struct cgraph_node *node;
383
384 fprintf (f, "\nJump functions:\n");
385 FOR_EACH_FUNCTION (node)
386 {
387 ipa_print_node_jump_functions (f, node);
388 }
389 }
390
391 /* Set JFUNC to be a jump function that carries no information (IPA_JF_UNKNOWN).  */
392
393 static void
394 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
395 {
396 jfunc->type = IPA_JF_UNKNOWN;
397 jfunc->bits.known = false;
398 jfunc->vr_known = false;
399 }
400
401 /* Set DST to be a copy of another jump function SRC (to be used by jump
402    function combination code).  The two functions will share their rdesc.  */
403
404 static void
405 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
406 struct ipa_jump_func *src)
407
408 {
409 gcc_checking_assert (src->type == IPA_JF_CONST);
410 dst->type = IPA_JF_CONST;
411 dst->value.constant = src->value.constant;
412 }
413
414 /* Set JFUNC to be a constant jump function.  */
415
416 static void
417 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
418 struct cgraph_edge *cs)
419 {
420 jfunc->type = IPA_JF_CONST;
421 jfunc->value.constant.value = unshare_expr_without_location (constant);
422
423 if (TREE_CODE (constant) == ADDR_EXPR
424 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
425 {
426 struct ipa_cst_ref_desc *rdesc;
427
428 rdesc = ipa_refdesc_pool.allocate ();
429 rdesc->cs = cs;
430 rdesc->next_duplicate = NULL;
431 rdesc->refcount = 1;
432 jfunc->value.constant.rdesc = rdesc;
433 }
434 else
435 jfunc->value.constant.rdesc = NULL;
436 }
437
438 /* Set JFUNC to be a simple pass-through jump function. */
439 static void
440 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
441 bool agg_preserved)
442 {
443 jfunc->type = IPA_JF_PASS_THROUGH;
444 jfunc->value.pass_through.operand = NULL_TREE;
445 jfunc->value.pass_through.formal_id = formal_id;
446 jfunc->value.pass_through.operation = NOP_EXPR;
447 jfunc->value.pass_through.agg_preserved = agg_preserved;
448 }
449
450 /* Set JFUNC to be a unary pass-through jump function.  */
451
452 static void
453 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
454 enum tree_code operation)
455 {
456 jfunc->type = IPA_JF_PASS_THROUGH;
457 jfunc->value.pass_through.operand = NULL_TREE;
458 jfunc->value.pass_through.formal_id = formal_id;
459 jfunc->value.pass_through.operation = operation;
460 jfunc->value.pass_through.agg_preserved = false;
461 }
462 /* Set JFUNC to be an arithmetic pass-through jump function.  */
463
464 static void
465 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
466 tree operand, enum tree_code operation)
467 {
468 jfunc->type = IPA_JF_PASS_THROUGH;
469 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
470 jfunc->value.pass_through.formal_id = formal_id;
471 jfunc->value.pass_through.operation = operation;
472 jfunc->value.pass_through.agg_preserved = false;
473 }
474
475 /* Set JFUNC to be an ancestor jump function. */
476
477 static void
478 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
479 int formal_id, bool agg_preserved)
480 {
481 jfunc->type = IPA_JF_ANCESTOR;
482 jfunc->value.ancestor.formal_id = formal_id;
483 jfunc->value.ancestor.offset = offset;
484 jfunc->value.ancestor.agg_preserved = agg_preserved;
485 }
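/* Editor's illustration (hypothetical caller, not from the GCC sources):
   given

     struct B { struct A base; };
     void foo (int i, struct B *b)
     {
       bar (i);          -- simple pass-through of formal 0
       baz (i + 4);      -- arithmetic pass-through, PLUS_EXPR with 4
       qux (4);          -- constant jump function
       quux (&b->base);  -- ancestor of formal 1 at offset 0
     }

   the setters above record the corresponding jump function for each
   argument of each call.  */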
486
487 /* Get IPA BB information about the given BB.  FBI is the context of analysis
488 of this function body. */
489
490 static struct ipa_bb_info *
491 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
492 {
493 gcc_checking_assert (fbi);
494 return &fbi->bb_infos[bb->index];
495 }
496
497 /* Structure to be passed in between detect_type_change and
498 check_stmt_for_type_change. */
499
500 struct prop_type_change_info
501 {
502 /* Offset into the object where there is the virtual method pointer we are
503 looking for. */
504 HOST_WIDE_INT offset;
505 /* The declaration or SSA_NAME pointer of the base that we are checking for
506 type change. */
507 tree object;
508 /* Set to true if dynamic type change has been detected. */
509 bool type_maybe_changed;
510 };
511
512 /* Return true if STMT can modify a virtual method table pointer.
513
514 This function makes special assumptions about both constructors and
515 destructors which are all the functions that are allowed to alter the VMT
516 pointers. It assumes that destructors begin with assignment into all VMT
517    pointers and that constructors essentially behave in the following way:
518
519 1) The very first thing they do is that they call constructors of ancestor
520 sub-objects that have them.
521
522    2) Then VMT pointers of this and all its ancestors are set to new values
523    corresponding to the type associated with the constructor.
524
525    3) Only afterwards, other stuff such as constructors of member sub-objects
526    and the code written by the user is run.  Only this may include calling
527 virtual functions, directly or indirectly.
528
529 There is no way to call a constructor of an ancestor sub-object in any
530 other way.
531
532 This means that we do not have to care whether constructors get the correct
533 type information because they will always change it (in fact, if we define
534 the type to be given by the VMT pointer, it is undefined).
535
536 The most important fact to derive from the above is that if, for some
537    statement in section 3, we try to detect whether the dynamic type has
538 changed, we can safely ignore all calls as we examine the function body
539 backwards until we reach statements in section 2 because these calls cannot
540 be ancestor constructors or destructors (if the input is not bogus) and so
541 do not change the dynamic type (this holds true only for automatically
542 allocated objects but at the moment we devirtualize only these). We then
543 must detect that statements in section 2 change the dynamic type and can try
544    to derive the new type.  That is enough and we can stop; we will never see
545 the calls into constructors of sub-objects in this code. Therefore we can
546 safely ignore all call statements that we traverse.
547 */
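/* Editor's illustration of the three sections above (hypothetical C++,
   not from the GCC sources):

     struct A { virtual void f (); };
     struct B : A { int x; B (); virtual void f (); };

     B::B () : x (0) {}

   B's constructor first runs A's constructor (section 1), then stores
   B's VMT pointer into *this (section 2), and only afterwards
   initializes x and runs any user-written code (section 3).  */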
548
549 static bool
550 stmt_may_be_vtbl_ptr_store (gimple *stmt)
551 {
552 if (is_gimple_call (stmt))
553 return false;
554 if (gimple_clobber_p (stmt))
555 return false;
556 else if (is_gimple_assign (stmt))
557 {
558 tree lhs = gimple_assign_lhs (stmt);
559
560 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
561 {
562 if (flag_strict_aliasing
563 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
564 return false;
565
566 if (TREE_CODE (lhs) == COMPONENT_REF
567 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
568 return false;
569 /* In the future we might want to use get_base_ref_and_offset to find
570 if there is a field corresponding to the offset and if so, proceed
571             almost as if it were a component ref.  */
572 }
573 }
574 return true;
575 }
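/* Editor's note: a VMT pointer store typically appears in gimple as
   something like

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a COMPONENT_REF whose FIELD_DECL is
   DECL_VIRTUAL_P, which the predicate above conservatively treats as a
   possible store to the virtual method table pointer.  */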
576
577 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
578 to check whether a particular statement may modify the virtual table
579    pointer.  It stores its result into DATA, which points to a
580 prop_type_change_info structure. */
581
582 static bool
583 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
584 {
585 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
586 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
587
588 if (stmt_may_be_vtbl_ptr_store (stmt))
589 {
590 tci->type_maybe_changed = true;
591 return true;
592 }
593 else
594 return false;
595 }
596
597 /* See if ARG is a PARM_DECL describing an instance passed by pointer
598    or reference in FUNCTION.  Return true if the dynamic type may change
599    between the beginning of the function and the invocation of CALL.
600
601    Generally functions are not allowed to change the type of such instances,
602    but they may call destructors.  We assume that methods cannot destroy the
603    THIS pointer.  Also, as a special case, constructors and destructors may
604    change the type of the THIS pointer.  */
605
606 static bool
607 param_type_may_change_p (tree function, tree arg, gimple *call)
608 {
609   /* Pure functions cannot change the dynamic type;
610      that requires writing to memory.  */
611 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
612 return false;
613   /* We need to check if we are within an inlined constructor
614      or destructor (ideally we would have a way to check that the
615      inlined cdtor is actually working on ARG, but we don't have an
616      easy tie on this, so punt on all non-pure cdtors).
617      We could also record the types of cdtors and, once we know the type
618      of the instance, match them.
619
620      Also, code unification optimizations may merge calls from
621      different blocks, making return values unreliable.  So
622      do nothing during late optimization.  */
623 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
624 return true;
625 if (TREE_CODE (arg) == SSA_NAME
626 && SSA_NAME_IS_DEFAULT_DEF (arg)
627 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
628 {
629 /* Normal (non-THIS) argument. */
630 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
631 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
632           /* THIS pointer of a method - here we want to watch constructors
633 and destructors as those definitely may change the dynamic
634 type. */
635 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
636 && !DECL_CXX_CONSTRUCTOR_P (function)
637 && !DECL_CXX_DESTRUCTOR_P (function)
638 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
639 {
640 /* Walk the inline stack and watch out for ctors/dtors. */
641 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
642 block = BLOCK_SUPERCONTEXT (block))
643 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
644 return true;
645 return false;
646 }
647 }
648 return true;
649 }
650
651 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
652 callsite CALL) by looking for assignments to its virtual table pointer. If
653    it has, return true and fill in the jump function JFUNC with relevant type
654 information or set it to unknown. ARG is the object itself (not a pointer
655 to it, unless dereferenced). BASE is the base of the memory access as
656 returned by get_ref_base_and_extent, as is the offset.
657
658    This is a helper function for detect_type_change and detect_type_change_ssa
659    that does the heavy work which is usually unnecessary.  */
660
661 static bool
662 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
663 gcall *call, struct ipa_jump_func *jfunc,
664 HOST_WIDE_INT offset)
665 {
666 struct prop_type_change_info tci;
667 ao_ref ao;
668 bool entry_reached = false;
669
670 gcc_checking_assert (DECL_P (arg)
671 || TREE_CODE (arg) == MEM_REF
672 || handled_component_p (arg));
673
674 comp_type = TYPE_MAIN_VARIANT (comp_type);
675
676 /* Const calls cannot call virtual methods through VMT and so type changes do
677 not matter. */
678 if (!flag_devirtualize || !gimple_vuse (call)
679 /* Be sure expected_type is polymorphic. */
680 || !comp_type
681 || TREE_CODE (comp_type) != RECORD_TYPE
682 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
683 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
684 return true;
685
686 ao_ref_init (&ao, arg);
687 ao.base = base;
688 ao.offset = offset;
689 ao.size = POINTER_SIZE;
690 ao.max_size = ao.size;
691
692 tci.offset = offset;
693 tci.object = get_base_address (arg);
694 tci.type_maybe_changed = false;
695
696 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
697 &tci, NULL, &entry_reached);
698 if (!tci.type_maybe_changed)
699 return false;
700
701 ipa_set_jf_unknown (jfunc);
702 return true;
703 }
704
705 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
706    If so, return true and fill in the jump function JFUNC with relevant type
707 information or set it to unknown. ARG is the object itself (not a pointer
708 to it, unless dereferenced). BASE is the base of the memory access as
709 returned by get_ref_base_and_extent, as is the offset. */
710
711 static bool
712 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
713 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
714 {
715 if (!flag_devirtualize)
716 return false;
717
718 if (TREE_CODE (base) == MEM_REF
719 && !param_type_may_change_p (current_function_decl,
720 TREE_OPERAND (base, 0),
721 call))
722 return false;
723 return detect_type_change_from_memory_writes (arg, base, comp_type,
724 call, jfunc, offset);
725 }
726
727 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
728 SSA name (its dereference will become the base and the offset is assumed to
729 be zero). */
730
731 static bool
732 detect_type_change_ssa (tree arg, tree comp_type,
733 gcall *call, struct ipa_jump_func *jfunc)
734 {
735 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
736 if (!flag_devirtualize
737 || !POINTER_TYPE_P (TREE_TYPE (arg)))
738 return false;
739
740 if (!param_type_may_change_p (current_function_decl, arg, call))
741 return false;
742
743 arg = build2 (MEM_REF, ptr_type_node, arg,
744 build_int_cst (ptr_type_node, 0));
745
746 return detect_type_change_from_memory_writes (arg, arg, comp_type,
747 call, jfunc, 0);
748 }
749
750 /* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
751    the boolean variable pointed to by DATA to true.  */
752
753 static bool
754 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
755 void *data)
756 {
757 bool *b = (bool *) data;
758 *b = true;
759 return true;
760 }
761
762 /* Return true if we have already walked so many statements during alias
763    analysis that we should really just give up.  */
764
765 static bool
766 aa_overwalked (struct ipa_func_body_info *fbi)
767 {
768 gcc_checking_assert (fbi);
769 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
770 }
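/* Editor's note: the walking budget checked above is a tunable knob; it
   can be raised or lowered with --param ipa-max-aa-steps=N to trade
   alias-analysis precision for compile time.  */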
771
772 /* Find the nearest valid aa status for parameter specified by INDEX that
773 dominates BB. */
774
775 static struct ipa_param_aa_status *
776 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
777 int index)
778 {
779 while (true)
780 {
781 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
782 if (!bb)
783 return NULL;
784 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
785 if (!bi->param_aa_statuses.is_empty ()
786 && bi->param_aa_statuses[index].valid)
787 return &bi->param_aa_statuses[index];
788 }
789 }
790
791 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
792    structures and/or initialize the result with a dominating description as
793 necessary. */
794
795 static struct ipa_param_aa_status *
796 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
797 int index)
798 {
799 gcc_checking_assert (fbi);
800 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
801 if (bi->param_aa_statuses.is_empty ())
802 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
803 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
804 if (!paa->valid)
805 {
806 gcc_checking_assert (!paa->parm_modified
807 && !paa->ref_modified
808 && !paa->pt_modified);
809 struct ipa_param_aa_status *dom_paa;
810 dom_paa = find_dominating_aa_status (fbi, bb, index);
811 if (dom_paa)
812 *paa = *dom_paa;
813 else
814 paa->valid = true;
815 }
816
817 return paa;
818 }
819
820 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
821 a value known not to be modified in this function before reaching the
822    statement STMT.  FBI holds the information we have gathered about the
823    function so far, which does not survive the summary-building stage.  */
824
825 static bool
826 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
827 gimple *stmt, tree parm_load)
828 {
829 struct ipa_param_aa_status *paa;
830 bool modified = false;
831 ao_ref refd;
832
833 tree base = get_base_address (parm_load);
834 gcc_assert (TREE_CODE (base) == PARM_DECL);
835 if (TREE_READONLY (base))
836 return true;
837
838 /* FIXME: FBI can be NULL if we are being called from outside
839 ipa_node_analysis or ipcp_transform_function, which currently happens
840 during inlining analysis. It would be great to extend fbi's lifetime and
841 always have it. Currently, we are just not afraid of too much walking in
842 that case. */
843 if (fbi)
844 {
845 if (aa_overwalked (fbi))
846 return false;
847 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
848 if (paa->parm_modified)
849 return false;
850 }
851 else
852 paa = NULL;
853
854 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
855 ao_ref_init (&refd, parm_load);
856 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
857 &modified, NULL);
858 if (fbi)
859 fbi->aa_walked += walked;
860 if (paa && modified)
861 paa->parm_modified = true;
862 return !modified;
863 }
864
865 /* If STMT is an assignment that loads a value from a parameter declaration
866    which has not been modified, return the index of the parameter in
867    ipa_node_params.  Otherwise return -1.  */
868
869 static int
870 load_from_unmodified_param (struct ipa_func_body_info *fbi,
871 vec<ipa_param_descriptor, va_gc> *descriptors,
872 gimple *stmt)
873 {
874 int index;
875 tree op1;
876
877 if (!gimple_assign_single_p (stmt))
878 return -1;
879
880 op1 = gimple_assign_rhs1 (stmt);
881 if (TREE_CODE (op1) != PARM_DECL)
882 return -1;
883
884 index = ipa_get_param_decl_index_1 (descriptors, op1);
885 if (index < 0
886 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
887 return -1;
888
889 return index;
890 }
891
892 /* Return true if memory reference REF (which must be a load through parameter
893 with INDEX) loads data that are known to be unmodified in this function
894 before reaching statement STMT. */
895
896 static bool
897 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
898 int index, gimple *stmt, tree ref)
899 {
900 struct ipa_param_aa_status *paa;
901 bool modified = false;
902 ao_ref refd;
903
904 /* FIXME: FBI can be NULL if we are being called from outside
905 ipa_node_analysis or ipcp_transform_function, which currently happens
906 during inlining analysis. It would be great to extend fbi's lifetime and
907 always have it. Currently, we are just not afraid of too much walking in
908 that case. */
909 if (fbi)
910 {
911 if (aa_overwalked (fbi))
912 return false;
913 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
914 if (paa->ref_modified)
915 return false;
916 }
917 else
918 paa = NULL;
919
920 gcc_checking_assert (gimple_vuse (stmt));
921 ao_ref_init (&refd, ref);
922 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
923 &modified, NULL);
924 if (fbi)
925 fbi->aa_walked += walked;
926 if (paa && modified)
927 paa->ref_modified = true;
928 return !modified;
929 }
930
931 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
932 is known to be unmodified in this function before reaching call statement
933 CALL into which it is passed. FBI describes the function body. */
934
935 static bool
936 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
937 gimple *call, tree parm)
938 {
939 bool modified = false;
940 ao_ref refd;
941
942   /* It's unnecessary to calculate anything about memory contents for a const
943      function because it is not going to use it.  But do not cache the result
944 either. Also, no such calculations for non-pointers. */
945 if (!gimple_vuse (call)
946 || !POINTER_TYPE_P (TREE_TYPE (parm))
947 || aa_overwalked (fbi))
948 return false;
949
950 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
951 gimple_bb (call),
952 index);
953 if (paa->pt_modified)
954 return false;
955
956 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
957 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
958 &modified, NULL);
959 fbi->aa_walked += walked;
960 if (modified)
961 paa->pt_modified = true;
962 return !modified;
963 }
964
965 /* Return true if we can prove that OP is a memory reference loading
966 data from an aggregate passed as a parameter.
967
968    The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it returns
969 false if it cannot prove that the value has not been modified before the
970 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
971 if it cannot prove the value has not been modified, in that case it will
972 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
973
974    FBI and DESCRIPTORS describe parameters of the current function (but FBI
975    can be NULL), STMT is the load statement.  If the function returns true,
976    *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
977 within the aggregate and whether it is a load from a value passed by
978 reference respectively. */
979
980 bool
981 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
982 vec<ipa_param_descriptor, va_gc> *descriptors,
983 gimple *stmt, tree op, int *index_p,
984 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
985 bool *by_ref_p, bool *guaranteed_unmodified)
986 {
987 int index;
988 HOST_WIDE_INT size, max_size;
989 bool reverse;
990 tree base
991 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
992
993 if (max_size == -1 || max_size != size || *offset_p < 0)
994 return false;
995
996 if (DECL_P (base))
997 {
998 int index = ipa_get_param_decl_index_1 (descriptors, base);
999 if (index >= 0
1000 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1001 {
1002 *index_p = index;
1003 *by_ref_p = false;
1004 if (size_p)
1005 *size_p = size;
1006 if (guaranteed_unmodified)
1007 *guaranteed_unmodified = true;
1008 return true;
1009 }
1010 return false;
1011 }
1012
1013 if (TREE_CODE (base) != MEM_REF
1014 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1015 || !integer_zerop (TREE_OPERAND (base, 1)))
1016 return false;
1017
1018 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1019 {
1020 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1021 index = ipa_get_param_decl_index_1 (descriptors, parm);
1022 }
1023 else
1024 {
1025 /* This branch catches situations where a pointer parameter is not a
1026 gimple register, for example:
1027
1028 void hip7(S*) (struct S * p)
1029 {
1030 void (*<T2e4>) (struct S *) D.1867;
1031 struct S * p.1;
1032
1033 <bb 2>:
1034 p.1_1 = p;
1035 D.1867_2 = p.1_1->f;
1036 D.1867_2 ();
1037 gdp = &p;
1038 */
1039
1040 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1041 index = load_from_unmodified_param (fbi, descriptors, def);
1042 }
1043
1044 if (index >= 0)
1045 {
1046 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1047 if (!data_preserved && !guaranteed_unmodified)
1048 return false;
1049
1050 *index_p = index;
1051 *by_ref_p = true;
1052 if (size_p)
1053 *size_p = size;
1054 if (guaranteed_unmodified)
1055 *guaranteed_unmodified = data_preserved;
1056 return true;
1057 }
1058 return false;
1059 }
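/* Editor's illustration (hypothetical caller, not from the GCC sources):
   for

     struct S { int a; int b; };
     int f (struct S *p) { return p->b; }

   the load from p->b yields *INDEX_P == 0, *OFFSET_P equal to the bit
   offset of field b (32 with 32-bit int) and *BY_REF_P == true, provided
   the pointed-to data cannot have been modified before the load.  */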
1060
1061 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1062 of an assignment statement STMT, try to determine whether we are actually
1063 handling any of the following cases and construct an appropriate jump
1064 function into JFUNC if so:
1065
1066 1) The passed value is loaded from a formal parameter which is not a gimple
1067    register (most probably because it is addressable; the value has to be
1068    scalar) and we can guarantee the value has not changed.  This case can
1069 therefore be described by a simple pass-through jump function. For example:
1070
1071 foo (int a)
1072 {
1073 int a.0;
1074
1075 a.0_2 = a;
1076 bar (a.0_2);
1077
1078 2) The passed value can be described by a simple arithmetic pass-through
1079 jump function. E.g.
1080
1081 foo (int a)
1082 {
1083 int D.2064;
1084
1085 D.2064_4 = a.1(D) + 4;
1086 bar (D.2064_4);
1087
1088 This case can also occur in combination of the previous one, e.g.:
1089
1090 foo (int a, int z)
1091 {
1092 int a.0;
1093 int D.2064;
1094
1095 a.0_3 = a;
1096 D.2064_4 = a.0_3 + 4;
1097 foo (D.2064_4);
1098
1099 3) The passed value is an address of an object within another one (which
1100    is also passed by reference).  Such situations are described by an ancestor
1101 jump function and describe situations such as:
1102
1103 B::foo() (struct B * const this)
1104 {
1105 struct A * D.1845;
1106
1107 D.1845_2 = &this_1(D)->D.1748;
1108 A::bar (D.1845_2);
1109
1110    INFO is the structure describing individual parameters as accessed in
1111    different stages of IPA optimizations.  FBI contains the information that
1112    is only needed for intraprocedural analysis.  */
1113
1114 static void
1115 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1116 struct ipa_node_params *info,
1117 struct ipa_jump_func *jfunc,
1118 gcall *call, gimple *stmt, tree name,
1119 tree param_type)
1120 {
1121 HOST_WIDE_INT offset, size, max_size;
1122 tree op1, tc_ssa, base, ssa;
1123 bool reverse;
1124 int index;
1125
1126 op1 = gimple_assign_rhs1 (stmt);
1127
1128 if (TREE_CODE (op1) == SSA_NAME)
1129 {
1130 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1131 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1132 else
1133 index = load_from_unmodified_param (fbi, info->descriptors,
1134 SSA_NAME_DEF_STMT (op1));
1135 tc_ssa = op1;
1136 }
1137 else
1138 {
1139 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1140 tc_ssa = gimple_assign_lhs (stmt);
1141 }
1142
1143 if (index >= 0)
1144 {
1145 switch (gimple_assign_rhs_class (stmt))
1146 {
1147 case GIMPLE_BINARY_RHS:
1148 {
1149 tree op2 = gimple_assign_rhs2 (stmt);
1150 if (!is_gimple_ip_invariant (op2)
1151 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1152 != tcc_comparison)
1153 && !useless_type_conversion_p (TREE_TYPE (name),
1154 TREE_TYPE (op1))))
1155 return;
1156
1157 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1158 gimple_assign_rhs_code (stmt));
1159 break;
1160 }
1161 case GIMPLE_SINGLE_RHS:
1162 {
1163 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1164 tc_ssa);
1165 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1166 break;
1167 }
1168 case GIMPLE_UNARY_RHS:
1169 if (is_gimple_assign (stmt)
1170 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1171 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1172 ipa_set_jf_unary_pass_through (jfunc, index,
1173 gimple_assign_rhs_code (stmt));
1174 default:;
1175 }
1176 return;
1177 }
1178
1179 if (TREE_CODE (op1) != ADDR_EXPR)
1180 return;
1181 op1 = TREE_OPERAND (op1, 0);
1182 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1183 return;
1184 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1185 if (TREE_CODE (base) != MEM_REF
1186 /* If this is a varying address, punt. */
1187 || max_size == -1
1188 || max_size != size)
1189 return;
1190 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1191 ssa = TREE_OPERAND (base, 0);
1192 if (TREE_CODE (ssa) != SSA_NAME
1193 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1194 || offset < 0)
1195 return;
1196
1197 /* Dynamic types are changed in constructors and destructors. */
1198 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1199 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1200 ipa_set_ancestor_jf (jfunc, offset, index,
1201 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1202 }
1203
1204 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1205 it looks like:
1206
1207 iftmp.1_3 = &obj_2(D)->D.1762;
1208
1209 The base of the MEM_REF must be a default definition SSA NAME of a
1210    parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1211 whole MEM_REF expression is returned and the offset calculated from any
1212 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1213 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1214
1215 static tree
1216 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1217 {
1218 HOST_WIDE_INT size, max_size;
1219 tree expr, parm, obj;
1220 bool reverse;
1221
1222 if (!gimple_assign_single_p (assign))
1223 return NULL_TREE;
1224 expr = gimple_assign_rhs1 (assign);
1225
1226 if (TREE_CODE (expr) != ADDR_EXPR)
1227 return NULL_TREE;
1228 expr = TREE_OPERAND (expr, 0);
1229 obj = expr;
1230 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1231
1232 if (TREE_CODE (expr) != MEM_REF
1233 /* If this is a varying address, punt. */
1234 || max_size == -1
1235 || max_size != size
1236 || *offset < 0)
1237 return NULL_TREE;
1238 parm = TREE_OPERAND (expr, 0);
1239 if (TREE_CODE (parm) != SSA_NAME
1240 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1241 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1242 return NULL_TREE;
1243
1244 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1245 *obj_p = obj;
1246 return expr;
1247 }
1248
1249
1250 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1251 statement PHI, try to find out whether NAME is in fact a
1252 multiple-inheritance typecast from a descendant into an ancestor of a formal
1253 parameter and thus can be described by an ancestor jump function and if so,
1254 write the appropriate function into JFUNC.
1255
1256 Essentially we want to match the following pattern:
1257
1258 if (obj_2(D) != 0B)
1259 goto <bb 3>;
1260 else
1261 goto <bb 4>;
1262
1263 <bb 3>:
1264 iftmp.1_3 = &obj_2(D)->D.1762;
1265
1266 <bb 4>:
1267 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1268 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1269 return D.1879_6; */
1270
1271 static void
1272 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1273 struct ipa_node_params *info,
1274 struct ipa_jump_func *jfunc,
1275 gcall *call, gphi *phi)
1276 {
1277 HOST_WIDE_INT offset;
1278 gimple *assign, *cond;
1279 basic_block phi_bb, assign_bb, cond_bb;
1280 tree tmp, parm, expr, obj;
1281 int index, i;
1282
1283 if (gimple_phi_num_args (phi) != 2)
1284 return;
1285
1286 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1287 tmp = PHI_ARG_DEF (phi, 0);
1288 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1289 tmp = PHI_ARG_DEF (phi, 1);
1290 else
1291 return;
1292 if (TREE_CODE (tmp) != SSA_NAME
1293 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1294 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1295 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1296 return;
1297
1298 assign = SSA_NAME_DEF_STMT (tmp);
1299 assign_bb = gimple_bb (assign);
1300 if (!single_pred_p (assign_bb))
1301 return;
1302 expr = get_ancestor_addr_info (assign, &obj, &offset);
1303 if (!expr)
1304 return;
1305 parm = TREE_OPERAND (expr, 0);
1306 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1307 if (index < 0)
1308 return;
1309
1310 cond_bb = single_pred (assign_bb);
1311 cond = last_stmt (cond_bb);
1312 if (!cond
1313 || gimple_code (cond) != GIMPLE_COND
1314 || gimple_cond_code (cond) != NE_EXPR
1315 || gimple_cond_lhs (cond) != parm
1316 || !integer_zerop (gimple_cond_rhs (cond)))
1317 return;
1318
1319 phi_bb = gimple_bb (phi);
1320 for (i = 0; i < 2; i++)
1321 {
1322 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1323 if (pred != assign_bb && pred != cond_bb)
1324 return;
1325 }
1326
1327 ipa_set_ancestor_jf (jfunc, offset, index,
1328 parm_ref_data_pass_through_p (fbi, index, call, parm));
1329 }
1330
1331 /* Inspect the given TYPE and return true iff it has the same structure (the
1332 same number of fields of the same types) as a C++ member pointer. If
1333 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1334 corresponding fields there. */
1335
1336 static bool
1337 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1338 {
1339 tree fld;
1340
1341 if (TREE_CODE (type) != RECORD_TYPE)
1342 return false;
1343
1344 fld = TYPE_FIELDS (type);
1345 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1346 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1347 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1348 return false;
1349
1350 if (method_ptr)
1351 *method_ptr = fld;
1352
1353 fld = DECL_CHAIN (fld);
1354 if (!fld || INTEGRAL_TYPE_P (fld)
1355 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1356 return false;
1357 if (delta)
1358 *delta = fld;
1359
1360 if (DECL_CHAIN (fld))
1361 return false;
1362
1363 return true;
1364 }
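/* Editor's note: the record matched above is the usual lowered form of a
   C++ pointer to member function, approximately

     struct {
       void (*__pfn) (struct S *);  -- pointer to the method
       long __delta;                -- adjustment added to THIS
     };

   with the exact field names and the delta type being ABI-dependent.  */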
1365
1366 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1367 return the rhs of its defining statement. Otherwise return RHS as it
1368 is. */
1369
1370 static inline tree
1371 get_ssa_def_if_simple_copy (tree rhs)
1372 {
1373 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1374 {
1375 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1376
1377 if (gimple_assign_single_p (def_stmt))
1378 rhs = gimple_assign_rhs1 (def_stmt);
1379 else
1380 break;
1381 }
1382 return rhs;
1383 }
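/* Editor's illustration: given the gimple copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks back through the single-operand
   assignments and returns a_1.  */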
1384
1385 /* Simple linked list, describing known contents of an aggregate before
1386    the call.  */
1387
1388 struct ipa_known_agg_contents_list
1389 {
1390 /* Offset and size of the described part of the aggregate. */
1391 HOST_WIDE_INT offset, size;
1392   /* Known constant value or NULL if the contents are known to be unknown.  */
1393 tree constant;
1394 /* Pointer to the next structure in the list. */
1395 struct ipa_known_agg_contents_list *next;
1396 };
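/* Editor's illustration (hypothetical caller, assuming 32-bit int): for

     struct S s;
     s.a = 1;
     s.b = 64;
     foo (s);

   the walk below would build a two-element list sorted by offset,
   roughly { offset 0, size 32, constant 1 } followed by
   { offset 32, size 32, constant 64 }.  */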
1397
1398 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1399    structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE.
1400    If there is a partial overlap, return NULL; if such an element is already
1401    there, set *ALREADY_THERE to true.  */
1402
1403 static struct ipa_known_agg_contents_list **
1404 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1405 HOST_WIDE_INT lhs_offset,
1406 HOST_WIDE_INT lhs_size,
1407 bool *already_there)
1408 {
1409 struct ipa_known_agg_contents_list **p = list;
1410 while (*p && (*p)->offset < lhs_offset)
1411 {
1412 if ((*p)->offset + (*p)->size > lhs_offset)
1413 return NULL;
1414 p = &(*p)->next;
1415 }
1416
1417 if (*p && (*p)->offset < lhs_offset + lhs_size)
1418 {
1419 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1420 /* We already know this value is subsequently overwritten with
1421 something else. */
1422 *already_there = true;
1423 else
1424 /* Otherwise this is a partial overlap which we cannot
1425 represent. */
1426 return NULL;
1427 }
1428 return p;
1429 }
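/* Editor's note: for example, with an existing entry at offset 0 of size
   32, a new entry with LHS_OFFSET 16 and LHS_SIZE 32 partially overlaps
   it and makes the function return NULL, whereas LHS_OFFSET 0 with
   LHS_SIZE 32 again sets *ALREADY_THERE instead.  */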
1430
1431 /* Build aggregate jump function from LIST, assuming there are exactly
1432    CONST_COUNT constant entries there and that the offset of the passed argument
1433 is ARG_OFFSET and store it into JFUNC. */
1434
1435 static void
1436 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1437 int const_count, HOST_WIDE_INT arg_offset,
1438 struct ipa_jump_func *jfunc)
1439 {
1440 vec_alloc (jfunc->agg.items, const_count);
1441 while (list)
1442 {
1443 if (list->constant)
1444 {
1445 struct ipa_agg_jf_item item;
1446 item.offset = list->offset - arg_offset;
1447 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1448 item.value = unshare_expr_without_location (list->constant);
1449 jfunc->agg.items->quick_push (item);
1450 }
1451 list = list->next;
1452 }
1453 }
1454
1455 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1456 in ARG is filled in with constant values. ARG can either be an aggregate
1457 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1458 aggregate. JFUNC is the jump function into which the constants are
1459 subsequently stored. */
1460
1461 static void
1462 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1463 tree arg_type,
1464 struct ipa_jump_func *jfunc)
1465 {
1466 struct ipa_known_agg_contents_list *list = NULL;
1467 int item_count = 0, const_count = 0;
1468 HOST_WIDE_INT arg_offset, arg_size;
1469 gimple_stmt_iterator gsi;
1470 tree arg_base;
1471 bool check_ref, by_ref;
1472 ao_ref r;
1473
1474 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1475 return;
1476
1477 /* The function operates in three stages. First, we prepare check_ref, r,
1478 arg_base and arg_offset based on what is actually passed as an actual
1479 argument. */
1480
1481 if (POINTER_TYPE_P (arg_type))
1482 {
1483 by_ref = true;
1484 if (TREE_CODE (arg) == SSA_NAME)
1485 {
1486 tree type_size;
1487 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1488 return;
1489 check_ref = true;
1490 arg_base = arg;
1491 arg_offset = 0;
1492 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1493 arg_size = tree_to_uhwi (type_size);
1494 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1495 }
1496 else if (TREE_CODE (arg) == ADDR_EXPR)
1497 {
1498 HOST_WIDE_INT arg_max_size;
1499 bool reverse;
1500
1501 arg = TREE_OPERAND (arg, 0);
1502 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1503 &arg_max_size, &reverse);
1504 if (arg_max_size == -1
1505 || arg_max_size != arg_size
1506 || arg_offset < 0)
1507 return;
1508 if (DECL_P (arg_base))
1509 {
1510 check_ref = false;
1511 ao_ref_init (&r, arg_base);
1512 }
1513 else
1514 return;
1515 }
1516 else
1517 return;
1518 }
1519 else
1520 {
1521 HOST_WIDE_INT arg_max_size;
1522 bool reverse;
1523
1524 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1525
1526 by_ref = false;
1527 check_ref = false;
1528 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1529 &arg_max_size, &reverse);
1530 if (arg_max_size == -1
1531 || arg_max_size != arg_size
1532 || arg_offset < 0)
1533 return;
1534
1535 ao_ref_init (&r, arg);
1536 }
1537
1538 /* Second stage walks back the BB, looks at individual statements and as long
1539 as it is confident of how the statements affect contents of the
1540      aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1541 describing it. */
1542 gsi = gsi_for_stmt (call);
1543 gsi_prev (&gsi);
1544 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1545 {
1546 struct ipa_known_agg_contents_list *n, **p;
1547 gimple *stmt = gsi_stmt (gsi);
1548 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1549 tree lhs, rhs, lhs_base;
1550 bool reverse;
1551
1552 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1553 continue;
1554 if (!gimple_assign_single_p (stmt))
1555 break;
1556
1557 lhs = gimple_assign_lhs (stmt);
1558 rhs = gimple_assign_rhs1 (stmt);
1559 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1560 || TREE_CODE (lhs) == BIT_FIELD_REF
1561 || contains_bitfld_component_ref_p (lhs))
1562 break;
1563
1564 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1565 &lhs_max_size, &reverse);
1566 if (lhs_max_size == -1
1567 || lhs_max_size != lhs_size)
1568 break;
1569
1570 if (check_ref)
1571 {
1572 if (TREE_CODE (lhs_base) != MEM_REF
1573 || TREE_OPERAND (lhs_base, 0) != arg_base
1574 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1575 break;
1576 }
1577 else if (lhs_base != arg_base)
1578 {
1579 if (DECL_P (lhs_base))
1580 continue;
1581 else
1582 break;
1583 }
1584
1585 bool already_there = false;
1586 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1587 &already_there);
1588 if (!p)
1589 break;
1590 if (already_there)
1591 continue;
1592
1593 rhs = get_ssa_def_if_simple_copy (rhs);
1594 n = XALLOCA (struct ipa_known_agg_contents_list);
1595 n->size = lhs_size;
1596 n->offset = lhs_offset;
1597 if (is_gimple_ip_invariant (rhs))
1598 {
1599 n->constant = rhs;
1600 const_count++;
1601 }
1602 else
1603 n->constant = NULL_TREE;
1604 n->next = *p;
1605 *p = n;
1606
1607 item_count++;
1608 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1609 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1610 break;
1611 }
1612
1613 /* Third stage just goes over the list and creates an appropriate vector of
1614      ipa_agg_jf_item structures out of it, of course only if there are
1615 any known constants to begin with. */
1616
1617 if (const_count)
1618 {
1619 jfunc->agg.by_ref = by_ref;
1620 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1621 }
1622 }
1623
1624 /* Return the Ith param type of callee associated with call graph
1625 edge E. */
1626
1627 tree
1628 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1629 {
1630 int n;
1631 tree type = (e->callee
1632 ? TREE_TYPE (e->callee->decl)
1633 : gimple_call_fntype (e->call_stmt));
1634 tree t = TYPE_ARG_TYPES (type);
1635
1636 for (n = 0; n < i; n++)
1637 {
1638 if (!t)
1639 break;
1640 t = TREE_CHAIN (t);
1641 }
1642 if (t)
1643 return TREE_VALUE (t);
1644 if (!e->callee)
1645 return NULL;
1646 t = DECL_ARGUMENTS (e->callee->decl);
1647 for (n = 0; n < i; n++)
1648 {
1649 if (!t)
1650 return NULL;
1651 t = TREE_CHAIN (t);
1652 }
1653 if (t)
1654 return TREE_TYPE (t);
1655 return NULL;
1656 }
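/* Editor's note: for a variadic callee such as

     int printf (const char *fmt, ...);

   only the fixed parameter has an entry in TYPE_ARG_TYPES, so the
   function above returns the type of FMT for I == 0 and NULL for the
   variadic positions.  */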
1657
1658 /* Compute jump functions for all arguments of callsite CS and insert the
1659 information in the jump_functions array in the ipa_edge_args corresponding
1660 to this callsite. */
1661
1662 static void
1663 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1664 struct cgraph_edge *cs)
1665 {
1666 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1667 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1668 gcall *call = cs->call_stmt;
1669 int n, arg_num = gimple_call_num_args (call);
1670 bool useful_context = false;
1671
1672 if (arg_num == 0 || args->jump_functions)
1673 return;
1674 vec_safe_grow_cleared (args->jump_functions, arg_num);
1675 if (flag_devirtualize)
1676 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1677
1678 if (gimple_call_internal_p (call))
1679 return;
1680 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1681 return;
1682
1683 for (n = 0; n < arg_num; n++)
1684 {
1685 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1686 tree arg = gimple_call_arg (call, n);
1687 tree param_type = ipa_get_callee_param_type (cs, n);
1688 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1689 {
1690 tree instance;
1691 struct ipa_polymorphic_call_context context (cs->caller->decl,
1692 arg, cs->call_stmt,
1693 &instance);
1694 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1695 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1696 if (!context.useless_p ())
1697 useful_context = true;
1698 }
1699
1700 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1701 {
1702 bool addr_nonzero = false;
1703 bool strict_overflow = false;
1704
1705 if (TREE_CODE (arg) == SSA_NAME
1706 && param_type
1707 && get_ptr_nonnull (arg))
1708 addr_nonzero = true;
1709 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1710 addr_nonzero = true;
1711
1712 if (addr_nonzero)
1713 {
1714 jfunc->vr_known = true;
1715 jfunc->m_vr.type = VR_ANTI_RANGE;
1716 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1717 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1718 jfunc->m_vr.equiv = NULL;
1719 }
1720 else
1721 gcc_assert (!jfunc->vr_known);
1722 }
1723 else
1724 {
1725 wide_int min, max;
1726 value_range_type type;
1727 if (TREE_CODE (arg) == SSA_NAME
1728 && param_type
1729 && (type = get_range_info (arg, &min, &max))
1730 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1731 {
1732 value_range vr;
1733
1734 vr.type = type;
1735 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1736 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1737 vr.equiv = NULL;
1738 extract_range_from_unary_expr (&jfunc->m_vr,
1739 NOP_EXPR,
1740 param_type,
1741 &vr, TREE_TYPE (arg));
1742 if (jfunc->m_vr.type == VR_RANGE
1743 || jfunc->m_vr.type == VR_ANTI_RANGE)
1744 jfunc->vr_known = true;
1745 else
1746 jfunc->vr_known = false;
1747 }
1748 else
1749 gcc_assert (!jfunc->vr_known);
1750 }
1751
1752 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1753 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1754 {
1755 jfunc->bits.known = true;
1756
1757 if (TREE_CODE (arg) == SSA_NAME)
1758 {
1759 jfunc->bits.value = 0;
1760 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1761 TYPE_SIGN (TREE_TYPE (arg)));
1762 }
1763 else
1764 {
1765 jfunc->bits.value = wi::to_widest (arg);
1766 jfunc->bits.mask = 0;
1767 }
1768 }
1769 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1770 {
1771 unsigned HOST_WIDE_INT bitpos;
1772 unsigned align;
1773
1774 jfunc->bits.known = true;
1775 get_pointer_alignment_1 (arg, &align, &bitpos);
1776 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1777 .and_not (align / BITS_PER_UNIT - 1);
1778 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1779 }
1780 else
1781 gcc_assert (!jfunc->bits.known);
1782
1783 if (is_gimple_ip_invariant (arg)
1784 || (VAR_P (arg)
1785 && is_global_var (arg)
1786 && TREE_READONLY (arg)))
1787 ipa_set_jf_constant (jfunc, arg, cs);
1788 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1789 && TREE_CODE (arg) == PARM_DECL)
1790 {
1791 int index = ipa_get_param_decl_index (info, arg);
1792
1793          gcc_assert (index >= 0);
1794          /* Aggregate passed by value; check for pass-through, otherwise we
1795             will attempt to fill in aggregate contents later in this
1796             for loop.  */
1797 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1798 {
1799 ipa_set_jf_simple_pass_through (jfunc, index, false);
1800 continue;
1801 }
1802 }
1803 else if (TREE_CODE (arg) == SSA_NAME)
1804 {
1805 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1806 {
1807 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1808 if (index >= 0)
1809 {
1810 bool agg_p;
1811 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1812 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1813 }
1814 }
1815 else
1816 {
1817 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1818 if (is_gimple_assign (stmt))
1819 compute_complex_assign_jump_func (fbi, info, jfunc,
1820 call, stmt, arg, param_type);
1821 else if (gimple_code (stmt) == GIMPLE_PHI)
1822 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1823 call,
1824 as_a <gphi *> (stmt));
1825 }
1826 }
1827
1828       /* If ARG is a pointer, we cannot use its type to determine the type of the
1829 	 aggregate passed (because type conversions are ignored in gimple).  Usually
1830 	 we can safely get the type from the function declaration, but for K&R
1831 	 prototypes or variadic functions we can try our luck with the type of the
1832 	 pointer passed.  TODO: Since we look for actual initialization of the memory
1833 	 object, we may do better to work out the type from the memory stores we find.  */
1834 if (!param_type)
1835 param_type = TREE_TYPE (arg);
1836
1837 if ((jfunc->type != IPA_JF_PASS_THROUGH
1838 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1839 && (jfunc->type != IPA_JF_ANCESTOR
1840 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1841 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1842 || POINTER_TYPE_P (param_type)))
1843 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1844 }
1845 if (!useful_context)
1846 vec_free (args->polymorphic_call_contexts);
1847 }
1848
1849 /* Compute jump functions for all edges - both direct and indirect - outgoing
1850 from BB. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1854 {
1855 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1856 int i;
1857 struct cgraph_edge *cs;
1858
1859 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1860 {
1861 struct cgraph_node *callee = cs->callee;
1862
1863 if (callee)
1864 {
1865 callee->ultimate_alias_target ();
1866 /* We do not need to bother analyzing calls to unknown functions
1867 unless they may become known during lto/whopr. */
1868 if (!callee->definition && !flag_lto)
1869 continue;
1870 }
1871 ipa_compute_jump_functions_for_edge (fbi, cs);
1872 }
1873 }
1874
1875 /* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
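/* For illustration (hypothetical names, in the spirit of the gimple dumps
   in the big comment further below): with USE_DELTA false this matches
   statements such as

     f$__pfn_24 = f.__pfn;
   or
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   (the latter on a target where __pfn sits at byte 4) and returns the
   PARM_DECL f.  */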
1880
1881 static tree
1882 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1883 HOST_WIDE_INT *offset_p)
1884 {
1885 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1886
1887 if (!gimple_assign_single_p (stmt))
1888 return NULL_TREE;
1889
1890 rhs = gimple_assign_rhs1 (stmt);
1891 if (TREE_CODE (rhs) == COMPONENT_REF)
1892 {
1893 ref_field = TREE_OPERAND (rhs, 1);
1894 rhs = TREE_OPERAND (rhs, 0);
1895 }
1896 else
1897 ref_field = NULL_TREE;
1898 if (TREE_CODE (rhs) != MEM_REF)
1899 return NULL_TREE;
1900 rec = TREE_OPERAND (rhs, 0);
1901 if (TREE_CODE (rec) != ADDR_EXPR)
1902 return NULL_TREE;
1903 rec = TREE_OPERAND (rec, 0);
1904 if (TREE_CODE (rec) != PARM_DECL
1905 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1906 return NULL_TREE;
1907 ref_offset = TREE_OPERAND (rhs, 1);
1908
1909 if (use_delta)
1910 fld = delta_field;
1911 else
1912 fld = ptr_field;
1913 if (offset_p)
1914 *offset_p = int_bit_position (fld);
1915
1916 if (ref_field)
1917 {
1918 if (integer_nonzerop (ref_offset))
1919 return NULL_TREE;
1920 return ref_field == fld ? rec : NULL_TREE;
1921 }
1922 else
1923 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1924 : NULL_TREE;
1925 }
1926
1927 /* Returns true iff T is an SSA_NAME defined by a statement. */
1928
1929 static bool
1930 ipa_is_ssa_with_stmt_def (tree t)
1931 {
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1937 }
1938
1939 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
1942
1943 static struct cgraph_edge *
1944 ipa_note_param_call (struct cgraph_node *node, int param_index,
1945 gcall *stmt)
1946 {
1947 struct cgraph_edge *cs;
1948
1949 cs = node->get_edge (stmt);
1950 cs->indirect_info->param_index = param_index;
1951 cs->indirect_info->agg_contents = 0;
1952 cs->indirect_info->member_ptr = 0;
1953 cs->indirect_info->guaranteed_unmodified = 0;
1954 return cs;
1955 }
1956
1957 /* Analyze the CALL and examine uses of formal parameters of the caller
1958    FBI->node, described by FBI->info, which also holds intermediate
1959    information about each formal parameter.  Currently it checks
1960 whether the call calls a pointer that is a formal parameter and if so, the
1961 parameter is marked with the called flag and an indirect call graph edge
1962 describing the call is created. This is very simple for ordinary pointers
1963 represented in SSA but not-so-nice when it comes to member pointers. The
1964 ugly part of this function does nothing more than trying to match the
1965 pattern of such a call. An example of such a pattern is the gimple dump
1966 below, the call is on the last line:
1967
1968 <bb 2>:
1969 f$__delta_5 = f.__delta;
1970 f$__pfn_24 = f.__pfn;
1971
1972 or
1973 <bb 2>:
1974 f$__delta_5 = MEM[(struct *)&f];
1975 f$__pfn_24 = MEM[(struct *)&f + 4B];
1976
1977 and a few lines below:
1978
1979 <bb 5>
1980 D.2496_3 = (int) f$__pfn_24;
1981 D.2497_4 = D.2496_3 & 1;
1982 if (D.2497_4 != 0)
1983 goto <bb 3>;
1984 else
1985 goto <bb 4>;
1986
1987 <bb 6>:
1988 D.2500_7 = (unsigned int) f$__delta_5;
1989 D.2501_8 = &S + D.2500_7;
1990 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1991 D.2503_10 = *D.2502_9;
1992 D.2504_12 = f$__pfn_24 + -1;
1993 D.2505_13 = (unsigned int) D.2504_12;
1994 D.2506_14 = D.2503_10 + D.2505_13;
1995 D.2507_15 = *D.2506_14;
1996 iftmp.11_16 = (String:: *) D.2507_15;
1997
1998 <bb 7>:
1999 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2000 D.2500_19 = (unsigned int) f$__delta_5;
2001 D.2508_20 = &S + D.2500_19;
2002 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2003
2004 Such patterns are results of simple calls to a member pointer:
2005
2006 int doprinting (int (MyString::* f)(int) const)
2007 {
2008 MyString S ("somestring");
2009
2010 return (S.*f)(4);
2011 }
2012
2013 Moreover, the function also looks for called pointers loaded from aggregates
2014 passed by value or reference. */
2015
2016 static void
2017 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2018 tree target)
2019 {
2020 struct ipa_node_params *info = fbi->info;
2021 HOST_WIDE_INT offset;
2022 bool by_ref;
2023
2024 if (SSA_NAME_IS_DEFAULT_DEF (target))
2025 {
2026 tree var = SSA_NAME_VAR (target);
2027 int index = ipa_get_param_decl_index (info, var);
2028 if (index >= 0)
2029 ipa_note_param_call (fbi->node, index, call);
2030 return;
2031 }
2032
2033 int index;
2034 gimple *def = SSA_NAME_DEF_STMT (target);
2035 bool guaranteed_unmodified;
2036 if (gimple_assign_single_p (def)
2037 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2038 gimple_assign_rhs1 (def), &index, &offset,
2039 NULL, &by_ref, &guaranteed_unmodified))
2040 {
2041 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2042 cs->indirect_info->offset = offset;
2043 cs->indirect_info->agg_contents = 1;
2044 cs->indirect_info->by_ref = by_ref;
2045 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2046 return;
2047 }
2048
2049 /* Now we need to try to match the complex pattern of calling a member
2050 pointer. */
2051 if (gimple_code (def) != GIMPLE_PHI
2052 || gimple_phi_num_args (def) != 2
2053 || !POINTER_TYPE_P (TREE_TYPE (target))
2054 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2055 return;
2056
2057 /* First, we need to check whether one of these is a load from a member
2058 pointer that is a parameter to this function. */
2059 tree n1 = PHI_ARG_DEF (def, 0);
2060 tree n2 = PHI_ARG_DEF (def, 1);
2061 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2062 return;
2063 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2064 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2065
2066 tree rec;
2067 basic_block bb, virt_bb;
2068 basic_block join = gimple_bb (def);
2069 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2070 {
2071 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2072 return;
2073
2074 bb = EDGE_PRED (join, 0)->src;
2075 virt_bb = gimple_bb (d2);
2076 }
2077 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2078 {
2079 bb = EDGE_PRED (join, 1)->src;
2080 virt_bb = gimple_bb (d1);
2081 }
2082 else
2083 return;
2084
2085 /* Second, we need to check that the basic blocks are laid out in the way
2086 corresponding to the pattern. */
2087
2088 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2089 || single_pred (virt_bb) != bb
2090 || single_succ (virt_bb) != join)
2091 return;
2092
2093   /* Third, check that the branch depends on the least significant bit
2094      of the pfn.  */
2095
2096 gimple *branch = last_stmt (bb);
2097 if (!branch || gimple_code (branch) != GIMPLE_COND)
2098 return;
2099
2100 if ((gimple_cond_code (branch) != NE_EXPR
2101 && gimple_cond_code (branch) != EQ_EXPR)
2102 || !integer_zerop (gimple_cond_rhs (branch)))
2103 return;
2104
2105 tree cond = gimple_cond_lhs (branch);
2106 if (!ipa_is_ssa_with_stmt_def (cond))
2107 return;
2108
2109 def = SSA_NAME_DEF_STMT (cond);
2110 if (!is_gimple_assign (def)
2111 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2112 || !integer_onep (gimple_assign_rhs2 (def)))
2113 return;
2114
2115 cond = gimple_assign_rhs1 (def);
2116 if (!ipa_is_ssa_with_stmt_def (cond))
2117 return;
2118
2119 def = SSA_NAME_DEF_STMT (cond);
2120
2121 if (is_gimple_assign (def)
2122 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2123 {
2124 cond = gimple_assign_rhs1 (def);
2125 if (!ipa_is_ssa_with_stmt_def (cond))
2126 return;
2127 def = SSA_NAME_DEF_STMT (cond);
2128 }
2129
2130 tree rec2;
2131 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2132 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2133 == ptrmemfunc_vbit_in_delta),
2134 NULL);
2135 if (rec != rec2)
2136 return;
2137
2138 index = ipa_get_param_decl_index (info, rec);
2139 if (index >= 0
2140 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2141 {
2142 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2143 cs->indirect_info->offset = offset;
2144 cs->indirect_info->agg_contents = 1;
2145 cs->indirect_info->member_ptr = 1;
2146 cs->indirect_info->guaranteed_unmodified = 1;
2147 }
2148
2149 return;
2150 }
2151
2152 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2153 object referenced in the expression is a formal parameter of the caller
2154 FBI->node (described by FBI->info), create a call note for the
2155 statement. */
2156
2157 static void
2158 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2159 gcall *call, tree target)
2160 {
2161 tree obj = OBJ_TYPE_REF_OBJECT (target);
2162 int index;
2163 HOST_WIDE_INT anc_offset;
2164
2165 if (!flag_devirtualize)
2166 return;
2167
2168 if (TREE_CODE (obj) != SSA_NAME)
2169 return;
2170
2171 struct ipa_node_params *info = fbi->info;
2172 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2173 {
2174 struct ipa_jump_func jfunc;
2175 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2176 return;
2177
2178 anc_offset = 0;
2179 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2180 gcc_assert (index >= 0);
2181 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2182 call, &jfunc))
2183 return;
2184 }
2185 else
2186 {
2187 struct ipa_jump_func jfunc;
2188 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2189 tree expr;
2190
2191 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2192 if (!expr)
2193 return;
2194 index = ipa_get_param_decl_index (info,
2195 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2196 gcc_assert (index >= 0);
2197 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2198 call, &jfunc, anc_offset))
2199 return;
2200 }
2201
2202 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2203 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2204 ii->offset = anc_offset;
2205 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2206 ii->otr_type = obj_type_ref_class (target);
2207 ii->polymorphic = 1;
2208 }
2209
2210 /* Analyze whether and how the call statement CALL utilizes formal
2211    parameters of the caller, described by FBI->info.  FBI also holds
2212    intermediate information about each formal parameter.  */
2213
2214 static void
2215 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2216 {
2217 tree target = gimple_call_fn (call);
2218
2219 if (!target
2220 || (TREE_CODE (target) != SSA_NAME
2221 && !virtual_method_call_p (target)))
2222 return;
2223
2224 struct cgraph_edge *cs = fbi->node->get_edge (call);
2225 /* If we previously turned the call into a direct call, there is
2226 no need to analyze. */
2227 if (cs && !cs->indirect_unknown_callee)
2228 return;
2229
2230 if (cs->indirect_info->polymorphic && flag_devirtualize)
2231 {
2232 tree instance;
2233 tree target = gimple_call_fn (call);
2234 ipa_polymorphic_call_context context (current_function_decl,
2235 target, call, &instance);
2236
2237 gcc_checking_assert (cs->indirect_info->otr_type
2238 == obj_type_ref_class (target));
2239 gcc_checking_assert (cs->indirect_info->otr_token
2240 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2241
2242 cs->indirect_info->vptr_changed
2243 = !context.get_dynamic_type (instance,
2244 OBJ_TYPE_REF_OBJECT (target),
2245 obj_type_ref_class (target), call);
2246 cs->indirect_info->context = context;
2247 }
2248
2249 if (TREE_CODE (target) == SSA_NAME)
2250 ipa_analyze_indirect_call_uses (fbi, call, target);
2251 else if (virtual_method_call_p (target))
2252 ipa_analyze_virtual_call_uses (fbi, call, target);
2253 }
2254
2255
2256 /* Analyze the call statement STMT with respect to formal parameters
2257    (described in FBI->info) of the caller given by FBI->node.  Currently it
2258    only checks whether formal parameters are called.  */
2259
2260 static void
2261 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2262 {
2263 if (is_gimple_call (stmt))
2264 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2265 }
2266
2267 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2268    address operands alike.  If OP is a parameter declaration, mark it as
2269    used in the info structure passed in DATA.  */
2270
2271 static bool
2272 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2273 {
2274 struct ipa_node_params *info = (struct ipa_node_params *) data;
2275
2276 op = get_base_address (op);
2277 if (op
2278 && TREE_CODE (op) == PARM_DECL)
2279 {
2280 int index = ipa_get_param_decl_index (info, op);
2281 gcc_assert (index >= 0);
2282 ipa_set_param_used (info, index, true);
2283 }
2284
2285 return false;
2286 }
2287
2288 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2289 the findings in various structures of the associated ipa_node_params
2290 structure, such as parameter flags, notes etc. FBI holds various data about
2291 the function being analyzed. */
2292
2293 static void
2294 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2295 {
2296 gimple_stmt_iterator gsi;
2297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2298 {
2299 gimple *stmt = gsi_stmt (gsi);
2300
2301 if (is_gimple_debug (stmt))
2302 continue;
2303
2304 ipa_analyze_stmt_uses (fbi, stmt);
2305 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2306 visit_ref_for_mod_analysis,
2307 visit_ref_for_mod_analysis,
2308 visit_ref_for_mod_analysis);
2309 }
2310 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2311 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2312 visit_ref_for_mod_analysis,
2313 visit_ref_for_mod_analysis,
2314 visit_ref_for_mod_analysis);
2315 }
2316
2317 /* Calculate controlled uses of parameters of NODE. */
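/* A use is counted as controlled here when the parameter's default
   definition is only used in call statements; any other non-debug use
   degrades the count to IPA_UNDESCRIBED_USE.  */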
2318
2319 static void
2320 ipa_analyze_controlled_uses (struct cgraph_node *node)
2321 {
2322 struct ipa_node_params *info = IPA_NODE_REF (node);
2323
2324 for (int i = 0; i < ipa_get_param_count (info); i++)
2325 {
2326 tree parm = ipa_get_param (info, i);
2327 int controlled_uses = 0;
2328
2329 /* For SSA regs see if parameter is used. For non-SSA we compute
2330 the flag during modification analysis. */
2331 if (is_gimple_reg (parm))
2332 {
2333 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2334 parm);
2335 if (ddef && !has_zero_uses (ddef))
2336 {
2337 imm_use_iterator imm_iter;
2338 use_operand_p use_p;
2339
2340 ipa_set_param_used (info, i, true);
2341 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2342 if (!is_gimple_call (USE_STMT (use_p)))
2343 {
2344 if (!is_gimple_debug (USE_STMT (use_p)))
2345 {
2346 controlled_uses = IPA_UNDESCRIBED_USE;
2347 break;
2348 }
2349 }
2350 else
2351 controlled_uses++;
2352 }
2353 else
2354 controlled_uses = 0;
2355 }
2356 else
2357 controlled_uses = IPA_UNDESCRIBED_USE;
2358 ipa_set_controlled_uses (info, i, controlled_uses);
2359 }
2360 }
2361
2362 /* Free stuff in BI. */
2363
2364 static void
2365 free_ipa_bb_info (struct ipa_bb_info *bi)
2366 {
2367 bi->cg_edges.release ();
2368 bi->param_aa_statuses.release ();
2369 }
2370
2371 /* Dominator walker driving the analysis. */
2372
2373 class analysis_dom_walker : public dom_walker
2374 {
2375 public:
2376 analysis_dom_walker (struct ipa_func_body_info *fbi)
2377 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2378
2379 virtual edge before_dom_children (basic_block);
2380
2381 private:
2382 struct ipa_func_body_info *m_fbi;
2383 };
2384
2385 edge
2386 analysis_dom_walker::before_dom_children (basic_block bb)
2387 {
2388 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2389 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2390 return NULL;
2391 }
2392
2393 /* Release body info FBI. */
2394
2395 void
2396 ipa_release_body_info (struct ipa_func_body_info *fbi)
2397 {
2398 int i;
2399 struct ipa_bb_info *bi;
2400
2401 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2402 free_ipa_bb_info (bi);
2403 fbi->bb_infos.release ();
2404 }
2405
2406 /* Initialize the array describing properties of formal parameters
2407 of NODE, analyze their uses and compute jump functions associated
2408 with actual arguments of calls from within NODE. */
2409
2410 void
2411 ipa_analyze_node (struct cgraph_node *node)
2412 {
2413 struct ipa_func_body_info fbi;
2414 struct ipa_node_params *info;
2415
2416 ipa_check_create_node_params ();
2417 ipa_check_create_edge_args ();
2418 info = IPA_NODE_REF (node);
2419
2420 if (info->analysis_done)
2421 return;
2422 info->analysis_done = 1;
2423
2424 if (ipa_func_spec_opts_forbid_analysis_p (node))
2425 {
2426 for (int i = 0; i < ipa_get_param_count (info); i++)
2427 {
2428 ipa_set_param_used (info, i, true);
2429 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2430 }
2431 return;
2432 }
2433
2434 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2435 push_cfun (func);
2436 calculate_dominance_info (CDI_DOMINATORS);
2437 ipa_initialize_node_params (node);
2438 ipa_analyze_controlled_uses (node);
2439
2440 fbi.node = node;
2441 fbi.info = IPA_NODE_REF (node);
2442 fbi.bb_infos = vNULL;
2443 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2444 fbi.param_count = ipa_get_param_count (info);
2445 fbi.aa_walked = 0;
2446
2447 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2448 {
2449 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2450 bi->cg_edges.safe_push (cs);
2451 }
2452
2453 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2454 {
2455 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2456 bi->cg_edges.safe_push (cs);
2457 }
2458
2459 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2460
2461 ipa_release_body_info (&fbi);
2462 free_dominance_info (CDI_DOMINATORS);
2463 pop_cfun ();
2464 }
2465
2466 /* Update the jump functions associated with call graph edge E when the call
2467 graph edge CS is being inlined, assuming that E->caller is already (possibly
2468 indirectly) inlined into CS->callee and that E has not been inlined. */
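/* For example (an illustrative sketch): if the jump function of E's
   argument I is a simple pass-through of CS->callee's parameter J and CS
   passes a constant in position J, the updated jump function of argument I
   becomes a copy of that constant (the IPA_JF_CONST case below).  */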
2469
2470 static void
2471 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2472 struct cgraph_edge *e)
2473 {
2474 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2475 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2476 int count = ipa_get_cs_argument_count (args);
2477 int i;
2478
2479 for (i = 0; i < count; i++)
2480 {
2481 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2482 struct ipa_polymorphic_call_context *dst_ctx
2483 = ipa_get_ith_polymorhic_call_context (args, i);
2484
2485 if (dst->type == IPA_JF_ANCESTOR)
2486 {
2487 struct ipa_jump_func *src;
2488 int dst_fid = dst->value.ancestor.formal_id;
2489 struct ipa_polymorphic_call_context *src_ctx
2490 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2491
2492 	  /* A variable number of arguments can cause havoc if we try to access
2493 	     an argument that does not exist on the inlined edge.  So make sure
2494 	     we don't.  */
2495 if (dst_fid >= ipa_get_cs_argument_count (top))
2496 {
2497 ipa_set_jf_unknown (dst);
2498 continue;
2499 }
2500
2501 src = ipa_get_ith_jump_func (top, dst_fid);
2502
2503 if (src_ctx && !src_ctx->useless_p ())
2504 {
2505 struct ipa_polymorphic_call_context ctx = *src_ctx;
2506
2507 /* TODO: Make type preserved safe WRT contexts. */
2508 if (!ipa_get_jf_ancestor_type_preserved (dst))
2509 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2510 ctx.offset_by (dst->value.ancestor.offset);
2511 if (!ctx.useless_p ())
2512 {
2513 if (!dst_ctx)
2514 {
2515 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2516 count);
2517 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2518 }
2519
2520 dst_ctx->combine_with (ctx);
2521 }
2522 }
2523
2524 if (src->agg.items
2525 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2526 {
2527 struct ipa_agg_jf_item *item;
2528 int j;
2529
2530 	      /* Currently we do not produce clobber aggregate jump functions;
2531 		 replace this with merging when we do.  */
2532 gcc_assert (!dst->agg.items);
2533
2534 dst->agg.items = vec_safe_copy (src->agg.items);
2535 dst->agg.by_ref = src->agg.by_ref;
2536 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2537 item->offset -= dst->value.ancestor.offset;
2538 }
2539
2540 if (src->type == IPA_JF_PASS_THROUGH
2541 && src->value.pass_through.operation == NOP_EXPR)
2542 {
2543 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2544 dst->value.ancestor.agg_preserved &=
2545 src->value.pass_through.agg_preserved;
2546 }
2547 else if (src->type == IPA_JF_PASS_THROUGH
2548 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2549 {
2550 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2551 dst->value.ancestor.agg_preserved = false;
2552 }
2553 else if (src->type == IPA_JF_ANCESTOR)
2554 {
2555 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2556 dst->value.ancestor.offset += src->value.ancestor.offset;
2557 dst->value.ancestor.agg_preserved &=
2558 src->value.ancestor.agg_preserved;
2559 }
2560 else
2561 ipa_set_jf_unknown (dst);
2562 }
2563 else if (dst->type == IPA_JF_PASS_THROUGH)
2564 {
2565 struct ipa_jump_func *src;
2566 	  /* We must check the range due to calls with a variable number of
2567 	     arguments, and we cannot combine jump functions with operations.  */
2568 if (dst->value.pass_through.operation == NOP_EXPR
2569 && (dst->value.pass_through.formal_id
2570 < ipa_get_cs_argument_count (top)))
2571 {
2572 int dst_fid = dst->value.pass_through.formal_id;
2573 src = ipa_get_ith_jump_func (top, dst_fid);
2574 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2575 struct ipa_polymorphic_call_context *src_ctx
2576 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2577
2578 if (src_ctx && !src_ctx->useless_p ())
2579 {
2580 struct ipa_polymorphic_call_context ctx = *src_ctx;
2581
2582 /* TODO: Make type preserved safe WRT contexts. */
2583 if (!ipa_get_jf_pass_through_type_preserved (dst))
2584 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2585 if (!ctx.useless_p ())
2586 {
2587 if (!dst_ctx)
2588 {
2589 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2590 count);
2591 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2592 }
2593 dst_ctx->combine_with (ctx);
2594 }
2595 }
2596 switch (src->type)
2597 {
2598 case IPA_JF_UNKNOWN:
2599 ipa_set_jf_unknown (dst);
2600 break;
2601 case IPA_JF_CONST:
2602 ipa_set_jf_cst_copy (dst, src);
2603 break;
2604
2605 case IPA_JF_PASS_THROUGH:
2606 {
2607 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2608 enum tree_code operation;
2609 operation = ipa_get_jf_pass_through_operation (src);
2610
2611 if (operation == NOP_EXPR)
2612 {
2613 bool agg_p;
2614 agg_p = dst_agg_p
2615 && ipa_get_jf_pass_through_agg_preserved (src);
2616 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2617 }
2618 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2619 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2620 else
2621 {
2622 tree operand = ipa_get_jf_pass_through_operand (src);
2623 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2624 operation);
2625 }
2626 break;
2627 }
2628 case IPA_JF_ANCESTOR:
2629 {
2630 bool agg_p;
2631 agg_p = dst_agg_p
2632 && ipa_get_jf_ancestor_agg_preserved (src);
2633 ipa_set_ancestor_jf (dst,
2634 ipa_get_jf_ancestor_offset (src),
2635 ipa_get_jf_ancestor_formal_id (src),
2636 agg_p);
2637 break;
2638 }
2639 default:
2640 gcc_unreachable ();
2641 }
2642
2643 if (src->agg.items
2644 && (dst_agg_p || !src->agg.by_ref))
2645 {
2646 	      /* Currently we do not produce clobber aggregate jump
2647 		 functions; replace this with merging when we do.  */
2648 gcc_assert (!dst->agg.items);
2649
2650 dst->agg.by_ref = src->agg.by_ref;
2651 dst->agg.items = vec_safe_copy (src->agg.items);
2652 }
2653 }
2654 else
2655 ipa_set_jf_unknown (dst);
2656 }
2657 }
2658 }
2659
2660 /* If TARGET is an addr_expr of a function declaration, make it the
2661    (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2662    and return the edge.  Otherwise, return NULL.  */
2663
2664 struct cgraph_edge *
2665 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2666 bool speculative)
2667 {
2668 struct cgraph_node *callee;
2669 struct inline_edge_summary *es = inline_edge_summary (ie);
2670 bool unreachable = false;
2671
2672 if (TREE_CODE (target) == ADDR_EXPR)
2673 target = TREE_OPERAND (target, 0);
2674 if (TREE_CODE (target) != FUNCTION_DECL)
2675 {
2676 target = canonicalize_constructor_val (target, NULL);
2677 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2678 {
2679 /* Member pointer call that goes through a VMT lookup. */
2680 if (ie->indirect_info->member_ptr
2681 	  /* Or if the target is not an invariant expression and we do not
2682 	     know whether it will evaluate to a function at runtime.
2683 	     This can happen when folding through &VAR, where &VAR
2684 	     is IP invariant, but VAR itself is not.
2685
2686 	     TODO: Revisit this when GCC 5 is branched.  It seems that
2687 	     the member_ptr check is not needed and that we may try to fold
2688 	     the expression and see if VAR is readonly.  */
2689 || !is_gimple_ip_invariant (target))
2690 {
2691 if (dump_enabled_p ())
2692 {
2693 location_t loc = gimple_location_safe (ie->call_stmt);
2694 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2695 				   "discovered direct call to a non-invariant "
2696 				   "target in %s/%i\n",
2697 ie->caller->name (), ie->caller->order);
2698 }
2699 return NULL;
2700 }
2701
2702
2703 if (dump_enabled_p ())
2704 {
2705 location_t loc = gimple_location_safe (ie->call_stmt);
2706 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2707 "discovered direct call to non-function in %s/%i, "
2708 "making it __builtin_unreachable\n",
2709 ie->caller->name (), ie->caller->order);
2710 }
2711
2712 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2713 callee = cgraph_node::get_create (target);
2714 unreachable = true;
2715 }
2716 else
2717 callee = cgraph_node::get (target);
2718 }
2719 else
2720 callee = cgraph_node::get (target);
2721
2722   /* Because may-edges are not explicitly represented and the vtable may be
2723      external, we may create the first reference to the object in the unit.  */
2724 if (!callee || callee->global.inlined_to)
2725 {
2726
2727       /* We had better ensure we can refer to it.
2728 	 In the case of static functions we are out of luck, since we have
2729 	 already removed the body.  In the case of public functions we may or
2730 	 may not introduce the reference.  */
2731 if (!canonicalize_constructor_val (target, NULL)
2732 || !TREE_PUBLIC (target))
2733 {
2734 if (dump_file)
2735 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2736 		     "(%s/%i -> %s/%i) but cannot refer to it. Giving up.\n",
2737 xstrdup_for_dump (ie->caller->name ()),
2738 ie->caller->order,
2739 xstrdup_for_dump (ie->callee->name ()),
2740 ie->callee->order);
2741 return NULL;
2742 }
2743 callee = cgraph_node::get_create (target);
2744 }
2745
2746   /* If the edge is already speculative, check whether the new target agrees with the existing speculation and give up either way.  */
2747 if (speculative && ie->speculative)
2748 {
2749 struct cgraph_edge *e2;
2750 struct ipa_ref *ref;
2751 ie->speculative_call_info (e2, ie, ref);
2752 if (e2->callee->ultimate_alias_target ()
2753 != callee->ultimate_alias_target ())
2754 {
2755 if (dump_file)
2756 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2757 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2758 xstrdup_for_dump (ie->caller->name ()),
2759 ie->caller->order,
2760 xstrdup_for_dump (callee->name ()),
2761 callee->order,
2762 xstrdup_for_dump (e2->callee->name ()),
2763 e2->callee->order);
2764 }
2765 else
2766 {
2767 if (dump_file)
2768 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2769 		 "(%s/%i -> %s/%i); this agrees with the previous speculation.\n",
2770 xstrdup_for_dump (ie->caller->name ()),
2771 ie->caller->order,
2772 xstrdup_for_dump (callee->name ()),
2773 callee->order);
2774 }
2775 return NULL;
2776 }
2777
2778 if (!dbg_cnt (devirt))
2779 return NULL;
2780
2781 ipa_check_create_node_params ();
2782
2783   /* We cannot make edges to inline clones.  It is a bug if someone removed
2784      the cgraph node too early.  */
2785 gcc_assert (!callee->global.inlined_to);
2786
2787 if (dump_file && !unreachable)
2788 {
2789 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2790 "(%s/%i -> %s/%i), for stmt ",
2791 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2792 speculative ? "speculative" : "known",
2793 xstrdup_for_dump (ie->caller->name ()),
2794 ie->caller->order,
2795 xstrdup_for_dump (callee->name ()),
2796 callee->order);
2797 if (ie->call_stmt)
2798 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2799 else
2800 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2801 }
2802 if (dump_enabled_p ())
2803 {
2804 location_t loc = gimple_location_safe (ie->call_stmt);
2805
2806 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2807 "converting indirect call in %s to direct call to %s\n",
2808 ie->caller->name (), callee->name ());
2809 }
2810 if (!speculative)
2811 {
2812 struct cgraph_edge *orig = ie;
2813 ie = ie->make_direct (callee);
2814 /* If we resolved speculative edge the cost is already up to date
2815 for direct call (adjusted by inline_edge_duplication_hook). */
2816 if (ie == orig)
2817 {
2818 es = inline_edge_summary (ie);
2819 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2820 - eni_size_weights.call_cost);
2821 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2822 - eni_time_weights.call_cost);
2823 }
2824 }
2825 else
2826 {
2827 if (!callee->can_be_discarded_p ())
2828 {
2829 cgraph_node *alias;
2830 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2831 if (alias)
2832 callee = alias;
2833 }
2834 /* make_speculative will update ie's cost to direct call cost. */
2835 ie = ie->make_speculative
2836 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2837 }
2838
2839 return ie;
2840 }
2841
2842 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2843 CONSTRUCTOR and return it. Return NULL if the search fails for some
2844 reason. */
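/* For instance (a hypothetical example, assuming a target with 32-bit
   int): for a CONSTRUCTOR of type int[2] holding {10, 20}, REQ_OFFSET 32
   selects the second element and the constant 20 is returned.  */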
2845
2846 static tree
2847 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2848 {
2849 tree type = TREE_TYPE (constructor);
2850 if (TREE_CODE (type) != ARRAY_TYPE
2851 && TREE_CODE (type) != RECORD_TYPE)
2852 return NULL;
2853
2854 unsigned ix;
2855 tree index, val;
2856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2857 {
2858 HOST_WIDE_INT elt_offset;
2859 if (TREE_CODE (type) == ARRAY_TYPE)
2860 {
2861 offset_int off;
2862 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2863 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2864
2865 if (index)
2866 {
2867 off = wi::to_offset (index);
2868 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2869 {
2870 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2871 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2872 off = wi::sext (off - wi::to_offset (low_bound),
2873 TYPE_PRECISION (TREE_TYPE (index)));
2874 }
2875 off *= wi::to_offset (unit_size);
2876 }
2877 else
2878 off = wi::to_offset (unit_size) * ix;
2879
2880 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2881 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2882 continue;
2883 elt_offset = off.to_shwi ();
2884 }
2885 else if (TREE_CODE (type) == RECORD_TYPE)
2886 {
2887 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2888 if (DECL_BIT_FIELD (index))
2889 continue;
2890 elt_offset = int_bit_position (index);
2891 }
2892 else
2893 gcc_unreachable ();
2894
2895 if (elt_offset > req_offset)
2896 return NULL;
2897
2898 if (TREE_CODE (val) == CONSTRUCTOR)
2899 return find_constructor_constant_at_offset (val,
2900 req_offset - elt_offset);
2901
2902 if (elt_offset == req_offset
2903 && is_gimple_reg_type (TREE_TYPE (val))
2904 && is_gimple_ip_invariant (val))
2905 return val;
2906 }
2907 return NULL;
2908 }
2909
2910 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2911 invariant from a static constructor and if so, return it. Otherwise return
2912 NULL. */
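/* For instance (a hypothetical example; the function foo is invented),
   given

     static const struct { long a; void (*f) (void); } s = { 1, foo };

   a lookup with SCALAR being &s, BY_REF true and OFFSET equal to the bit
   position of the field f returns the initializer of f, i.e. the address
   of foo.  */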
2913
2914 static tree
2915 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2916 {
2917 if (by_ref)
2918 {
2919 if (TREE_CODE (scalar) != ADDR_EXPR)
2920 return NULL;
2921 scalar = TREE_OPERAND (scalar, 0);
2922 }
2923
2924 if (!VAR_P (scalar)
2925 || !is_global_var (scalar)
2926 || !TREE_READONLY (scalar)
2927 || !DECL_INITIAL (scalar)
2928 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2929 return NULL;
2930
2931 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2932 }
2933
2934 /* Retrieve value from aggregate jump function AGG or static initializer of
2935 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2936 none. BY_REF specifies whether the value has to be passed by reference or
2937 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2938 to is set to true if the value comes from an initializer of a constant. */
2939
2940 tree
2941 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2942 HOST_WIDE_INT offset, bool by_ref,
2943 bool *from_global_constant)
2944 {
2945 struct ipa_agg_jf_item *item;
2946 int i;
2947
2948 if (scalar)
2949 {
2950 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2951 if (res)
2952 {
2953 if (from_global_constant)
2954 *from_global_constant = true;
2955 return res;
2956 }
2957 }
2958
2959 if (!agg
2960 || by_ref != agg->by_ref)
2961 return NULL;
2962
2963 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2964 if (item->offset == offset)
2965 {
2966 	/* Currently we do not have clobber values; return NULL for them once
2967 	   we do.  */
2968 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2969 if (from_global_constant)
2970 *from_global_constant = false;
2971 return item->value;
2972 }
2973 return NULL;
2974 }
2975
2976 /* Remove a reference to SYMBOL from the list of references of a node given by
2977 reference description RDESC. Return true if the reference has been
2978 successfully found and removed. */
2979
2980 static bool
2981 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2982 {
2983 struct ipa_ref *to_del;
2984 struct cgraph_edge *origin;
2985
2986 origin = rdesc->cs;
2987 if (!origin)
2988 return false;
2989 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2990 origin->lto_stmt_uid);
2991 if (!to_del)
2992 return false;
2993
2994 to_del->remove_reference ();
2995 if (dump_file)
2996 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2997 xstrdup_for_dump (origin->caller->name ()),
2998 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2999 return true;
3000 }
3001
3002 /* If JFUNC has a reference description with refcount different from
3003 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3004 NULL. JFUNC must be a constant jump function. */
3005
3006 static struct ipa_cst_ref_desc *
3007 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3008 {
3009 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3010 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3011 return rdesc;
3012 else
3013 return NULL;
3014 }
3015
3016 /* If the value of constant jump function JFUNC is an address of a function
3017 declaration, return the associated call graph node. Otherwise return
3018 NULL. */
3019
3020 static cgraph_node *
3021 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3022 {
3023 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3024 tree cst = ipa_get_jf_constant (jfunc);
3025 if (TREE_CODE (cst) != ADDR_EXPR
3026 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3027 return NULL;
3028
3029 return cgraph_node::get (TREE_OPERAND (cst, 0));
3030 }
3031
3032
3033 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3034 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3035 the edge specified in the rdesc. Return false if either the symbol or the
3036 reference could not be found, otherwise return true. */
3037
3038 static bool
3039 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3040 {
3041 struct ipa_cst_ref_desc *rdesc;
3042 if (jfunc->type == IPA_JF_CONST
3043 && (rdesc = jfunc_rdesc_usable (jfunc))
3044 && --rdesc->refcount == 0)
3045 {
3046 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3047 if (!symbol)
3048 return false;
3049
3050 return remove_described_reference (symbol, rdesc);
3051 }
3052 return true;
3053 }
3054
3055 /* Try to find a destination for indirect edge IE that corresponds to a simple
3056 call or a call of a member function pointer and where the destination is a
3057 pointer formal parameter described by jump function JFUNC. If it can be
3058 determined, return the newly direct edge, otherwise return NULL.
3059 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3060
3061 static struct cgraph_edge *
3062 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3063 struct ipa_jump_func *jfunc,
3064 struct ipa_node_params *new_root_info)
3065 {
3066 struct cgraph_edge *cs;
3067 tree target;
3068 bool agg_contents = ie->indirect_info->agg_contents;
3069 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3070 if (agg_contents)
3071 {
3072 bool from_global_constant;
3073 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3074 ie->indirect_info->offset,
3075 ie->indirect_info->by_ref,
3076 &from_global_constant);
3077 if (target
3078 && !from_global_constant
3079 && !ie->indirect_info->guaranteed_unmodified)
3080 return NULL;
3081 }
3082 else
3083 target = scalar;
3084 if (!target)
3085 return NULL;
3086 cs = ipa_make_edge_direct_to_target (ie, target);
3087
3088 if (cs && !agg_contents)
3089 {
3090 bool ok;
3091 gcc_checking_assert (cs->callee
3092 && (cs != ie
3093 || jfunc->type != IPA_JF_CONST
3094 || !cgraph_node_for_jfunc (jfunc)
3095 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3096 ok = try_decrement_rdesc_refcount (jfunc);
3097 gcc_checking_assert (ok);
3098 }
3099
3100 return cs;
3101 }
3102
3103 /* Return the target to be used in cases of impossible devirtualization. IE
3104 and target (the latter can be NULL) are dumped when dumping is enabled. */
3105
3106 tree
3107 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3108 {
3109 if (dump_file)
3110 {
3111 if (target)
3112 fprintf (dump_file,
3113 "Type inconsistent devirtualization: %s/%i->%s\n",
3114 ie->caller->name (), ie->caller->order,
3115 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3116 else
3117 fprintf (dump_file,
3118 "No devirtualization target in %s/%i\n",
3119 ie->caller->name (), ie->caller->order);
3120 }
3121 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3122 cgraph_node::get_create (new_target);
3123 return new_target;
3124 }
3125
3126 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3127 call based on a formal parameter which is described by jump function JFUNC
3128 and if it can be determined, make it direct and return the direct edge.
3129 Otherwise, return NULL. CTX describes the polymorphic context that the
3130 parameter the call is based on brings along with it. */
3131
3132 static struct cgraph_edge *
3133 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3134 struct ipa_jump_func *jfunc,
3135 struct ipa_polymorphic_call_context ctx)
3136 {
3137 tree target = NULL;
3138 bool speculative = false;
3139
3140 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3141 return NULL;
3142
3143 gcc_assert (!ie->indirect_info->by_ref);
3144
3145 /* Try to do lookup via known virtual table pointer value. */
3146 if (!ie->indirect_info->vptr_changed
3147 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3148 {
3149 tree vtable;
3150 unsigned HOST_WIDE_INT offset;
3151 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3152 : NULL;
3153 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3154 ie->indirect_info->offset,
3155 true);
3156 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3157 {
3158 bool can_refer;
3159 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3160 vtable, offset, &can_refer);
3161 if (can_refer)
3162 {
3163 if (!t
3164 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3165 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3166 || !possible_polymorphic_call_target_p
3167 (ie, cgraph_node::get (t)))
3168 {
3169 /* Do not speculate builtin_unreachable, it is stupid! */
3170 if (!ie->indirect_info->vptr_changed)
3171 target = ipa_impossible_devirt_target (ie, target);
3172 else
3173 target = NULL;
3174 }
3175 else
3176 {
3177 target = t;
3178 speculative = ie->indirect_info->vptr_changed;
3179 }
3180 }
3181 }
3182 }
3183
3184 ipa_polymorphic_call_context ie_context (ie);
3185 vec <cgraph_node *>targets;
3186 bool final;
3187
3188 ctx.offset_by (ie->indirect_info->offset);
3189 if (ie->indirect_info->vptr_changed)
3190 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3191 ie->indirect_info->otr_type);
3192 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3193 targets = possible_polymorphic_call_targets
3194 (ie->indirect_info->otr_type,
3195 ie->indirect_info->otr_token,
3196 ctx, &final);
3197 if (final && targets.length () <= 1)
3198 {
3199 speculative = false;
3200 if (targets.length () == 1)
3201 target = targets[0]->decl;
3202 else
3203 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3204 }
3205 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3206 && !ie->speculative && ie->maybe_hot_p ())
3207 {
3208 cgraph_node *n;
3209 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3210 ie->indirect_info->otr_token,
3211 ie->indirect_info->context);
3212 if (n)
3213 {
3214 target = n->decl;
3215 speculative = true;
3216 }
3217 }
3218
3219 if (target)
3220 {
3221 if (!possible_polymorphic_call_target_p
3222 (ie, cgraph_node::get_create (target)))
3223 {
3224 if (speculative)
3225 return NULL;
3226 target = ipa_impossible_devirt_target (ie, target);
3227 }
3228 return ipa_make_edge_direct_to_target (ie, target, speculative);
3229 }
3230 else
3231 return NULL;
3232 }
3233
3234 /* Update the param called notes associated with NODE when CS is being inlined,
3235 assuming NODE is (potentially indirectly) inlined into CS->callee.
3236 Moreover, if the callee is discovered to be constant, create a new cgraph
3237 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3238    unless NEW_EDGES is NULL.  Return true iff any new edges were created.  */
3239
3240 static bool
3241 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3242 struct cgraph_node *node,
3243 vec<cgraph_edge *> *new_edges)
3244 {
3245 struct ipa_edge_args *top;
3246 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3247 struct ipa_node_params *new_root_info;
3248 bool res = false;
3249
3250 ipa_check_create_edge_args ();
3251 top = IPA_EDGE_REF (cs);
3252 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3253 ? cs->caller->global.inlined_to
3254 : cs->caller);
3255
3256 for (ie = node->indirect_calls; ie; ie = next_ie)
3257 {
3258 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3259 struct ipa_jump_func *jfunc;
3260 int param_index;
3261 cgraph_node *spec_target = NULL;
3262
3263 next_ie = ie->next_callee;
3264
3265 if (ici->param_index == -1)
3266 continue;
3267
3268       /* We must check the range due to calls with a variable number of arguments:  */
3269 if (ici->param_index >= ipa_get_cs_argument_count (top))
3270 {
3271 ici->param_index = -1;
3272 continue;
3273 }
3274
3275 param_index = ici->param_index;
3276 jfunc = ipa_get_ith_jump_func (top, param_index);
3277
3278 if (ie->speculative)
3279 {
3280 struct cgraph_edge *de;
3281 struct ipa_ref *ref;
3282 ie->speculative_call_info (de, ie, ref);
3283 spec_target = de->callee;
3284 }
3285
3286 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3287 new_direct_edge = NULL;
3288 else if (ici->polymorphic)
3289 {
3290 ipa_polymorphic_call_context ctx;
3291 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3292 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3293 }
3294 else
3295 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3296 new_root_info);
3297 /* If speculation was removed, then we need to do nothing. */
3298 if (new_direct_edge && new_direct_edge != ie
3299 && new_direct_edge->callee == spec_target)
3300 {
3301 new_direct_edge->indirect_inlining_edge = 1;
3302 top = IPA_EDGE_REF (cs);
3303 res = true;
3304 if (!new_direct_edge->speculative)
3305 continue;
3306 }
3307 else if (new_direct_edge)
3308 {
3309 new_direct_edge->indirect_inlining_edge = 1;
3310 if (new_direct_edge->call_stmt)
3311 new_direct_edge->call_stmt_cannot_inline_p
3312 = !gimple_check_call_matching_types (
3313 new_direct_edge->call_stmt,
3314 new_direct_edge->callee->decl, false);
3315 if (new_edges)
3316 {
3317 new_edges->safe_push (new_direct_edge);
3318 res = true;
3319 }
3320 top = IPA_EDGE_REF (cs);
3321 	  /* If a speculative edge was introduced, we still need to update
3322 	     the call info of the indirect edge.  */
3323 if (!new_direct_edge->speculative)
3324 continue;
3325 }
3326 if (jfunc->type == IPA_JF_PASS_THROUGH
3327 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3328 {
3329 if (ici->agg_contents
3330 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3331 && !ici->polymorphic)
3332 ici->param_index = -1;
3333 else
3334 {
3335 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3336 if (ici->polymorphic
3337 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3338 ici->vptr_changed = true;
3339 }
3340 }
3341 else if (jfunc->type == IPA_JF_ANCESTOR)
3342 {
3343 if (ici->agg_contents
3344 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3345 && !ici->polymorphic)
3346 ici->param_index = -1;
3347 else
3348 {
3349 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3350 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3351 if (ici->polymorphic
3352 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3353 ici->vptr_changed = true;
3354 }
3355 }
3356 else
3357 /* Either we can find a destination for this edge now or never. */
3358 ici->param_index = -1;
3359 }
3360
3361 return res;
3362 }
3363
3364 /* Recursively traverse subtree of NODE (including node) made of inlined
3365 cgraph_edges when CS has been inlined and invoke
3366 update_indirect_edges_after_inlining on all nodes and
3367 update_jump_functions_after_inlining on all non-inlined edges that lead out
3368 of this subtree. Newly discovered indirect edges will be added to
3369    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges
3370    were created.  */
3371
3372 static bool
3373 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3374 struct cgraph_node *node,
3375 vec<cgraph_edge *> *new_edges)
3376 {
3377 struct cgraph_edge *e;
3378 bool res;
3379
3380 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3381
3382 for (e = node->callees; e; e = e->next_callee)
3383 if (!e->inline_failed)
3384 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3385 else
3386 update_jump_functions_after_inlining (cs, e);
3387 for (e = node->indirect_calls; e; e = e->next_callee)
3388 update_jump_functions_after_inlining (cs, e);
3389
3390 return res;
3391 }
3392
3393 /* Combine two controlled uses counts as done during inlining. */
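/* For illustration: if a parameter has C = 2 controlled uses in the caller
   and the corresponding callee parameter has D = 3, the result is
   2 + 3 - 1 = 4, because the use constituted by the inlined call itself
   disappears.  */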
3394
3395 static int
3396 combine_controlled_uses_counters (int c, int d)
3397 {
3398 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3399 return IPA_UNDESCRIBED_USE;
3400 else
3401 return c + d - 1;
3402 }
3403
3404 /* Propagate the number of controlled uses from CS->callee to the new root
3405    of the tree of inlined nodes.  */
3406
3407 static void
3408 propagate_controlled_uses (struct cgraph_edge *cs)
3409 {
3410 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3411 struct cgraph_node *new_root = cs->caller->global.inlined_to
3412 ? cs->caller->global.inlined_to : cs->caller;
3413 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3414 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3415 int count, i;
3416
3417 count = MIN (ipa_get_cs_argument_count (args),
3418 ipa_get_param_count (old_root_info));
3419 for (i = 0; i < count; i++)
3420 {
3421 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3422 struct ipa_cst_ref_desc *rdesc;
3423
3424 if (jf->type == IPA_JF_PASS_THROUGH)
3425 {
3426 int src_idx, c, d;
3427 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3428 c = ipa_get_controlled_uses (new_root_info, src_idx);
3429 d = ipa_get_controlled_uses (old_root_info, i);
3430
3431 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3432 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3433 c = combine_controlled_uses_counters (c, d);
3434 ipa_set_controlled_uses (new_root_info, src_idx, c);
3435 if (c == 0 && new_root_info->ipcp_orig_node)
3436 {
3437 struct cgraph_node *n;
3438 struct ipa_ref *ref;
3439 tree t = new_root_info->known_csts[src_idx];
3440
3441 if (t && TREE_CODE (t) == ADDR_EXPR
3442 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3443 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3444 && (ref = new_root->find_reference (n, NULL, 0)))
3445 {
3446 if (dump_file)
3447 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3448 "reference from %s/%i to %s/%i.\n",
3449 xstrdup_for_dump (new_root->name ()),
3450 new_root->order,
3451 xstrdup_for_dump (n->name ()), n->order);
3452 ref->remove_reference ();
3453 }
3454 }
3455 }
3456 else if (jf->type == IPA_JF_CONST
3457 && (rdesc = jfunc_rdesc_usable (jf)))
3458 {
3459 int d = ipa_get_controlled_uses (old_root_info, i);
3460 int c = rdesc->refcount;
3461 rdesc->refcount = combine_controlled_uses_counters (c, d);
3462 if (rdesc->refcount == 0)
3463 {
3464 tree cst = ipa_get_jf_constant (jf);
3465 struct cgraph_node *n;
3466 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3467 && TREE_CODE (TREE_OPERAND (cst, 0))
3468 == FUNCTION_DECL);
3469 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3470 if (n)
3471 {
3472 struct cgraph_node *clone;
3473 bool ok;
3474 ok = remove_described_reference (n, rdesc);
3475 gcc_checking_assert (ok);
3476
3477 clone = cs->caller;
3478 while (clone->global.inlined_to
3479 && clone != rdesc->cs->caller
3480 && IPA_NODE_REF (clone)->ipcp_orig_node)
3481 {
3482 struct ipa_ref *ref;
3483 ref = clone->find_reference (n, NULL, 0);
3484 if (ref)
3485 {
3486 if (dump_file)
3487 fprintf (dump_file, "ipa-prop: Removing "
3488 "cloning-created reference "
3489 "from %s/%i to %s/%i.\n",
3490 xstrdup_for_dump (clone->name ()),
3491 clone->order,
3492 xstrdup_for_dump (n->name ()),
3493 n->order);
3494 ref->remove_reference ();
3495 }
3496 clone = clone->callers->caller;
3497 }
3498 }
3499 }
3500 }
3501 }
3502
3503 for (i = ipa_get_param_count (old_root_info);
3504 i < ipa_get_cs_argument_count (args);
3505 i++)
3506 {
3507 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3508
3509 if (jf->type == IPA_JF_CONST)
3510 {
3511 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3512 if (rdesc)
3513 rdesc->refcount = IPA_UNDESCRIBED_USE;
3514 }
3515 else if (jf->type == IPA_JF_PASS_THROUGH)
3516 ipa_set_controlled_uses (new_root_info,
3517 jf->value.pass_through.formal_id,
3518 IPA_UNDESCRIBED_USE);
3519 }
3520 }
3521
3522 /* Update jump functions and call note functions on inlining the call site CS.
3523 CS is expected to lead to a node already cloned by
3524 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3525    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges
3526    were created.  */
3527
3528 bool
3529 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3530 vec<cgraph_edge *> *new_edges)
3531 {
3532 bool changed;
3533 /* Do nothing if the preparation phase has not been carried out yet
3534 (i.e. during early inlining). */
3535 if (!ipa_node_params_sum)
3536 return false;
3537 gcc_assert (ipa_edge_args_vector);
3538
3539 propagate_controlled_uses (cs);
3540 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3541
3542 return changed;
3543 }
3544
3545 /* Frees all dynamically allocated structures that the argument info points
3546 to. */
3547
3548 void
3549 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3550 {
3551 vec_free (args->jump_functions);
3552 memset (args, 0, sizeof (*args));
3553 }
3554
3555 /* Free all ipa_edge structures. */
3556
3557 void
3558 ipa_free_all_edge_args (void)
3559 {
3560 int i;
3561 struct ipa_edge_args *args;
3562
3563 if (!ipa_edge_args_vector)
3564 return;
3565
3566 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3567 ipa_free_edge_args_substructures (args);
3568
3569 vec_free (ipa_edge_args_vector);
3570 }
3571
3572 /* Free all ipa_node_params structures. */
3573
3574 void
3575 ipa_free_all_node_params (void)
3576 {
3577 ipa_node_params_sum->~ipa_node_params_t ();
3578 ipa_node_params_sum = NULL;
3579 }
3580
3581 /* Grow ipcp_transformations if necessary. */
3582
3583 void
3584 ipcp_grow_transformations_if_necessary (void)
3585 {
3586 if (vec_safe_length (ipcp_transformations)
3587 <= (unsigned) symtab->cgraph_max_uid)
3588 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3589 }
3590
3591 /* Set the aggregate replacements of NODE to be AGGVALS. */
3592
3593 void
3594 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3595 struct ipa_agg_replacement_value *aggvals)
3596 {
3597 ipcp_grow_transformations_if_necessary ();
3598 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3599 }
3600
3601 /* Hook that is called by cgraph.c when an edge is removed. */
3602
3603 static void
3604 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3605 {
3606 struct ipa_edge_args *args;
3607
3608 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3609 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3610 return;
3611
3612 args = IPA_EDGE_REF (cs);
3613 if (args->jump_functions)
3614 {
3615 struct ipa_jump_func *jf;
3616 int i;
3617 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3618 {
3619 struct ipa_cst_ref_desc *rdesc;
3620 try_decrement_rdesc_refcount (jf);
3621 if (jf->type == IPA_JF_CONST
3622 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3623 && rdesc->cs == cs)
3624 rdesc->cs = NULL;
3625 }
3626 }
3627
3628 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3629 }
3630
3631 /* Hook that is called by cgraph.c when an edge is duplicated. */
3632
3633 static void
3634 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3635 void *)
3636 {
3637 struct ipa_edge_args *old_args, *new_args;
3638 unsigned int i;
3639
3640 ipa_check_create_edge_args ();
3641
3642 old_args = IPA_EDGE_REF (src);
3643 new_args = IPA_EDGE_REF (dst);
3644
3645 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3646 if (old_args->polymorphic_call_contexts)
3647 new_args->polymorphic_call_contexts
3648 = vec_safe_copy (old_args->polymorphic_call_contexts);
3649
3650 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3651 {
3652 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3653 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3654
3655 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3656
3657 if (src_jf->type == IPA_JF_CONST)
3658 {
3659 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3660
3661 if (!src_rdesc)
3662 dst_jf->value.constant.rdesc = NULL;
3663 else if (src->caller == dst->caller)
3664 {
3665 struct ipa_ref *ref;
3666 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3667 gcc_checking_assert (n);
3668 ref = src->caller->find_reference (n, src->call_stmt,
3669 src->lto_stmt_uid);
3670 gcc_checking_assert (ref);
3671 dst->caller->clone_reference (ref, ref->stmt);
3672
3673 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3674 dst_rdesc->cs = dst;
3675 dst_rdesc->refcount = src_rdesc->refcount;
3676 dst_rdesc->next_duplicate = NULL;
3677 dst_jf->value.constant.rdesc = dst_rdesc;
3678 }
3679 else if (src_rdesc->cs == src)
3680 {
3681 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3682 dst_rdesc->cs = dst;
3683 dst_rdesc->refcount = src_rdesc->refcount;
3684 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3685 src_rdesc->next_duplicate = dst_rdesc;
3686 dst_jf->value.constant.rdesc = dst_rdesc;
3687 }
3688 else
3689 {
3690 struct ipa_cst_ref_desc *dst_rdesc;
3691 /* This can happen during inlining, when a JFUNC can refer to a
3692 reference taken in a function up in the tree of inline clones.
3693 We need to find the duplicate that refers to our tree of
3694 inline clones. */
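/* Hypothetical illustration: if the reference was originally taken in
   function C, and C was inlined into B which was in turn inlined into A,
   the walk below follows the next_duplicate chain until it finds the
   rdesc whose edge belongs to the same tree of inline clones (rooted
   at A) as DST does. */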
3695
3696 gcc_assert (dst->caller->global.inlined_to);
3697 for (dst_rdesc = src_rdesc->next_duplicate;
3698 dst_rdesc;
3699 dst_rdesc = dst_rdesc->next_duplicate)
3700 {
3701 struct cgraph_node *top;
3702 top = dst_rdesc->cs->caller->global.inlined_to
3703 ? dst_rdesc->cs->caller->global.inlined_to
3704 : dst_rdesc->cs->caller;
3705 if (dst->caller->global.inlined_to == top)
3706 break;
3707 }
3708 gcc_assert (dst_rdesc);
3709 dst_jf->value.constant.rdesc = dst_rdesc;
3710 }
3711 }
3712 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3713 && src->caller == dst->caller)
3714 {
3715 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3716 ? dst->caller->global.inlined_to : dst->caller;
3717 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3718 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3719
3720 int c = ipa_get_controlled_uses (root_info, idx);
3721 if (c != IPA_UNDESCRIBED_USE)
3722 {
3723 c++;
3724 ipa_set_controlled_uses (root_info, idx, c);
3725 }
3726 }
3727 }
3728 }
3729
3730 /* Analyze a function newly added to the callgraph. */
3731
3732 static void
3733 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3734 {
3735 if (node->has_gimple_body_p ())
3736 ipa_analyze_node (node);
3737 }
3738
3739 /* Initialize a newly created param info. */
3740
3741 void
3742 ipa_node_params_t::insert (cgraph_node *, ipa_node_params *info)
3743 {
3744 info->lattices = NULL;
3745 info->ipcp_orig_node = NULL;
3746 info->known_csts = vNULL;
3747 info->known_contexts = vNULL;
3748 info->analysis_done = 0;
3749 info->node_enqueued = 0;
3750 info->do_clone_for_all_contexts = 0;
3751 info->is_all_contexts_clone = 0;
3752 info->node_dead = 0;
3753 info->node_within_scc = 0;
3754 info->node_calling_single_call = 0;
3755 info->versionable = 0;
3756 }
3757
3758 /* Frees all dynamically allocated structures that the param info points
3759 to. */
3760
3761 void
3762 ipa_node_params_t::remove (cgraph_node *, ipa_node_params *info)
3763 {
3764 free (info->lattices);
3765 /* Lattice values and their sources are deallocated with their allocation
3766 pool. */
3767 info->known_csts.release ();
3768 info->known_contexts.release ();
3769 }
3770
3771 /* Hook that is called by summary when a node is duplicated. */
3772
3773 void
3774 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3775 ipa_node_params *old_info,
3776 ipa_node_params *new_info)
3777 {
3778 ipa_agg_replacement_value *old_av, *new_av;
3779
3780 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3781 new_info->lattices = NULL;
3782 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3783 new_info->known_csts = old_info->known_csts.copy ();
3784 new_info->known_contexts = old_info->known_contexts.copy ();
3785
3786 new_info->analysis_done = old_info->analysis_done;
3787 new_info->node_enqueued = old_info->node_enqueued;
3788 new_info->versionable = old_info->versionable;
3789
3790 old_av = ipa_get_agg_replacements_for_node (src);
3791 if (old_av)
3792 {
3793 new_av = NULL;
3794 while (old_av)
3795 {
3796 struct ipa_agg_replacement_value *v;
3797
3798 v = ggc_alloc<ipa_agg_replacement_value> ();
3799 memcpy (v, old_av, sizeof (*v));
3800 v->next = new_av;
3801 new_av = v;
3802 old_av = old_av->next;
3803 }
3804 ipa_set_node_agg_value_chain (dst, new_av);
3805 }
3806
3807 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3808
3809 if (src_trans)
3810 {
3811 ipcp_grow_transformations_if_necessary ();
3812 src_trans = ipcp_get_transformation_summary (src);
3813 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3814 vec<ipa_vr, va_gc> *&dst_vr
3815 = ipcp_get_transformation_summary (dst)->m_vr;
3816 if (vec_safe_length (src_trans->m_vr) > 0)
3817 {
3818 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3819 for (unsigned i = 0; i < src_vr->length (); ++i)
3820 dst_vr->quick_push ((*src_vr)[i]);
3821 }
3822 }
3823
3824 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3825 {
3826 ipcp_grow_transformations_if_necessary ();
3827 src_trans = ipcp_get_transformation_summary (src);
3828 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3829 vec<ipa_bits, va_gc> *&dst_bits
3830 = ipcp_get_transformation_summary (dst)->bits;
3831 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3832 for (unsigned i = 0; i < src_bits->length (); ++i)
3833 dst_bits->quick_push ((*src_bits)[i]);
3834 }
3835 }
3836
3837 /* Register our cgraph hooks if they are not already there. */
3838
3839 void
3840 ipa_register_cgraph_hooks (void)
3841 {
3842 ipa_check_create_node_params ();
3843
3844 if (!edge_removal_hook_holder)
3845 edge_removal_hook_holder =
3846 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3847 if (!edge_duplication_hook_holder)
3848 edge_duplication_hook_holder =
3849 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3850 function_insertion_hook_holder =
3851 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3852 }
3853
3854 /* Unregister our cgraph hooks if they are registered. */
3855
3856 static void
3857 ipa_unregister_cgraph_hooks (void)
3858 {
3859 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3860 edge_removal_hook_holder = NULL;
3861 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3862 edge_duplication_hook_holder = NULL;
3863 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3864 function_insertion_hook_holder = NULL;
3865 }
3866
3867 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3868 longer needed after ipa-cp. */
3869
3870 void
3871 ipa_free_all_structures_after_ipa_cp (void)
3872 {
3873 if (!optimize && !in_lto_p)
3874 {
3875 ipa_free_all_edge_args ();
3876 ipa_free_all_node_params ();
3877 ipcp_sources_pool.release ();
3878 ipcp_cst_values_pool.release ();
3879 ipcp_poly_ctx_values_pool.release ();
3880 ipcp_agg_lattice_pool.release ();
3881 ipa_unregister_cgraph_hooks ();
3882 ipa_refdesc_pool.release ();
3883 }
3884 }
3885
3886 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3887 longer needed after indirect inlining. */
3888
3889 void
3890 ipa_free_all_structures_after_iinln (void)
3891 {
3892 ipa_free_all_edge_args ();
3893 ipa_free_all_node_params ();
3894 ipa_unregister_cgraph_hooks ();
3895 ipcp_sources_pool.release ();
3896 ipcp_cst_values_pool.release ();
3897 ipcp_poly_ctx_values_pool.release ();
3898 ipcp_agg_lattice_pool.release ();
3899 ipa_refdesc_pool.release ();
3900 }
3901
3902 /* Print the ipa_tree_map data structure of function NODE
3903 to F. */
3904
3905 void
3906 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3907 {
3908 int i, count;
3909 struct ipa_node_params *info;
3910
3911 if (!node->definition)
3912 return;
3913 info = IPA_NODE_REF (node);
3914 fprintf (f, " function %s/%i parameter descriptors:\n",
3915 node->name (), node->order);
3916 count = ipa_get_param_count (info);
3917 for (i = 0; i < count; i++)
3918 {
3919 int c;
3920
3921 fprintf (f, " ");
3922 ipa_dump_param (f, info, i);
3923 if (ipa_is_param_used (info, i))
3924 fprintf (f, " used");
3925 c = ipa_get_controlled_uses (info, i);
3926 if (c == IPA_UNDESCRIBED_USE)
3927 fprintf (f, " undescribed_use");
3928 else
3929 fprintf (f, " controlled_uses=%i", c);
3930 fprintf (f, "\n");
3931 }
3932 }
3933
3934 /* Print ipa_tree_map data structures of all functions in the
3935 callgraph to F. */
3936
3937 void
3938 ipa_print_all_params (FILE * f)
3939 {
3940 struct cgraph_node *node;
3941
3942 fprintf (f, "\nFunction parameters:\n");
3943 FOR_EACH_FUNCTION (node)
3944 ipa_print_node_params (f, node);
3945 }
3946
3947 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3948
3949 vec<tree>
3950 ipa_get_vector_of_formal_parms (tree fndecl)
3951 {
3952 vec<tree> args;
3953 int count;
3954 tree parm;
3955
3956 gcc_assert (!flag_wpa);
3957 count = count_formal_params (fndecl);
3958 args.create (count);
3959 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3960 args.quick_push (parm);
3961
3962 return args;
3963 }
3964
3965 /* Return a heap allocated vector containing types of formal parameters of
3966 function type FNTYPE. */
3967
3968 vec<tree>
3969 ipa_get_vector_of_formal_parm_types (tree fntype)
3970 {
3971 vec<tree> types;
3972 int count = 0;
3973 tree t;
3974
3975 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3976 count++;
3977
3978 types.create (count);
3979 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3980 types.quick_push (TREE_VALUE (t));
3981
3982 return types;
3983 }
3984
3985 /* Modify the function declaration FNDECL and its type according to the plan in
3986 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3987 to reflect the actual parameters being modified which are determined by the
3988 base_index field. */
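/* For illustration only, with hypothetical values: for a function declared
   as

   int f (int a, struct big *b);

   an ADJUSTMENTS vector of

   { { op = IPA_PARM_OP_COPY, base_index = 0 },
   { op = IPA_PARM_OP_REMOVE, base_index = 1 } }

   rewrites the declaration to "int f (int a)", dropping B from both
   DECL_ARGUMENTS and TYPE_ARG_TYPES. */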
3989
3990 void
3991 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3992 {
3993 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3994 tree orig_type = TREE_TYPE (fndecl);
3995 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3996
3997 /* The following test is an ugly hack; some functions simply don't have any
3998 arguments in their type. This is probably a bug but well... */
3999 bool care_for_types = (old_arg_types != NULL_TREE);
4000 bool last_parm_void;
4001 vec<tree> otypes;
4002 if (care_for_types)
4003 {
4004 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4005 == void_type_node);
4006 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4007 if (last_parm_void)
4008 gcc_assert (oparms.length () + 1 == otypes.length ());
4009 else
4010 gcc_assert (oparms.length () == otypes.length ());
4011 }
4012 else
4013 {
4014 last_parm_void = false;
4015 otypes.create (0);
4016 }
4017
4018 int len = adjustments.length ();
4019 tree *link = &DECL_ARGUMENTS (fndecl);
4020 tree new_arg_types = NULL;
4021 for (int i = 0; i < len; i++)
4022 {
4023 struct ipa_parm_adjustment *adj;
4024 gcc_assert (link);
4025
4026 adj = &adjustments[i];
4027 tree parm;
4028 if (adj->op == IPA_PARM_OP_NEW)
4029 parm = NULL;
4030 else
4031 parm = oparms[adj->base_index];
4032 adj->base = parm;
4033
4034 if (adj->op == IPA_PARM_OP_COPY)
4035 {
4036 if (care_for_types)
4037 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4038 new_arg_types);
4039 *link = parm;
4040 link = &DECL_CHAIN (parm);
4041 }
4042 else if (adj->op != IPA_PARM_OP_REMOVE)
4043 {
4044 tree new_parm;
4045 tree ptype;
4046
4047 if (adj->by_ref)
4048 ptype = build_pointer_type (adj->type);
4049 else
4050 {
4051 ptype = adj->type;
4052 if (is_gimple_reg_type (ptype))
4053 {
4054 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4055 if (TYPE_ALIGN (ptype) != malign)
4056 ptype = build_aligned_type (ptype, malign);
4057 }
4058 }
4059
4060 if (care_for_types)
4061 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4062
4063 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4064 ptype);
4065 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4066 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4067 DECL_ARTIFICIAL (new_parm) = 1;
4068 DECL_ARG_TYPE (new_parm) = ptype;
4069 DECL_CONTEXT (new_parm) = fndecl;
4070 TREE_USED (new_parm) = 1;
4071 DECL_IGNORED_P (new_parm) = 1;
4072 layout_decl (new_parm, 0);
4073
4074 if (adj->op == IPA_PARM_OP_NEW)
4075 adj->base = NULL;
4076 else
4077 adj->base = parm;
4078 adj->new_decl = new_parm;
4079
4080 *link = new_parm;
4081 link = &DECL_CHAIN (new_parm);
4082 }
4083 }
4084
4085 *link = NULL_TREE;
4086
4087 tree new_reversed = NULL;
4088 if (care_for_types)
4089 {
4090 new_reversed = nreverse (new_arg_types);
4091 if (last_parm_void)
4092 {
4093 if (new_reversed)
4094 TREE_CHAIN (new_arg_types) = void_list_node;
4095 else
4096 new_reversed = void_list_node;
4097 }
4098 }
4099
4100 /* Use copy_node to preserve as much as possible from the original type
4101 (debug info, attribute lists etc.).
4102 The exception is METHOD_TYPEs, which must have a THIS argument; when
4103 we are asked to remove it, we need to build a new FUNCTION_TYPE
4104 instead. */
4105 tree new_type = NULL;
4106 if (TREE_CODE (orig_type) != METHOD_TYPE
4107 || (adjustments[0].op == IPA_PARM_OP_COPY
4108 && adjustments[0].base_index == 0))
4109 {
4110 new_type = build_distinct_type_copy (orig_type);
4111 TYPE_ARG_TYPES (new_type) = new_reversed;
4112 }
4113 else
4114 {
4115 new_type
4116 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4117 new_reversed));
4118 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4119 DECL_VINDEX (fndecl) = NULL_TREE;
4120 }
4121
4122 /* When signature changes, we need to clear builtin info. */
4123 if (DECL_BUILT_IN (fndecl))
4124 {
4125 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4126 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4127 }
4128
4129 TREE_TYPE (fndecl) = new_type;
4130 DECL_VIRTUAL_P (fndecl) = 0;
4131 DECL_LANG_SPECIFIC (fndecl) = NULL;
4132 otypes.release ();
4133 oparms.release ();
4134 }
4135
4136 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4137 If this is a directly recursive call, CS must be NULL. Otherwise it must
4138 contain the corresponding call graph edge. */
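/* A sketch of the effect, using the hypothetical ADJUSTMENTS from the
   example above ipa_modify_formal_parameters: a call

   res = f (x, &y);

   is replaced by a freshly built call

   res = f (x);

   with the LHS, location, block, call chain, flags and virtual operands
   carried over from the original statement. */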
4139
4140 void
4141 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4142 ipa_parm_adjustment_vec adjustments)
4143 {
4144 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4145 vec<tree> vargs;
4146 vec<tree, va_gc> **debug_args = NULL;
4147 gcall *new_stmt;
4148 gimple_stmt_iterator gsi, prev_gsi;
4149 tree callee_decl;
4150 int i, len;
4151
4152 len = adjustments.length ();
4153 vargs.create (len);
4154 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4155 current_node->remove_stmt_references (stmt);
4156
4157 gsi = gsi_for_stmt (stmt);
4158 prev_gsi = gsi;
4159 gsi_prev (&prev_gsi);
4160 for (i = 0; i < len; i++)
4161 {
4162 struct ipa_parm_adjustment *adj;
4163
4164 adj = &adjustments[i];
4165
4166 if (adj->op == IPA_PARM_OP_COPY)
4167 {
4168 tree arg = gimple_call_arg (stmt, adj->base_index);
4169
4170 vargs.quick_push (arg);
4171 }
4172 else if (adj->op != IPA_PARM_OP_REMOVE)
4173 {
4174 tree expr, base, off;
4175 location_t loc;
4176 unsigned int deref_align = 0;
4177 bool deref_base = false;
4178
4179 /* Since we create a new parameter out of the value of the old one, we
4180 can do the following kinds of transformations:
4181
4182 - A scalar passed by reference is converted to a scalar passed by
4183 value. (adj->by_ref is false and the type of the original
4184 actual argument is a pointer to a scalar).
4185
4186 - A part of an aggregate is passed instead of the whole aggregate.
4187 The part can be passed either by value or by reference, this is
4188 determined by value of adj->by_ref. Moreover, the code below
4189 handles both situations when the original aggregate is passed by
4190 value (its type is not a pointer) and when it is passed by
4191 reference (it is a pointer to an aggregate).
4192
4193 When the new argument is passed by reference (adj->by_ref is true)
4194 it must be a part of an aggregate and therefore we form it by
4195 simply taking the address of a reference inside the original
4196 aggregate. */
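/* E.g. (hypothetical): if the original argument is "&s" and the new
   parameter is the scalar member of S at byte offset 4 passed by value,
   the code below builds a MEM_REF equivalent to *(int *)(&s + 4) and, if
   that is not a valid call argument, loads it into a temporary first. */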
4197
4198 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4199 base = gimple_call_arg (stmt, adj->base_index);
4200 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4201 : EXPR_LOCATION (base);
4202
4203 if (TREE_CODE (base) != ADDR_EXPR
4204 && POINTER_TYPE_P (TREE_TYPE (base)))
4205 off = build_int_cst (adj->alias_ptr_type,
4206 adj->offset / BITS_PER_UNIT);
4207 else
4208 {
4209 HOST_WIDE_INT base_offset;
4210 tree prev_base;
4211 bool addrof;
4212
4213 if (TREE_CODE (base) == ADDR_EXPR)
4214 {
4215 base = TREE_OPERAND (base, 0);
4216 addrof = true;
4217 }
4218 else
4219 addrof = false;
4220 prev_base = base;
4221 base = get_addr_base_and_unit_offset (base, &base_offset);
4222 /* Aggregate arguments can have non-invariant addresses. */
4223 if (!base)
4224 {
4225 base = build_fold_addr_expr (prev_base);
4226 off = build_int_cst (adj->alias_ptr_type,
4227 adj->offset / BITS_PER_UNIT);
4228 }
4229 else if (TREE_CODE (base) == MEM_REF)
4230 {
4231 if (!addrof)
4232 {
4233 deref_base = true;
4234 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4235 }
4236 off = build_int_cst (adj->alias_ptr_type,
4237 base_offset
4238 + adj->offset / BITS_PER_UNIT);
4239 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4240 off);
4241 base = TREE_OPERAND (base, 0);
4242 }
4243 else
4244 {
4245 off = build_int_cst (adj->alias_ptr_type,
4246 base_offset
4247 + adj->offset / BITS_PER_UNIT);
4248 base = build_fold_addr_expr (base);
4249 }
4250 }
4251
4252 if (!adj->by_ref)
4253 {
4254 tree type = adj->type;
4255 unsigned int align;
4256 unsigned HOST_WIDE_INT misalign;
4257
4258 if (deref_base)
4259 {
4260 align = deref_align;
4261 misalign = 0;
4262 }
4263 else
4264 {
4265 get_pointer_alignment_1 (base, &align, &misalign);
4266 if (TYPE_ALIGN (type) > align)
4267 align = TYPE_ALIGN (type);
4268 }
4269 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4270 * BITS_PER_UNIT);
4271 misalign = misalign & (align - 1);
4272 if (misalign != 0)
4273 align = least_bit_hwi (misalign);
4274 if (align < TYPE_ALIGN (type))
4275 type = build_aligned_type (type, align);
4276 base = force_gimple_operand_gsi (&gsi, base,
4277 true, NULL, true, GSI_SAME_STMT);
4278 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4279 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4280 /* If expr is not a valid gimple call argument, emit
4281 a load into a temporary. */
4282 if (is_gimple_reg_type (TREE_TYPE (expr)))
4283 {
4284 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4285 if (gimple_in_ssa_p (cfun))
4286 {
4287 gimple_set_vuse (tem, gimple_vuse (stmt));
4288 expr = make_ssa_name (TREE_TYPE (expr), tem);
4289 }
4290 else
4291 expr = create_tmp_reg (TREE_TYPE (expr));
4292 gimple_assign_set_lhs (tem, expr);
4293 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4294 }
4295 }
4296 else
4297 {
4298 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4299 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4300 expr = build_fold_addr_expr (expr);
4301 expr = force_gimple_operand_gsi (&gsi, expr,
4302 true, NULL, true, GSI_SAME_STMT);
4303 }
4304 vargs.quick_push (expr);
4305 }
4306 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4307 {
4308 unsigned int ix;
4309 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4310 gimple *def_temp;
4311
4312 arg = gimple_call_arg (stmt, adj->base_index);
4313 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4314 {
4315 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4316 continue;
4317 arg = fold_convert_loc (gimple_location (stmt),
4318 TREE_TYPE (origin), arg);
4319 }
4320 if (debug_args == NULL)
4321 debug_args = decl_debug_args_insert (callee_decl);
4322 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4323 if (ddecl == origin)
4324 {
4325 ddecl = (**debug_args)[ix + 1];
4326 break;
4327 }
4328 if (ddecl == NULL)
4329 {
4330 ddecl = make_node (DEBUG_EXPR_DECL);
4331 DECL_ARTIFICIAL (ddecl) = 1;
4332 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4333 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4334
4335 vec_safe_push (*debug_args, origin);
4336 vec_safe_push (*debug_args, ddecl);
4337 }
4338 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4339 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4340 }
4341 }
4342
4343 if (dump_file && (dump_flags & TDF_DETAILS))
4344 {
4345 fprintf (dump_file, "replacing stmt:");
4346 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4347 }
4348
4349 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4350 vargs.release ();
4351 if (gimple_call_lhs (stmt))
4352 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4353
4354 gimple_set_block (new_stmt, gimple_block (stmt));
4355 if (gimple_has_location (stmt))
4356 gimple_set_location (new_stmt, gimple_location (stmt));
4357 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4358 gimple_call_copy_flags (new_stmt, stmt);
4359 if (gimple_in_ssa_p (cfun))
4360 {
4361 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4362 if (gimple_vdef (stmt))
4363 {
4364 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4365 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4366 }
4367 }
4368
4369 if (dump_file && (dump_flags & TDF_DETAILS))
4370 {
4371 fprintf (dump_file, "with stmt:");
4372 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4373 fprintf (dump_file, "\n");
4374 }
4375 gsi_replace (&gsi, new_stmt, true);
4376 if (cs)
4377 cs->set_call_stmt (new_stmt);
4378 do
4379 {
4380 current_node->record_stmt_references (gsi_stmt (gsi));
4381 gsi_prev (&gsi);
4382 }
4383 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4384 }
4385
4386 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4387 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4388 specifies whether the function should care about type incompatibility between
4389 the current and new expressions. If it is false, the function will leave
4390 incompatibility issues to the caller. Return true iff the expression
4391 was modified. */
4392
4393 bool
4394 ipa_modify_expr (tree *expr, bool convert,
4395 ipa_parm_adjustment_vec adjustments)
4396 {
4397 struct ipa_parm_adjustment *cand
4398 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4399 if (!cand)
4400 return false;
4401
4402 tree src;
4403 if (cand->by_ref)
4404 {
4405 src = build_simple_mem_ref (cand->new_decl);
4406 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4407 }
4408 else
4409 src = cand->new_decl;
4410
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4412 {
4413 fprintf (dump_file, "About to replace expr ");
4414 print_generic_expr (dump_file, *expr, 0);
4415 fprintf (dump_file, " with ");
4416 print_generic_expr (dump_file, src, 0);
4417 fprintf (dump_file, "\n");
4418 }
4419
4420 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4421 {
4422 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4423 *expr = vce;
4424 }
4425 else
4426 *expr = src;
4427 return true;
4428 }
4429
4430 /* If T is an SSA_NAME, return NULL if it is not a default def or
4431 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4432 the base variable is always returned, regardless if it is a default
4433 def. Return T if it is not an SSA_NAME. */
4434
4435 static tree
4436 get_ssa_base_param (tree t, bool ignore_default_def)
4437 {
4438 if (TREE_CODE (t) == SSA_NAME)
4439 {
4440 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4441 return SSA_NAME_VAR (t);
4442 else
4443 return NULL_TREE;
4444 }
4445 return t;
4446 }
4447
4448 /* Given an expression, return an adjustment entry specifying the
4449 transformation to be done on EXPR. If no suitable adjustment entry
4450 was found, returns NULL.
4451
4452 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4453 default def, otherwise bail on them.
4454
4455 If CONVERT is non-NULL, this function will set *CONVERT if the
4456 expression provided is a component reference. ADJUSTMENTS is the
4457 adjustments vector. */
4458
4459 ipa_parm_adjustment *
4460 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4461 ipa_parm_adjustment_vec adjustments,
4462 bool ignore_default_def)
4463 {
4464 if (TREE_CODE (**expr) == BIT_FIELD_REF
4465 || TREE_CODE (**expr) == IMAGPART_EXPR
4466 || TREE_CODE (**expr) == REALPART_EXPR)
4467 {
4468 *expr = &TREE_OPERAND (**expr, 0);
4469 if (convert)
4470 *convert = true;
4471 }
4472
4473 HOST_WIDE_INT offset, size, max_size;
4474 bool reverse;
4475 tree base
4476 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4477 if (!base || size == -1 || max_size == -1)
4478 return NULL;
4479
4480 if (TREE_CODE (base) == MEM_REF)
4481 {
4482 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4483 base = TREE_OPERAND (base, 0);
4484 }
4485
4486 base = get_ssa_base_param (base, ignore_default_def);
4487 if (!base || TREE_CODE (base) != PARM_DECL)
4488 return NULL;
4489
4490 struct ipa_parm_adjustment *cand = NULL;
4491 unsigned int len = adjustments.length ();
4492 for (unsigned i = 0; i < len; i++)
4493 {
4494 struct ipa_parm_adjustment *adj = &adjustments[i];
4495
4496 if (adj->base == base
4497 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4498 {
4499 cand = adj;
4500 break;
4501 }
4502 }
4503
4504 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4505 return NULL;
4506 return cand;
4507 }
4508
4509 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4510
4511 static bool
4512 index_in_adjustments_multiple_times_p (int base_index,
4513 ipa_parm_adjustment_vec adjustments)
4514 {
4515 int i, len = adjustments.length ();
4516 bool one = false;
4517
4518 for (i = 0; i < len; i++)
4519 {
4520 struct ipa_parm_adjustment *adj;
4521 adj = &adjustments[i];
4522
4523 if (adj->base_index == base_index)
4524 {
4525 if (one)
4526 return true;
4527 else
4528 one = true;
4529 }
4530 }
4531 return false;
4532 }
4533
4534
4535 /* Return adjustments that should have the same effect on function parameters
4536 and call arguments as if they were first changed according to adjustments in
4537 INNER and then by adjustments in OUTER. */
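/* A hypothetical illustration of the composition: if INNER reduces formal
   1 to the component at bit offset 64 of the original aggregate, and OUTER
   then reduces that new formal to its component at bit offset 32, the
   combined adjustment maps the original formal 1 to bit offset 64 + 32 =
   96, which is the "r.offset = in->offset + out->offset" case below. */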
4538
4539 ipa_parm_adjustment_vec
4540 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4541 ipa_parm_adjustment_vec outer)
4542 {
4543 int i, outlen = outer.length ();
4544 int inlen = inner.length ();
4545 int removals = 0;
4546 ipa_parm_adjustment_vec adjustments, tmp;
4547
4548 tmp.create (inlen);
4549 for (i = 0; i < inlen; i++)
4550 {
4551 struct ipa_parm_adjustment *n;
4552 n = &inner[i];
4553
4554 if (n->op == IPA_PARM_OP_REMOVE)
4555 removals++;
4556 else
4557 {
4558 /* FIXME: Handling of new arguments is not implemented yet. */
4559 gcc_assert (n->op != IPA_PARM_OP_NEW);
4560 tmp.quick_push (*n);
4561 }
4562 }
4563
4564 adjustments.create (outlen + removals);
4565 for (i = 0; i < outlen; i++)
4566 {
4567 struct ipa_parm_adjustment r;
4568 struct ipa_parm_adjustment *out = &outer[i];
4569 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4570
4571 memset (&r, 0, sizeof (r));
4572 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4573 if (out->op == IPA_PARM_OP_REMOVE)
4574 {
4575 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4576 {
4577 r.op = IPA_PARM_OP_REMOVE;
4578 adjustments.quick_push (r);
4579 }
4580 continue;
4581 }
4582 else
4583 {
4584 /* FIXME: Handling of new arguments is not implemented yet. */
4585 gcc_assert (out->op != IPA_PARM_OP_NEW);
4586 }
4587
4588 r.base_index = in->base_index;
4589 r.type = out->type;
4590
4591 /* FIXME: Create nonlocal value too. */
4592
4593 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4594 r.op = IPA_PARM_OP_COPY;
4595 else if (in->op == IPA_PARM_OP_COPY)
4596 r.offset = out->offset;
4597 else if (out->op == IPA_PARM_OP_COPY)
4598 r.offset = in->offset;
4599 else
4600 r.offset = in->offset + out->offset;
4601 adjustments.quick_push (r);
4602 }
4603
4604 for (i = 0; i < inlen; i++)
4605 {
4606 struct ipa_parm_adjustment *n = &inner[i];
4607
4608 if (n->op == IPA_PARM_OP_REMOVE)
4609 adjustments.quick_push (*n);
4610 }
4611
4612 tmp.release ();
4613 return adjustments;
4614 }
4615
4616 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
4617 friendly way, assuming they are meant to be applied to FNDECL. */
4618
4619 void
4620 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4621 tree fndecl)
4622 {
4623 int i, len = adjustments.length ();
4624 bool first = true;
4625 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4626
4627 fprintf (file, "IPA param adjustments: ");
4628 for (i = 0; i < len; i++)
4629 {
4630 struct ipa_parm_adjustment *adj;
4631 adj = &adjustments[i];
4632
4633 if (!first)
4634 fprintf (file, " ");
4635 else
4636 first = false;
4637
4638 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4639 print_generic_expr (file, parms[adj->base_index], 0);
4640 if (adj->base)
4641 {
4642 fprintf (file, ", base: ");
4643 print_generic_expr (file, adj->base, 0);
4644 }
4645 if (adj->new_decl)
4646 {
4647 fprintf (file, ", new_decl: ");
4648 print_generic_expr (file, adj->new_decl, 0);
4649 }
4650 if (adj->new_ssa_base)
4651 {
4652 fprintf (file, ", new_ssa_base: ");
4653 print_generic_expr (file, adj->new_ssa_base, 0);
4654 }
4655
4656 if (adj->op == IPA_PARM_OP_COPY)
4657 fprintf (file, ", copy_param");
4658 else if (adj->op == IPA_PARM_OP_REMOVE)
4659 fprintf (file, ", remove_param");
4660 else
4661 fprintf (file, ", offset %li", (long) adj->offset);
4662 if (adj->by_ref)
4663 fprintf (file, ", by_ref");
4664 print_node_brief (file, ", type: ", adj->type, 0);
4665 fprintf (file, "\n");
4666 }
4667 parms.release ();
4668 }
4669
4670 /* Dump the AV linked list. */
4671
4672 void
4673 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4674 {
4675 bool comma = false;
4676 fprintf (f, " Aggregate replacements:");
4677 for (; av; av = av->next)
4678 {
4679 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4680 av->index, av->offset);
4681 print_generic_expr (f, av->value, 0);
4682 comma = true;
4683 }
4684 fprintf (f, "\n");
4685 }
4686
4687 /* Stream out jump function JUMP_FUNC to OB. */
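/* A rough sketch of the record layout, which must stay in sync with
   ipa_read_jump_function below: the jump function type as a uhwi; a
   type-specific payload (for IPA_JF_PASS_THROUGH, the operation, the
   formal_id and, for NOP_EXPR, a one-bit agg_preserved bitpack); the
   number of aggregate items followed by the items themselves; and finally
   the known-bits and value-range information. */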
4688
4689 static void
4690 ipa_write_jump_function (struct output_block *ob,
4691 struct ipa_jump_func *jump_func)
4692 {
4693 struct ipa_agg_jf_item *item;
4694 struct bitpack_d bp;
4695 int i, count;
4696
4697 streamer_write_uhwi (ob, jump_func->type);
4698 switch (jump_func->type)
4699 {
4700 case IPA_JF_UNKNOWN:
4701 break;
4702 case IPA_JF_CONST:
4703 gcc_assert (
4704 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4705 stream_write_tree (ob, jump_func->value.constant.value, true);
4706 break;
4707 case IPA_JF_PASS_THROUGH:
4708 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4709 if (jump_func->value.pass_through.operation == NOP_EXPR)
4710 {
4711 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4712 bp = bitpack_create (ob->main_stream);
4713 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4714 streamer_write_bitpack (&bp);
4715 }
4716 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4717 == tcc_unary)
4718 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4719 else
4720 {
4721 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4722 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4723 }
4724 break;
4725 case IPA_JF_ANCESTOR:
4726 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4727 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4728 bp = bitpack_create (ob->main_stream);
4729 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4730 streamer_write_bitpack (&bp);
4731 break;
4732 }
4733
4734 count = vec_safe_length (jump_func->agg.items);
4735 streamer_write_uhwi (ob, count);
4736 if (count)
4737 {
4738 bp = bitpack_create (ob->main_stream);
4739 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4740 streamer_write_bitpack (&bp);
4741 }
4742
4743 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4744 {
4745 streamer_write_uhwi (ob, item->offset);
4746 stream_write_tree (ob, item->value, true);
4747 }
4748
4749 bp = bitpack_create (ob->main_stream);
4750 bp_pack_value (&bp, jump_func->bits.known, 1);
4751 streamer_write_bitpack (&bp);
4752 if (jump_func->bits.known)
4753 {
4754 streamer_write_widest_int (ob, jump_func->bits.value);
4755 streamer_write_widest_int (ob, jump_func->bits.mask);
4756 }
4757 bp_pack_value (&bp, jump_func->vr_known, 1);
4758 streamer_write_bitpack (&bp);
4759 if (jump_func->vr_known)
4760 {
4761 streamer_write_enum (ob->main_stream, value_range_type,
4762 VR_LAST, jump_func->m_vr.type);
4763 stream_write_tree (ob, jump_func->m_vr.min, true);
4764 stream_write_tree (ob, jump_func->m_vr.max, true);
4765 }
4766 }
4767
4768 /* Read in jump function JUMP_FUNC from IB. */
4769
4770 static void
4771 ipa_read_jump_function (struct lto_input_block *ib,
4772 struct ipa_jump_func *jump_func,
4773 struct cgraph_edge *cs,
4774 struct data_in *data_in)
4775 {
4776 enum jump_func_type jftype;
4777 enum tree_code operation;
4778 int i, count;
4779
4780 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4781 switch (jftype)
4782 {
4783 case IPA_JF_UNKNOWN:
4784 ipa_set_jf_unknown (jump_func);
4785 break;
4786 case IPA_JF_CONST:
4787 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4788 break;
4789 case IPA_JF_PASS_THROUGH:
4790 operation = (enum tree_code) streamer_read_uhwi (ib);
4791 if (operation == NOP_EXPR)
4792 {
4793 int formal_id = streamer_read_uhwi (ib);
4794 struct bitpack_d bp = streamer_read_bitpack (ib);
4795 bool agg_preserved = bp_unpack_value (&bp, 1);
4796 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4797 }
4798 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4799 {
4800 int formal_id = streamer_read_uhwi (ib);
4801 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4802 }
4803 else
4804 {
4805 tree operand = stream_read_tree (ib, data_in);
4806 int formal_id = streamer_read_uhwi (ib);
4807 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4808 operation);
4809 }
4810 break;
4811 case IPA_JF_ANCESTOR:
4812 {
4813 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4814 int formal_id = streamer_read_uhwi (ib);
4815 struct bitpack_d bp = streamer_read_bitpack (ib);
4816 bool agg_preserved = bp_unpack_value (&bp, 1);
4817 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4818 break;
4819 }
4820 }
4821
4822 count = streamer_read_uhwi (ib);
4823 vec_alloc (jump_func->agg.items, count);
4824 if (count)
4825 {
4826 struct bitpack_d bp = streamer_read_bitpack (ib);
4827 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4828 }
4829 for (i = 0; i < count; i++)
4830 {
4831 struct ipa_agg_jf_item item;
4832 item.offset = streamer_read_uhwi (ib);
4833 item.value = stream_read_tree (ib, data_in);
4834 jump_func->agg.items->quick_push (item);
4835 }
4836
4837 struct bitpack_d bp = streamer_read_bitpack (ib);
4838 bool bits_known = bp_unpack_value (&bp, 1);
4839 if (bits_known)
4840 {
4841 jump_func->bits.known = true;
4842 jump_func->bits.value = streamer_read_widest_int (ib);
4843 jump_func->bits.mask = streamer_read_widest_int (ib);
4844 }
4845 else
4846 jump_func->bits.known = false;
4847
4848 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4849 bool vr_known = bp_unpack_value (&vr_bp, 1);
4850 if (vr_known)
4851 {
4852 jump_func->vr_known = true;
4853 jump_func->m_vr.type = streamer_read_enum (ib,
4854 value_range_type,
4855 VR_LAST);
4856 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4857 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4858 }
4859 else
4860 jump_func->vr_known = false;
4861 }
4862
4863 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4864 relevant to indirect inlining to OB. */
4865
4866 static void
4867 ipa_write_indirect_edge_info (struct output_block *ob,
4868 struct cgraph_edge *cs)
4869 {
4870 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4871 struct bitpack_d bp;
4872
4873 streamer_write_hwi (ob, ii->param_index);
4874 bp = bitpack_create (ob->main_stream);
4875 bp_pack_value (&bp, ii->polymorphic, 1);
4876 bp_pack_value (&bp, ii->agg_contents, 1);
4877 bp_pack_value (&bp, ii->member_ptr, 1);
4878 bp_pack_value (&bp, ii->by_ref, 1);
4879 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4880 bp_pack_value (&bp, ii->vptr_changed, 1);
4881 streamer_write_bitpack (&bp);
4882 if (ii->agg_contents || ii->polymorphic)
4883 streamer_write_hwi (ob, ii->offset);
4884 else
4885 gcc_assert (ii->offset == 0);
4886
4887 if (ii->polymorphic)
4888 {
4889 streamer_write_hwi (ob, ii->otr_token);
4890 stream_write_tree (ob, ii->otr_type, true);
4891 ii->context.stream_out (ob);
4892 }
4893 }
4894
4895 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4896 relevant to indirect inlining from IB. */
4897
4898 static void
4899 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4900 struct data_in *data_in,
4901 struct cgraph_edge *cs)
4902 {
4903 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4904 struct bitpack_d bp;
4905
4906 ii->param_index = (int) streamer_read_hwi (ib);
4907 bp = streamer_read_bitpack (ib);
4908 ii->polymorphic = bp_unpack_value (&bp, 1);
4909 ii->agg_contents = bp_unpack_value (&bp, 1);
4910 ii->member_ptr = bp_unpack_value (&bp, 1);
4911 ii->by_ref = bp_unpack_value (&bp, 1);
4912 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4913 ii->vptr_changed = bp_unpack_value (&bp, 1);
4914 if (ii->agg_contents || ii->polymorphic)
4915 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4916 else
4917 ii->offset = 0;
4918 if (ii->polymorphic)
4919 {
4920 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4921 ii->otr_type = stream_read_tree (ib, data_in);
4922 ii->context.stream_in (ib, data_in);
4923 }
4924 }
4925
4926 /* Stream out NODE info to OB. */
4927
4928 static void
4929 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4930 {
4931 int node_ref;
4932 lto_symtab_encoder_t encoder;
4933 struct ipa_node_params *info = IPA_NODE_REF (node);
4934 int j;
4935 struct cgraph_edge *e;
4936 struct bitpack_d bp;
4937
4938 encoder = ob->decl_state->symtab_node_encoder;
4939 node_ref = lto_symtab_encoder_encode (encoder, node);
4940 streamer_write_uhwi (ob, node_ref);
4941
4942 streamer_write_uhwi (ob, ipa_get_param_count (info));
4943 for (j = 0; j < ipa_get_param_count (info); j++)
4944 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4945 bp = bitpack_create (ob->main_stream);
4946 gcc_assert (info->analysis_done
4947 || ipa_get_param_count (info) == 0);
4948 gcc_assert (!info->node_enqueued);
4949 gcc_assert (!info->ipcp_orig_node);
4950 for (j = 0; j < ipa_get_param_count (info); j++)
4951 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4952 streamer_write_bitpack (&bp);
4953 for (j = 0; j < ipa_get_param_count (info); j++)
4954 {
4955 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4956 stream_write_tree (ob, ipa_get_type (info, j), true);
4957 }
4958 for (e = node->callees; e; e = e->next_callee)
4959 {
4960 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4961
4962 streamer_write_uhwi (ob,
4963 ipa_get_cs_argument_count (args) * 2
4964 + (args->polymorphic_call_contexts != NULL));
4965 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4966 {
4967 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4968 if (args->polymorphic_call_contexts != NULL)
4969 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4970 }
4971 }
4972 for (e = node->indirect_calls; e; e = e->next_callee)
4973 {
4974 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4975
4976 streamer_write_uhwi (ob,
4977 ipa_get_cs_argument_count (args) * 2
4978 + (args->polymorphic_call_contexts != NULL));
4979 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4980 {
4981 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4982 if (args->polymorphic_call_contexts != NULL)
4983 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4984 }
4985 ipa_write_indirect_edge_info (ob, e);
4986 }
4987 }
4988
4989 /* Stream in NODE info from IB. */
4990
4991 static void
4992 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4993 struct data_in *data_in)
4994 {
4995 struct ipa_node_params *info = IPA_NODE_REF (node);
4996 int k;
4997 struct cgraph_edge *e;
4998 struct bitpack_d bp;
4999
5000 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5001
5002 for (k = 0; k < ipa_get_param_count (info); k++)
5003 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5004
5005 bp = streamer_read_bitpack (ib);
5006 if (ipa_get_param_count (info) != 0)
5007 info->analysis_done = true;
5008 info->node_enqueued = false;
5009 for (k = 0; k < ipa_get_param_count (info); k++)
5010 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5011 for (k = 0; k < ipa_get_param_count (info); k++)
5012 {
5013 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5014 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5015 }
5016 for (e = node->callees; e; e = e->next_callee)
5017 {
5018 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5019 int count = streamer_read_uhwi (ib);
5020 bool contexts_computed = count & 1;
5021 count /= 2;
5022
5023 if (!count)
5024 continue;
5025 vec_safe_grow_cleared (args->jump_functions, count);
5026 if (contexts_computed)
5027 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5028
5029 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5030 {
5031 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5032 data_in);
5033 if (contexts_computed)
5034 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5035 }
5036 }
5037 for (e = node->indirect_calls; e; e = e->next_callee)
5038 {
5039 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5040 int count = streamer_read_uhwi (ib);
5041 bool contexts_computed = count & 1;
5042 count /= 2;
5043
5044 if (count)
5045 {
5046 vec_safe_grow_cleared (args->jump_functions, count);
5047 if (contexts_computed)
5048 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5049 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5050 {
5051 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5052 data_in);
5053 if (contexts_computed)
5054 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5055 }
5056 }
5057 ipa_read_indirect_edge_info (ib, data_in, e);
5058 }
5059 }
5060
5061 /* Write jump functions for all functions in the current partition. */
5062
5063 void
5064 ipa_prop_write_jump_functions (void)
5065 {
5066 struct cgraph_node *node;
5067 struct output_block *ob;
5068 unsigned int count = 0;
5069 lto_symtab_encoder_iterator lsei;
5070 lto_symtab_encoder_t encoder;
5071
5072 if (!ipa_node_params_sum)
5073 return;
5074
5075 ob = create_output_block (LTO_section_jump_functions);
5076 encoder = ob->decl_state->symtab_node_encoder;
5077 ob->symbol = NULL;
5078 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5079 lsei_next_function_in_partition (&lsei))
5080 {
5081 node = lsei_cgraph_node (lsei);
5082 if (node->has_gimple_body_p ()
5083 && IPA_NODE_REF (node) != NULL)
5084 count++;
5085 }
5086
5087 streamer_write_uhwi (ob, count);
5088
5089 /* Process all of the functions. */
5090 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5091 lsei_next_function_in_partition (&lsei))
5092 {
5093 node = lsei_cgraph_node (lsei);
5094 if (node->has_gimple_body_p ()
5095 && IPA_NODE_REF (node) != NULL)
5096 ipa_write_node_info (ob, node);
5097 }
5098 streamer_write_char_stream (ob->main_stream, 0);
5099 produce_asm (ob, NULL);
5100 destroy_output_block (ob);
5101 }
5102
5103 /* Read section in file FILE_DATA of length LEN with data DATA. */
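/* The section produced by ipa_prop_write_jump_functions consists of a
   struct lto_function_header followed by the CFG area, the main stream and
   the string table; the offsets computed below simply walk that layout. */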
5104
5105 static void
5106 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5107 size_t len)
5108 {
5109 const struct lto_function_header *header =
5110 (const struct lto_function_header *) data;
5111 const int cfg_offset = sizeof (struct lto_function_header);
5112 const int main_offset = cfg_offset + header->cfg_size;
5113 const int string_offset = main_offset + header->main_size;
5114 struct data_in *data_in;
5115 unsigned int i;
5116 unsigned int count;
5117
5118 lto_input_block ib_main ((const char *) data + main_offset,
5119 header->main_size, file_data->mode_table);
5120
5121 data_in =
5122 lto_data_in_create (file_data, (const char *) data + string_offset,
5123 header->string_size, vNULL);
5124 count = streamer_read_uhwi (&ib_main);
5125
5126 for (i = 0; i < count; i++)
5127 {
5128 unsigned int index;
5129 struct cgraph_node *node;
5130 lto_symtab_encoder_t encoder;
5131
5132 index = streamer_read_uhwi (&ib_main);
5133 encoder = file_data->symtab_node_encoder;
5134 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5135 index));
5136 gcc_assert (node->definition);
5137 ipa_read_node_info (&ib_main, node, data_in);
5138 }
5139 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5140 len);
5141 lto_data_in_delete (data_in);
5142 }
5143
5144 /* Read ipcp jump functions. */
5145
5146 void
5147 ipa_prop_read_jump_functions (void)
5148 {
5149 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5150 struct lto_file_decl_data *file_data;
5151 unsigned int j = 0;
5152
5153 ipa_check_create_node_params ();
5154 ipa_check_create_edge_args ();
5155 ipa_register_cgraph_hooks ();
5156
5157 while ((file_data = file_data_vec[j++]))
5158 {
5159 size_t len;
5160 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5161
5162 if (data)
5163 ipa_prop_read_section (file_data, data, len);
5164 }
5165 }
5166
5167 /* After merging units, we can get a mismatch in argument counts.
5168 Also, decl merging might have rendered parameter lists obsolete.
5169 Also compute called_with_variable_arg info. */
5170
5171 void
5172 ipa_update_after_lto_read (void)
5173 {
5174 ipa_check_create_node_params ();
5175 ipa_check_create_edge_args ();
5176 }
5177
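/* Stream out the aggregate value replacement chain and the IPA-CP
   transformation data (value ranges and known bits) for NODE to OB. */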
5178 void
5179 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5180 {
5181 int node_ref;
5182 unsigned int count = 0;
5183 lto_symtab_encoder_t encoder;
5184 struct ipa_agg_replacement_value *aggvals, *av;
5185
5186 aggvals = ipa_get_agg_replacements_for_node (node);
5187 encoder = ob->decl_state->symtab_node_encoder;
5188 node_ref = lto_symtab_encoder_encode (encoder, node);
5189 streamer_write_uhwi (ob, node_ref);
5190
5191 for (av = aggvals; av; av = av->next)
5192 count++;
5193 streamer_write_uhwi (ob, count);
5194
5195 for (av = aggvals; av; av = av->next)
5196 {
5197 struct bitpack_d bp;
5198
5199 streamer_write_uhwi (ob, av->offset);
5200 streamer_write_uhwi (ob, av->index);
5201 stream_write_tree (ob, av->value, true);
5202
5203 bp = bitpack_create (ob->main_stream);
5204 bp_pack_value (&bp, av->by_ref, 1);
5205 streamer_write_bitpack (&bp);
5206 }
5207
5208 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5209 if (ts && vec_safe_length (ts->m_vr) > 0)
5210 {
5211 count = ts->m_vr->length ();
5212 streamer_write_uhwi (ob, count);
5213 for (unsigned i = 0; i < count; ++i)
5214 {
5215 struct bitpack_d bp;
5216 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5217 bp = bitpack_create (ob->main_stream);
5218 bp_pack_value (&bp, parm_vr->known, 1);
5219 streamer_write_bitpack (&bp);
5220 if (parm_vr->known)
5221 {
5222 streamer_write_enum (ob->main_stream, value_range_type,
5223 VR_LAST, parm_vr->type);
5224 streamer_write_wide_int (ob, parm_vr->min);
5225 streamer_write_wide_int (ob, parm_vr->max);
5226 }
5227 }
5228 }
5229 else
5230 streamer_write_uhwi (ob, 0);
5231
5232 if (ts && vec_safe_length (ts->bits) > 0)
5233 {
5234 count = ts->bits->length ();
5235 streamer_write_uhwi (ob, count);
5236
5237 for (unsigned i = 0; i < count; ++i)
5238 {
5239 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5240 struct bitpack_d bp = bitpack_create (ob->main_stream);
5241 bp_pack_value (&bp, bits_jfunc.known, 1);
5242 streamer_write_bitpack (&bp);
5243 if (bits_jfunc.known)
5244 {
5245 streamer_write_widest_int (ob, bits_jfunc.value);
5246 streamer_write_widest_int (ob, bits_jfunc.mask);
5247 }
5248 }
5249 }
5250 else
5251 streamer_write_uhwi (ob, 0);
5252 }
5253
5254 /* Stream in the IPA-CP transformation data (aggregate value replacement chain, value ranges, known bits) for NODE from IB. */
5255
5256 static void
5257 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5258 data_in *data_in)
5259 {
5260 struct ipa_agg_replacement_value *aggvals = NULL;
5261 unsigned int count, i;
5262
5263 count = streamer_read_uhwi (ib);
5264 for (i = 0; i < count; i++)
5265 {
5266 struct ipa_agg_replacement_value *av;
5267 struct bitpack_d bp;
5268
5269 av = ggc_alloc<ipa_agg_replacement_value> ();
5270 av->offset = streamer_read_uhwi (ib);
5271 av->index = streamer_read_uhwi (ib);
5272 av->value = stream_read_tree (ib, data_in);
5273 bp = streamer_read_bitpack (ib);
5274 av->by_ref = bp_unpack_value (&bp, 1);
5275 av->next = aggvals;
5276 aggvals = av;
5277 }
5278 ipa_set_node_agg_value_chain (node, aggvals);
5279
5280 count = streamer_read_uhwi (ib);
5281 if (count > 0)
5282 {
5283 ipcp_grow_transformations_if_necessary ();
5284
5285 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5286 vec_safe_grow_cleared (ts->m_vr, count);
5287 for (i = 0; i < count; i++)
5288 {
5289 ipa_vr *parm_vr;
5290 parm_vr = &(*ts->m_vr)[i];
5291 struct bitpack_d bp;
5292 bp = streamer_read_bitpack (ib);
5293 parm_vr->known = bp_unpack_value (&bp, 1);
5294 if (parm_vr->known)
5295 {
5296 parm_vr->type = streamer_read_enum (ib, value_range_type,
5297 VR_LAST);
5298 parm_vr->min = streamer_read_wide_int (ib);
5299 parm_vr->max = streamer_read_wide_int (ib);
5300 }
5301 }
5302 }
5303 count = streamer_read_uhwi (ib);
5304 if (count > 0)
5305 {
5306 ipcp_grow_transformations_if_necessary ();
5307
5308 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5309 vec_safe_grow_cleared (ts->bits, count);
5310
5311 for (i = 0; i < count; i++)
5312 {
5313 ipa_bits& bits_jfunc = (*ts->bits)[i];
5314 struct bitpack_d bp = streamer_read_bitpack (ib);
5315 bits_jfunc.known = bp_unpack_value (&bp, 1);
5316 if (bits_jfunc.known)
5317 {
5318 bits_jfunc.value = streamer_read_widest_int (ib);
5319 bits_jfunc.mask = streamer_read_widest_int (ib);
5320 }
5321 }
5322 }
5323 }
5324
5325 /* Write all aggregate replacements for nodes in the current partition. */
5326
5327 void
5328 ipcp_write_transformation_summaries (void)
5329 {
5330 struct cgraph_node *node;
5331 struct output_block *ob;
5332 unsigned int count = 0;
5333 lto_symtab_encoder_iterator lsei;
5334 lto_symtab_encoder_t encoder;
5335
5336 ob = create_output_block (LTO_section_ipcp_transform);
5337 encoder = ob->decl_state->symtab_node_encoder;
5338 ob->symbol = NULL;
5339 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5340 lsei_next_function_in_partition (&lsei))
5341 {
5342 node = lsei_cgraph_node (lsei);
5343 if (node->has_gimple_body_p ())
5344 count++;
5345 }
5346
5347 streamer_write_uhwi (ob, count);
5348
5349 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5350 lsei_next_function_in_partition (&lsei))
5351 {
5352 node = lsei_cgraph_node (lsei);
5353 if (node->has_gimple_body_p ())
5354 write_ipcp_transformation_info (ob, node);
5355 }
5356 streamer_write_char_stream (ob->main_stream, 0);
5357 produce_asm (ob, NULL);
5358 destroy_output_block (ob);
5359 }
5360
5361 /* Read replacements section in file FILE_DATA of length LEN with data
5362 DATA. */
5363
5364 static void
5365 read_replacements_section (struct lto_file_decl_data *file_data,
5366 const char *data,
5367 size_t len)
5368 {
5369 const struct lto_function_header *header =
5370 (const struct lto_function_header *) data;
5371 const int cfg_offset = sizeof (struct lto_function_header);
5372 const int main_offset = cfg_offset + header->cfg_size;
5373 const int string_offset = main_offset + header->main_size;
5374 struct data_in *data_in;
5375 unsigned int i;
5376 unsigned int count;
5377
5378 lto_input_block ib_main ((const char *) data + main_offset,
5379 header->main_size, file_data->mode_table);
5380
5381 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5382 header->string_size, vNULL);
5383 count = streamer_read_uhwi (&ib_main);
5384
5385 for (i = 0; i < count; i++)
5386 {
5387 unsigned int index;
5388 struct cgraph_node *node;
5389 lto_symtab_encoder_t encoder;
5390
5391 index = streamer_read_uhwi (&ib_main);
5392 encoder = file_data->symtab_node_encoder;
5393 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5394 index));
5395 gcc_assert (node->definition);
5396 read_ipcp_transformation_info (&ib_main, node, data_in);
5397 }
5398 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5399 len);
5400 lto_data_in_delete (data_in);
5401 }
5402
5403 /* Read IPA-CP transformation summaries (aggregate replacements etc.). */
5404
5405 void
5406 ipcp_read_transformation_summaries (void)
5407 {
5408 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5409 struct lto_file_decl_data *file_data;
5410 unsigned int j = 0;
5411
5412 while ((file_data = file_data_vec[j++]))
5413 {
5414 size_t len;
5415 const char *data = lto_get_section_data (file_data,
5416 LTO_section_ipcp_transform,
5417 NULL, &len);
5418 if (data)
5419 read_replacements_section (file_data, data, len);
5420 }
5421 }
5422
5423 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5424 NODE. */
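/* Hypothetical example: if NODE's clone skips argument 1 of an original
   three-argument function, the remapping computed below is 0 -> 0,
   1 -> -1 (skipped) and 2 -> 1, so a replacement recorded for original
   index 2 is redirected to index 1 in the clone. */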
5425
5426 static void
5427 adjust_agg_replacement_values (struct cgraph_node *node,
5428 struct ipa_agg_replacement_value *aggval)
5429 {
5430 struct ipa_agg_replacement_value *v;
5431 int i, c = 0, d = 0, *adj;
5432
5433 if (!node->clone.combined_args_to_skip)
5434 return;
5435
5436 for (v = aggval; v; v = v->next)
5437 {
5438 gcc_assert (v->index >= 0);
5439 if (c < v->index)
5440 c = v->index;
5441 }
5442 c++;
5443
5444 adj = XALLOCAVEC (int, c);
5445 for (i = 0; i < c; i++)
5446 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5447 {
5448 adj[i] = -1;
5449 d++;
5450 }
5451 else
5452 adj[i] = i - d;
5453
5454 for (v = aggval; v; v = v->next)
5455 v->index = adj[v->index];
5456 }
5457
5458 /* Dominator walker driving the ipcp modification phase. */
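/* For example (hypothetical input): if the aggregate replacement chain
   records the constant 42 for parameter 0 at offset 0, a load such as

   x_1 = param_0->field;

   is rewritten by before_dom_children below into "x_1 = 42;". */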
5459
5460 class ipcp_modif_dom_walker : public dom_walker
5461 {
5462 public:
5463 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5464 vec<ipa_param_descriptor, va_gc> *descs,
5465 struct ipa_agg_replacement_value *av,
5466 bool *sc, bool *cc)
5467 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5468 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5469
5470 virtual edge before_dom_children (basic_block);
5471
5472 private:
5473 struct ipa_func_body_info *m_fbi;
5474 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5475 struct ipa_agg_replacement_value *m_aggval;
5476 bool *m_something_changed, *m_cfg_changed;
5477 };
5478
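/* Scan the statements of BB and replace loads from aggregates passed in (or
   pointed to by) parameters with the constants recorded in m_aggval.  Return
   NULL so that the dominator walk continues into all children.  */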
5479 edge
5480 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5481 {
5482 gimple_stmt_iterator gsi;
5483 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5484 {
5485 struct ipa_agg_replacement_value *v;
5486 gimple *stmt = gsi_stmt (gsi);
5487 tree rhs, val, t;
5488 HOST_WIDE_INT offset, size;
5489 int index;
5490 bool by_ref, vce;
5491
5492 if (!gimple_assign_load_p (stmt))
5493 continue;
5494 rhs = gimple_assign_rhs1 (stmt);
5495 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5496 continue;
5497
5498 vce = false;
5499 t = rhs;
5500 while (handled_component_p (t))
5501 {
5502 /* V_C_E can do things like convert an array of integers to one
5503 bigger integer and similar things we do not handle below. */
5504 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5505 {
5506 vce = true;
5507 break;
5508 }
5509 t = TREE_OPERAND (t, 0);
5510 }
5511 if (vce)
5512 continue;
5513
5514 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5515 &offset, &size, &by_ref))
5516 continue;
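/* See whether a constant was recorded for this parameter at this offset;
   it is only usable if the by-reference flag and the size of the recorded
   value also agree with the load.  */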
5517 for (v = m_aggval; v; v = v->next)
5518 if (v->index == index
5519 && v->offset == offset)
5520 break;
5521 if (!v
5522 || v->by_ref != by_ref
5523 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5524 continue;
5525
5526 gcc_checking_assert (is_gimple_ip_invariant (v->value));
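/* If the type of the load differs from the type of the recorded value,
   try folding a simple conversion, or reinterpret the bits with a
   VIEW_CONVERT_EXPR when only the sizes match; otherwise the replacement
   cannot be performed.  */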
5527 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5528 {
5529 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5530 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5531 else if (TYPE_SIZE (TREE_TYPE (rhs))
5532 == TYPE_SIZE (TREE_TYPE (v->value)))
5533 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5534 else
5535 {
5536 if (dump_file)
5537 {
5538 fprintf (dump_file, " const ");
5539 print_generic_expr (dump_file, v->value, 0);
5540 fprintf (dump_file, " can't be converted to type of ");
5541 print_generic_expr (dump_file, rhs, 0);
5542 fprintf (dump_file, "\n");
5543 }
5544 continue;
5545 }
5546 }
5547 else
5548 val = v->value;
5549
5550 if (dump_file && (dump_flags & TDF_DETAILS))
5551 {
5552 fprintf (dump_file, "Modifying stmt:\n ");
5553 print_gimple_stmt (dump_file, stmt, 0, 0);
5554 }
5555 gimple_assign_set_rhs_from_tree (&gsi, val);
5556 update_stmt (stmt);
5557
5558 if (dump_file && (dump_flags & TDF_DETAILS))
5559 {
5560 fprintf (dump_file, "into:\n ");
5561 print_gimple_stmt (dump_file, stmt, 0, 0);
5562 fprintf (dump_file, "\n");
5563 }
5564
5565 *m_something_changed = true;
5566 if (maybe_clean_eh_stmt (stmt)
5567 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5568 *m_cfg_changed = true;
5569 }
5570 return NULL;
5571 }
5572
5573 /* Update the known-bits information of the formal parameters of NODE as
5574 described in its ipcp_transformation_summary. */
5575
5576 static void
5577 ipcp_update_bits (struct cgraph_node *node)
5578 {
5579 tree parm = DECL_ARGUMENTS (node->decl);
5580 tree next_parm = parm;
5581 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5582
5583 if (!ts || vec_safe_length (ts->bits) == 0)
5584 return;
5585
5586 vec<ipa_bits, va_gc> &bits = *ts->bits;
5587 unsigned count = bits.length ();
5588
5589 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5590 {
5591 if (node->clone.combined_args_to_skip
5592 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5593 continue;
5594
5595 gcc_checking_assert (parm);
5596 next_parm = DECL_CHAIN (parm);
5597
5598 if (!bits[i].known
5599 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5600 || !is_gimple_reg (parm))
5601 continue;
5602
5603 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5604 if (!ddef)
5605 continue;
5606
5607 if (dump_file)
5608 {
5609 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5610 print_hex (bits[i].mask, dump_file);
5611 fprintf (dump_file, "\n");
5612 }
5613
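/* For integral parameters record the nonzero bits: bits set in the mask
   are unknown and bits set in the value are known to be one, so their
   union over-approximates the bits that may be nonzero.  */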
5614 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5615 {
5616 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5617 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5618
5619 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5620 | wide_int::from (bits[i].value, prec, sgn);
5621 set_nonzero_bits (ddef, nonzero_bits);
5622 }
5623 else
5624 {
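/* For pointers derive alignment information: the lowest unknown bit (the
   lowest bit set in the mask) bounds the alignment and the known low bits
   of the value give the misalignment.  E.g. a mask with the low three
   bits clear describes a pointer aligned to 8 bytes.  */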
5625 unsigned tem = bits[i].mask.to_uhwi ();
5626 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5627 unsigned align = tem & -tem;
5628 unsigned misalign = bitpos & (align - 1);
5629
5630 if (align > 1)
5631 {
5632 if (dump_file)
5633 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5634
5635 unsigned old_align, old_misalign;
5636 struct ptr_info_def *pi = get_ptr_info (ddef);
5637 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5638
5639 if (old_known
5640 && old_align > align)
5641 {
5642 if (dump_file)
5643 {
5644 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5645 if ((old_misalign & (align - 1)) != misalign)
5646 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5647 old_misalign, misalign);
5648 }
5649 continue;
5650 }
5651
5652 if (old_known
5653 && ((misalign & (old_align - 1)) != old_misalign)
5654 && dump_file)
5655 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5656 old_misalign, misalign);
5657
5658 set_ptr_info_alignment (pi, align, misalign);
5659 }
5660 }
5661 }
5662 }
5663
5664 /* Update the value ranges of the formal parameters of NODE as described
5665 in its ipcp_transformation_summary. */
5666
5667 static void
5668 ipcp_update_vr (struct cgraph_node *node)
5669 {
5670 tree fndecl = node->decl;
5671 tree parm = DECL_ARGUMENTS (fndecl);
5672 tree next_parm = parm;
5673 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5674 if (!ts || vec_safe_length (ts->m_vr) == 0)
5675 return;
5676 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5677 unsigned count = vr.length ();
5678
5679 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5680 {
5681 if (node->clone.combined_args_to_skip
5682 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5683 continue;
5684 gcc_checking_assert (parm);
5685 next_parm = DECL_CHAIN (parm);
5686 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5687
5688 if (!ddef || !is_gimple_reg (parm))
5689 continue;
5690
5691 if (vr[i].known
5692 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5693 {
5694 tree type = TREE_TYPE (ddef);
5695 unsigned prec = TYPE_PRECISION (type);
5696 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5697 {
5698 if (dump_file)
5699 {
5700 fprintf (dump_file, "Setting value range of param %u ", i);
5701 fprintf (dump_file, "%s[",
5702 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5703 print_decs (vr[i].min, dump_file);
5704 fprintf (dump_file, ", ");
5705 print_decs (vr[i].max, dump_file);
5706 fprintf (dump_file, "]\n");
5707 }
5708 set_range_info (ddef, vr[i].type,
5709 wide_int_storage::from (vr[i].min, prec,
5710 TYPE_SIGN (type)),
5711 wide_int_storage::from (vr[i].max, prec,
5712 TYPE_SIGN (type)));
5713 }
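/* An anti-range ~[0, 0] on a pointer means the pointer is known to be
   non-NULL.  */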
5714 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5715 && vr[i].type == VR_ANTI_RANGE
5716 && wi::eq_p (vr[i].min, 0)
5717 && wi::eq_p (vr[i].max, 0))
5718 {
5719 if (dump_file)
5720 fprintf (dump_file, "Setting nonnull for %u\n", i);
5721 set_ptr_nonnull (ddef);
5722 }
5723 }
5724 }
5725 }
5726
5727 /* IPCP transformation phase: apply the propagated known bits, value ranges and aggregate replacement values to the body of NODE. */
5728
5729 unsigned int
5730 ipcp_transform_function (struct cgraph_node *node)
5731 {
5732 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5733 struct ipa_func_body_info fbi;
5734 struct ipa_agg_replacement_value *aggval;
5735 int param_count;
5736 bool cfg_changed = false, something_changed = false;
5737
5738 gcc_checking_assert (cfun);
5739 gcc_checking_assert (current_function_decl);
5740
5741 if (dump_file)
5742 fprintf (dump_file, "Modification phase of node %s/%i\n",
5743 node->name (), node->order);
5744
5745 ipcp_update_bits (node);
5746 ipcp_update_vr (node);
5747 aggval = ipa_get_agg_replacements_for_node (node);
5748 if (!aggval)
5749 return 0;
5750 param_count = count_formal_params (node->decl);
5751 if (param_count == 0)
5752 return 0;
5753 adjust_agg_replacement_values (node, aggval);
5754 if (dump_file)
5755 ipa_dump_agg_replacement_values (dump_file, aggval);
5756
5757 fbi.node = node;
5758 fbi.info = NULL;
5759 fbi.bb_infos = vNULL;
5760 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5761 fbi.param_count = param_count;
5762 fbi.aa_walked = 0;
5763
5764 vec_safe_grow_cleared (descriptors, param_count);
5765 ipa_populate_param_decls (node, *descriptors);
5766 calculate_dominance_info (CDI_DOMINATORS);
5767 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5768 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5769
5770 int i;
5771 struct ipa_bb_info *bi;
5772 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5773 free_ipa_bb_info (bi);
5774 fbi.bb_infos.release ();
5775 free_dominance_info (CDI_DOMINATORS);
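/* The transformation data has now been applied to the body, so clear it
   from the summary to make sure it is not applied a second time.  */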
5776 (*ipcp_transformations)[node->uid].agg_values = NULL;
5777 (*ipcp_transformations)[node->uid].bits = NULL;
5778 (*ipcp_transformations)[node->uid].m_vr = NULL;
5779
5780 vec_free (descriptors);
5781
5782 if (!something_changed)
5783 return 0;
5784 else if (cfg_changed)
5785 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5786 else
5787 return TODO_update_ssa_only_virtuals;
5788 }