/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
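
/* For illustration, a function such as

     __attribute__ ((optimize ("O0")))
     int foo (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize cleared, so the
   predicate above forbids its analysis.  (Hypothetical example, not taken
   from this file.)  */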

/* Return the index of the formal parameter whose tree is PTREE in the
   function whose parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return the index of the formal parameter whose tree is PTREE in the
   function which corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
   to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    vec_safe_grow_cleared (info->descriptors, param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors)
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, *info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
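
/* For illustration, the dump produced by the function above can look like

       param 0: CONST: 4
	 Unknown bits
	 Unknown VR
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
	 Unknown bits
	 Unknown VR

   (hypothetical output; the exact strings come from the fprintf calls
   above).  */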


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function that carries no knowledge whatsoever.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of the
   analysis of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
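
/* For illustration, after inlining a constructor the kind of statement the
   function above is meant to flag typically looks like

     this_1(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a non-aggregate, pointer-typed COMPONENT_REF whose
   field is DECL_VIRTUAL_P.  (Hypothetical GIMPLE, shown only to illustrate
   the checks above.)  */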

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot change the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy way to tie the two together, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
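
/* For illustration, in a (hypothetical) function such as

     int
     foo (int a)
     {
       use_address (&a);
       bar ();
       return a;
     }

   the load from A before the return is not known to be preserved: A is
   addressable and escapes, so the call to bar may have modified it, and the
   vdef walk above will report a potential modification.  */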

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   FBI and DESCRIPTORS describe parameters of the current function (but FBI
   can be NULL), STMT is the load statement.  If the function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual formal parameters; it is used
   across different stages of IPA optimizations.  FBI contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
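
/* For illustration, under the common C++ ABI a pointer-to-member-function is
   laid out like the following record, which is what the function above
   matches: a pointer-to-method field (here __pfn) followed by an integral
   field (here __delta) used to adjust the THIS pointer:

     struct
     {
       void (T::*__pfn) ();
       long __delta;
     };

   This is illustrative only; field names and the exact delta type depend on
   the ABI.  */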

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such an
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
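
/* For illustration (offsets and sizes in bits): if the list already contains
   an entry covering [0, 32) and a new store at offset 16 with size 32 is
   considered, the ranges partially overlap and the function above returns
   NULL.  If the new store covers exactly [0, 32), *ALREADY_THERE is set
   instead: because the caller walks statements backwards, the entry already
   in the list describes a store that happens later and overwrites this
   one.  */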

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
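
/* For illustration, given a (hypothetical) caller fragment such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk in the function below records the stores to s.a and s.b,
   and the resulting aggregate jump function describes the contents of the
   pointed-to memory at the call as two constant items at the corresponding
   offsets.  */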
1454
1455 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1456 in ARG is filled in with constant values. ARG can either be an aggregate
1457 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1458 aggregate. JFUNC is the jump function into which the constants are
1459 subsequently stored. */
1460
1461 static void
1462 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1463 tree arg_type,
1464 struct ipa_jump_func *jfunc)
1465 {
1466 struct ipa_known_agg_contents_list *list = NULL;
1467 int item_count = 0, const_count = 0;
1468 HOST_WIDE_INT arg_offset, arg_size;
1469 gimple_stmt_iterator gsi;
1470 tree arg_base;
1471 bool check_ref, by_ref;
1472 ao_ref r;
1473
1474 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1475 return;
1476
1477 /* The function operates in three stages. First, we prepare check_ref, r,
1478 arg_base and arg_offset based on what is actually passed as an actual
1479 argument. */
1480
1481 if (POINTER_TYPE_P (arg_type))
1482 {
1483 by_ref = true;
1484 if (TREE_CODE (arg) == SSA_NAME)
1485 {
1486 tree type_size;
1487 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1488 return;
1489 check_ref = true;
1490 arg_base = arg;
1491 arg_offset = 0;
1492 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1493 arg_size = tree_to_uhwi (type_size);
1494 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1495 }
1496 else if (TREE_CODE (arg) == ADDR_EXPR)
1497 {
1498 HOST_WIDE_INT arg_max_size;
1499 bool reverse;
1500
1501 arg = TREE_OPERAND (arg, 0);
1502 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1503 &arg_max_size, &reverse);
1504 if (arg_max_size == -1
1505 || arg_max_size != arg_size
1506 || arg_offset < 0)
1507 return;
1508 if (DECL_P (arg_base))
1509 {
1510 check_ref = false;
1511 ao_ref_init (&r, arg_base);
1512 }
1513 else
1514 return;
1515 }
1516 else
1517 return;
1518 }
1519 else
1520 {
1521 HOST_WIDE_INT arg_max_size;
1522 bool reverse;
1523
1524 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1525
1526 by_ref = false;
1527 check_ref = false;
1528 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1529 &arg_max_size, &reverse);
1530 if (arg_max_size == -1
1531 || arg_max_size != arg_size
1532 || arg_offset < 0)
1533 return;
1534
1535 ao_ref_init (&r, arg);
1536 }
1537
1538 /* Second stage walks back the BB, looks at individual statements and as long
1539 as it is confident of how the statements affect contents of the
1540 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1541 describing it. */
1542 gsi = gsi_for_stmt (call);
1543 gsi_prev (&gsi);
1544 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1545 {
1546 struct ipa_known_agg_contents_list *n, **p;
1547 gimple *stmt = gsi_stmt (gsi);
1548 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1549 tree lhs, rhs, lhs_base;
1550 bool reverse;
1551
1552 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1553 continue;
1554 if (!gimple_assign_single_p (stmt))
1555 break;
1556
1557 lhs = gimple_assign_lhs (stmt);
1558 rhs = gimple_assign_rhs1 (stmt);
1559 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1560 || TREE_CODE (lhs) == BIT_FIELD_REF
1561 || contains_bitfld_component_ref_p (lhs))
1562 break;
1563
1564 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1565 &lhs_max_size, &reverse);
1566 if (lhs_max_size == -1
1567 || lhs_max_size != lhs_size)
1568 break;
1569
1570 if (check_ref)
1571 {
1572 if (TREE_CODE (lhs_base) != MEM_REF
1573 || TREE_OPERAND (lhs_base, 0) != arg_base
1574 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1575 break;
1576 }
1577 else if (lhs_base != arg_base)
1578 {
1579 if (DECL_P (lhs_base))
1580 continue;
1581 else
1582 break;
1583 }
1584
1585 bool already_there = false;
1586 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1587 &already_there);
1588 if (!p)
1589 break;
1590 if (already_there)
1591 continue;
1592
1593 rhs = get_ssa_def_if_simple_copy (rhs);
1594 n = XALLOCA (struct ipa_known_agg_contents_list);
1595 n->size = lhs_size;
1596 n->offset = lhs_offset;
1597 if (is_gimple_ip_invariant (rhs))
1598 {
1599 n->constant = rhs;
1600 const_count++;
1601 }
1602 else
1603 n->constant = NULL_TREE;
1604 n->next = *p;
1605 *p = n;
1606
1607 item_count++;
1608 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1609 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1610 break;
1611 }
1612
1613 /* Third stage just goes over the list and creates an appropriate vector of
1614 ipa_agg_jf_item structures out of it, of sourse only if there are
1615 any known constants to begin with. */
1616
1617 if (const_count)
1618 {
1619 jfunc->agg.by_ref = by_ref;
1620 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1621 }
1622 }
1623
1624 /* Return the Ith param type of callee associated with call graph
1625 edge E. */
1626
1627 tree
1628 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1629 {
1630 int n;
1631 tree type = (e->callee
1632 ? TREE_TYPE (e->callee->decl)
1633 : gimple_call_fntype (e->call_stmt));
1634 tree t = TYPE_ARG_TYPES (type);
1635
1636 for (n = 0; n < i; n++)
1637 {
1638 if (!t)
1639 break;
1640 t = TREE_CHAIN (t);
1641 }
1642 if (t)
1643 return TREE_VALUE (t);
1644 if (!e->callee)
1645 return NULL;
1646 t = DECL_ARGUMENTS (e->callee->decl);
1647 for (n = 0; n < i; n++)
1648 {
1649 if (!t)
1650 return NULL;
1651 t = TREE_CHAIN (t);
1652 }
1653 if (t)
1654 return TREE_TYPE (t);
1655 return NULL;
1656 }
1657
1658 /* Compute jump function for all arguments of callsite CS and insert the
1659 information in the jump_functions array in the ipa_edge_args corresponding
1660 to this callsite. */
1661
1662 static void
1663 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1664 struct cgraph_edge *cs)
1665 {
1666 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1667 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1668 gcall *call = cs->call_stmt;
1669 int n, arg_num = gimple_call_num_args (call);
1670 bool useful_context = false;
1671
1672 if (arg_num == 0 || args->jump_functions)
1673 return;
1674 vec_safe_grow_cleared (args->jump_functions, arg_num);
1675 if (flag_devirtualize)
1676 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1677
1678 if (gimple_call_internal_p (call))
1679 return;
1680 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1681 return;
1682
1683 for (n = 0; n < arg_num; n++)
1684 {
1685 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1686 tree arg = gimple_call_arg (call, n);
1687 tree param_type = ipa_get_callee_param_type (cs, n);
1688 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1689 {
1690 tree instance;
1691 struct ipa_polymorphic_call_context context (cs->caller->decl,
1692 arg, cs->call_stmt,
1693 &instance);
1694 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1695 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1696 if (!context.useless_p ())
1697 useful_context = true;
1698 }
1699
1700 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1701 {
1702 bool addr_nonzero = false;
1703 bool strict_overflow = false;
1704
1705 if (TREE_CODE (arg) == SSA_NAME
1706 && param_type
1707 && get_ptr_nonnull (arg))
1708 addr_nonzero = true;
1709 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1710 addr_nonzero = true;
1711
1712 if (addr_nonzero)
1713 {
1714 jfunc->vr_known = true;
1715 jfunc->m_vr.type = VR_ANTI_RANGE;
1716 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1717 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1718 jfunc->m_vr.equiv = NULL;
1719 }
1720 else
1721 gcc_assert (!jfunc->vr_known);
1722 }
1723 else
1724 {
1725 wide_int min, max;
1726 value_range_type type;
1727 if (TREE_CODE (arg) == SSA_NAME
1728 && param_type
1729 && (type = get_range_info (arg, &min, &max))
1730 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1731 {
1732 value_range vr;
1733
1734 vr.type = type;
1735 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1736 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1737 vr.equiv = NULL;
1738 extract_range_from_unary_expr (&jfunc->m_vr,
1739 NOP_EXPR,
1740 param_type,
1741 &vr, TREE_TYPE (arg));
1742 if (jfunc->m_vr.type == VR_RANGE
1743 || jfunc->m_vr.type == VR_ANTI_RANGE)
1744 jfunc->vr_known = true;
1745 else
1746 jfunc->vr_known = false;
1747 }
1748 else
1749 gcc_assert (!jfunc->vr_known);
1750 }
1751
1752 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1753 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1754 {
1755 jfunc->bits.known = true;
1756
1757 if (TREE_CODE (arg) == SSA_NAME)
1758 {
1759 jfunc->bits.value = 0;
1760 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1761 TYPE_SIGN (TREE_TYPE (arg)));
1762 }
1763 else
1764 {
1765 jfunc->bits.value = wi::to_widest (arg);
1766 jfunc->bits.mask = 0;
1767 }
1768 }
1769 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1770 {
1771 unsigned HOST_WIDE_INT bitpos;
1772 unsigned align;
1773
1774 jfunc->bits.known = true;
1775 get_pointer_alignment_1 (arg, &align, &bitpos);
1776 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1777 .and_not (align / BITS_PER_UNIT - 1);
1778 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1779 }
1780 else
1781 gcc_assert (!jfunc->bits.known);
1782
1783 if (is_gimple_ip_invariant (arg)
1784 || (VAR_P (arg)
1785 && is_global_var (arg)
1786 && TREE_READONLY (arg)))
1787 ipa_set_jf_constant (jfunc, arg, cs);
1788 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1789 && TREE_CODE (arg) == PARM_DECL)
1790 {
1791 int index = ipa_get_param_decl_index (info, arg);
1792
1793 gcc_assert (index >=0);
1794 /* Aggregate passed by value, check for pass-through, otherwise we
1795 will attempt to fill in aggregate contents later in this
1796 for cycle. */
1797 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1798 {
1799 ipa_set_jf_simple_pass_through (jfunc, index, false);
1800 continue;
1801 }
1802 }
1803 else if (TREE_CODE (arg) == SSA_NAME)
1804 {
1805 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1806 {
1807 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1808 if (index >= 0)
1809 {
1810 bool agg_p;
1811 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1812 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1813 }
1814 }
1815 else
1816 {
1817 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1818 if (is_gimple_assign (stmt))
1819 compute_complex_assign_jump_func (fbi, info, jfunc,
1820 call, stmt, arg, param_type);
1821 else if (gimple_code (stmt) == GIMPLE_PHI)
1822 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1823 call,
1824 as_a <gphi *> (stmt));
1825 }
1826 }
1827
1828 /* If ARG is a pointer, we cannot use its type to determine the type of the
1829 aggregate passed (because type conversions are ignored in gimple). Usually
1830 we can safely get the type from the function declaration, but with K&R
1831 prototypes or variadic functions we can try our luck with the type of the
1832 pointer passed. TODO: Since we look for actual initialization of the memory
1833 object, we may be better off working out the type based on the memory stores we find. */
1834 if (!param_type)
1835 param_type = TREE_TYPE (arg);
1836
1837 if ((jfunc->type != IPA_JF_PASS_THROUGH
1838 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1839 && (jfunc->type != IPA_JF_ANCESTOR
1840 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1841 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1842 || POINTER_TYPE_P (param_type)))
1843 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1844 }
1845 if (!useful_context)
1846 vec_free (args->polymorphic_call_contexts);
1847 }
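
/* As an illustration of the jump functions computed above (a hypothetical
   example, not taken from this file): given

     void caller (int *p, int n)
     {
       callee (p, n + 1, 7);
     }

   with p known to be non-null, the first argument yields a jump function
   whose value range is the anti-range ~[0, 0], the second an arithmetic
   pass-through of parameter 1 with PLUS_EXPR and constant operand 1, and
   the third a constant jump function for 7.  */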
1848
1849 /* Compute jump functions for all edges - both direct and indirect - outgoing
1850 from BB. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1854 {
1855 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1856 int i;
1857 struct cgraph_edge *cs;
1858
1859 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1860 {
1861 struct cgraph_node *callee = cs->callee;
1862
1863 if (callee)
1864 {
1865 callee = callee->ultimate_alias_target ();
1866 /* We do not need to bother analyzing calls to unknown functions
1867 unless they may become known during lto/whopr. */
1868 if (!callee->definition && !flag_lto)
1869 continue;
1870 }
1871 ipa_compute_jump_functions_for_edge (fbi, cs);
1872 }
1873 }
1874
1875 /* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
1880
1881 static tree
1882 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1883 HOST_WIDE_INT *offset_p)
1884 {
1885 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1886
1887 if (!gimple_assign_single_p (stmt))
1888 return NULL_TREE;
1889
1890 rhs = gimple_assign_rhs1 (stmt);
1891 if (TREE_CODE (rhs) == COMPONENT_REF)
1892 {
1893 ref_field = TREE_OPERAND (rhs, 1);
1894 rhs = TREE_OPERAND (rhs, 0);
1895 }
1896 else
1897 ref_field = NULL_TREE;
1898 if (TREE_CODE (rhs) != MEM_REF)
1899 return NULL_TREE;
1900 rec = TREE_OPERAND (rhs, 0);
1901 if (TREE_CODE (rec) != ADDR_EXPR)
1902 return NULL_TREE;
1903 rec = TREE_OPERAND (rec, 0);
1904 if (TREE_CODE (rec) != PARM_DECL
1905 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1906 return NULL_TREE;
1907 ref_offset = TREE_OPERAND (rhs, 1);
1908
1909 if (use_delta)
1910 fld = delta_field;
1911 else
1912 fld = ptr_field;
1913 if (offset_p)
1914 *offset_p = int_bit_position (fld);
1915
1916 if (ref_field)
1917 {
1918 if (integer_nonzerop (ref_offset))
1919 return NULL_TREE;
1920 return ref_field == fld ? rec : NULL_TREE;
1921 }
1922 else
1923 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1924 : NULL_TREE;
1925 }
1926
1927 /* Returns true iff T is an SSA_NAME defined by a statement. */
1928
1929 static bool
1930 ipa_is_ssa_with_stmt_def (tree t)
1931 {
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1937 }
1938
1939 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
1942
1943 static struct cgraph_edge *
1944 ipa_note_param_call (struct cgraph_node *node, int param_index,
1945 gcall *stmt)
1946 {
1947 struct cgraph_edge *cs;
1948
1949 cs = node->get_edge (stmt);
1950 cs->indirect_info->param_index = param_index;
1951 cs->indirect_info->agg_contents = 0;
1952 cs->indirect_info->member_ptr = 0;
1953 cs->indirect_info->guaranteed_unmodified = 0;
1954 return cs;
1955 }
1956
1957 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1958 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1959 intermediate information about each formal parameter. Currently it checks
1960 whether the call calls a pointer that is a formal parameter and if so, the
1961 parameter is marked with the called flag and an indirect call graph edge
1962 describing the call is created. This is very simple for ordinary pointers
1963 represented in SSA but not-so-nice when it comes to member pointers. The
1964 ugly part of this function does nothing more than trying to match the
1965 pattern of such a call. An example of such a pattern is the gimple dump
1966 below, the call is on the last line:
1967
1968 <bb 2>:
1969 f$__delta_5 = f.__delta;
1970 f$__pfn_24 = f.__pfn;
1971
1972 or
1973 <bb 2>:
1974 f$__delta_5 = MEM[(struct *)&f];
1975 f$__pfn_24 = MEM[(struct *)&f + 4B];
1976
1977 and a few lines below:
1978
1979 <bb 5>
1980 D.2496_3 = (int) f$__pfn_24;
1981 D.2497_4 = D.2496_3 & 1;
1982 if (D.2497_4 != 0)
1983 goto <bb 3>;
1984 else
1985 goto <bb 4>;
1986
1987 <bb 6>:
1988 D.2500_7 = (unsigned int) f$__delta_5;
1989 D.2501_8 = &S + D.2500_7;
1990 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1991 D.2503_10 = *D.2502_9;
1992 D.2504_12 = f$__pfn_24 + -1;
1993 D.2505_13 = (unsigned int) D.2504_12;
1994 D.2506_14 = D.2503_10 + D.2505_13;
1995 D.2507_15 = *D.2506_14;
1996 iftmp.11_16 = (String:: *) D.2507_15;
1997
1998 <bb 7>:
1999 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2000 D.2500_19 = (unsigned int) f$__delta_5;
2001 D.2508_20 = &S + D.2500_19;
2002 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2003
2004 Such patterns are results of simple calls to a member pointer:
2005
2006 int doprinting (int (MyString::* f)(int) const)
2007 {
2008 MyString S ("somestring");
2009
2010 return (S.*f)(4);
2011 }
2012
2013 Moreover, the function also looks for called pointers loaded from aggregates
2014 passed by value or reference. */
2015
2016 static void
2017 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2018 tree target)
2019 {
2020 struct ipa_node_params *info = fbi->info;
2021 HOST_WIDE_INT offset;
2022 bool by_ref;
2023
2024 if (SSA_NAME_IS_DEFAULT_DEF (target))
2025 {
2026 tree var = SSA_NAME_VAR (target);
2027 int index = ipa_get_param_decl_index (info, var);
2028 if (index >= 0)
2029 ipa_note_param_call (fbi->node, index, call);
2030 return;
2031 }
2032
2033 int index;
2034 gimple *def = SSA_NAME_DEF_STMT (target);
2035 bool guaranteed_unmodified;
2036 if (gimple_assign_single_p (def)
2037 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2038 gimple_assign_rhs1 (def), &index, &offset,
2039 NULL, &by_ref, &guaranteed_unmodified))
2040 {
2041 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2042 cs->indirect_info->offset = offset;
2043 cs->indirect_info->agg_contents = 1;
2044 cs->indirect_info->by_ref = by_ref;
2045 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2046 return;
2047 }
2048
2049 /* Now we need to try to match the complex pattern of calling a member
2050 pointer. */
2051 if (gimple_code (def) != GIMPLE_PHI
2052 || gimple_phi_num_args (def) != 2
2053 || !POINTER_TYPE_P (TREE_TYPE (target))
2054 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2055 return;
2056
2057 /* First, we need to check whether one of these is a load from a member
2058 pointer that is a parameter to this function. */
2059 tree n1 = PHI_ARG_DEF (def, 0);
2060 tree n2 = PHI_ARG_DEF (def, 1);
2061 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2062 return;
2063 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2064 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2065
2066 tree rec;
2067 basic_block bb, virt_bb;
2068 basic_block join = gimple_bb (def);
2069 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2070 {
2071 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2072 return;
2073
2074 bb = EDGE_PRED (join, 0)->src;
2075 virt_bb = gimple_bb (d2);
2076 }
2077 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2078 {
2079 bb = EDGE_PRED (join, 1)->src;
2080 virt_bb = gimple_bb (d1);
2081 }
2082 else
2083 return;
2084
2085 /* Second, we need to check that the basic blocks are laid out in the way
2086 corresponding to the pattern. */
2087
2088 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2089 || single_pred (virt_bb) != bb
2090 || single_succ (virt_bb) != join)
2091 return;
2092
2093 /* Third, let's see that the branching is done depending on the least
2094 significant bit of the pfn. */
2095
2096 gimple *branch = last_stmt (bb);
2097 if (!branch || gimple_code (branch) != GIMPLE_COND)
2098 return;
2099
2100 if ((gimple_cond_code (branch) != NE_EXPR
2101 && gimple_cond_code (branch) != EQ_EXPR)
2102 || !integer_zerop (gimple_cond_rhs (branch)))
2103 return;
2104
2105 tree cond = gimple_cond_lhs (branch);
2106 if (!ipa_is_ssa_with_stmt_def (cond))
2107 return;
2108
2109 def = SSA_NAME_DEF_STMT (cond);
2110 if (!is_gimple_assign (def)
2111 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2112 || !integer_onep (gimple_assign_rhs2 (def)))
2113 return;
2114
2115 cond = gimple_assign_rhs1 (def);
2116 if (!ipa_is_ssa_with_stmt_def (cond))
2117 return;
2118
2119 def = SSA_NAME_DEF_STMT (cond);
2120
2121 if (is_gimple_assign (def)
2122 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2123 {
2124 cond = gimple_assign_rhs1 (def);
2125 if (!ipa_is_ssa_with_stmt_def (cond))
2126 return;
2127 def = SSA_NAME_DEF_STMT (cond);
2128 }
2129
2130 tree rec2;
2131 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2132 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2133 == ptrmemfunc_vbit_in_delta),
2134 NULL);
2135 if (rec != rec2)
2136 return;
2137
2138 index = ipa_get_param_decl_index (info, rec);
2139 if (index >= 0
2140 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2141 {
2142 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2143 cs->indirect_info->offset = offset;
2144 cs->indirect_info->agg_contents = 1;
2145 cs->indirect_info->member_ptr = 1;
2146 cs->indirect_info->guaranteed_unmodified = 1;
2147 }
2148
2149 return;
2150 }
2151
2152 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2153 object referenced in the expression is a formal parameter of the caller
2154 FBI->node (described by FBI->info), create a call note for the
2155 statement. */
2156
2157 static void
2158 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2159 gcall *call, tree target)
2160 {
2161 tree obj = OBJ_TYPE_REF_OBJECT (target);
2162 int index;
2163 HOST_WIDE_INT anc_offset;
2164
2165 if (!flag_devirtualize)
2166 return;
2167
2168 if (TREE_CODE (obj) != SSA_NAME)
2169 return;
2170
2171 struct ipa_node_params *info = fbi->info;
2172 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2173 {
2174 struct ipa_jump_func jfunc;
2175 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2176 return;
2177
2178 anc_offset = 0;
2179 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2180 gcc_assert (index >= 0);
2181 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2182 call, &jfunc))
2183 return;
2184 }
2185 else
2186 {
2187 struct ipa_jump_func jfunc;
2188 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2189 tree expr;
2190
2191 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2192 if (!expr)
2193 return;
2194 index = ipa_get_param_decl_index (info,
2195 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2196 gcc_assert (index >= 0);
2197 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2198 call, &jfunc, anc_offset))
2199 return;
2200 }
2201
2202 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2203 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2204 ii->offset = anc_offset;
2205 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2206 ii->otr_type = obj_type_ref_class (target);
2207 ii->polymorphic = 1;
2208 }
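
/* For instance (an assumed example): in

     struct A { virtual int f (); };
     int wrap (struct A *a) { return a->f (); }

   the call a->f () is an OBJ_TYPE_REF whose object is the default
   definition of parameter a, so the indirect edge gets param_index 0,
   offset 0, the OTR type A and the token of f, and is marked
   polymorphic.  */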
2209
2210 /* Analyze whether and how the call statement CALL utilizes formal parameters
2211 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2212 containing intermediate information about each formal parameter. */
2213
2214 static void
2215 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2216 {
2217 tree target = gimple_call_fn (call);
2218
2219 if (!target
2220 || (TREE_CODE (target) != SSA_NAME
2221 && !virtual_method_call_p (target)))
2222 return;
2223
2224 struct cgraph_edge *cs = fbi->node->get_edge (call);
2225 /* If we previously turned the call into a direct call, there is
2226 no need to analyze it. */
2227 if (cs && !cs->indirect_unknown_callee)
2228 return;
2229
2230 if (cs->indirect_info->polymorphic && flag_devirtualize)
2231 {
2232 tree instance;
2233 tree target = gimple_call_fn (call);
2234 ipa_polymorphic_call_context context (current_function_decl,
2235 target, call, &instance);
2236
2237 gcc_checking_assert (cs->indirect_info->otr_type
2238 == obj_type_ref_class (target));
2239 gcc_checking_assert (cs->indirect_info->otr_token
2240 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2241
2242 cs->indirect_info->vptr_changed
2243 = !context.get_dynamic_type (instance,
2244 OBJ_TYPE_REF_OBJECT (target),
2245 obj_type_ref_class (target), call);
2246 cs->indirect_info->context = context;
2247 }
2248
2249 if (TREE_CODE (target) == SSA_NAME)
2250 ipa_analyze_indirect_call_uses (fbi, call, target);
2251 else if (virtual_method_call_p (target))
2252 ipa_analyze_virtual_call_uses (fbi, call, target);
2253 }
2254
2255
2256 /* Analyze the call statement STMT with respect to formal parameters (described
2257 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2258 formal parameters are called. */
2259
2260 static void
2261 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2262 {
2263 if (is_gimple_call (stmt))
2264 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2265 }
2266
2267 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2268 If OP is a parameter declaration, mark it as used in the info structure
2269 passed in DATA. */
2270
2271 static bool
2272 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2273 {
2274 struct ipa_node_params *info = (struct ipa_node_params *) data;
2275
2276 op = get_base_address (op);
2277 if (op
2278 && TREE_CODE (op) == PARM_DECL)
2279 {
2280 int index = ipa_get_param_decl_index (info, op);
2281 gcc_assert (index >= 0);
2282 ipa_set_param_used (info, index, true);
2283 }
2284
2285 return false;
2286 }
2287
2288 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2289 the findings in various structures of the associated ipa_node_params
2290 structure, such as parameter flags, notes etc. FBI holds various data about
2291 the function being analyzed. */
2292
2293 static void
2294 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2295 {
2296 gimple_stmt_iterator gsi;
2297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2298 {
2299 gimple *stmt = gsi_stmt (gsi);
2300
2301 if (is_gimple_debug (stmt))
2302 continue;
2303
2304 ipa_analyze_stmt_uses (fbi, stmt);
2305 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2306 visit_ref_for_mod_analysis,
2307 visit_ref_for_mod_analysis,
2308 visit_ref_for_mod_analysis);
2309 }
2310 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2311 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2312 visit_ref_for_mod_analysis,
2313 visit_ref_for_mod_analysis,
2314 visit_ref_for_mod_analysis);
2315 }
2316
2317 /* Calculate controlled uses of parameters of NODE. */
2318
2319 static void
2320 ipa_analyze_controlled_uses (struct cgraph_node *node)
2321 {
2322 struct ipa_node_params *info = IPA_NODE_REF (node);
2323
2324 for (int i = 0; i < ipa_get_param_count (info); i++)
2325 {
2326 tree parm = ipa_get_param (info, i);
2327 int controlled_uses = 0;
2328
2329 /* For SSA regs see if parameter is used. For non-SSA we compute
2330 the flag during modification analysis. */
2331 if (is_gimple_reg (parm))
2332 {
2333 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2334 parm);
2335 if (ddef && !has_zero_uses (ddef))
2336 {
2337 imm_use_iterator imm_iter;
2338 use_operand_p use_p;
2339
2340 ipa_set_param_used (info, i, true);
2341 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2342 if (!is_gimple_call (USE_STMT (use_p)))
2343 {
2344 if (!is_gimple_debug (USE_STMT (use_p)))
2345 {
2346 controlled_uses = IPA_UNDESCRIBED_USE;
2347 break;
2348 }
2349 }
2350 else
2351 controlled_uses++;
2352 }
2353 else
2354 controlled_uses = 0;
2355 }
2356 else
2357 controlled_uses = IPA_UNDESCRIBED_USE;
2358 ipa_set_controlled_uses (info, i, controlled_uses);
2359 }
2360 }
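
/* A small example of the counting above (hypothetical): in

     void f (int *p) { g (p); h (p); }

   the default definition of p has exactly two uses, both of them call
   arguments, so p ends up with two controlled uses; any non-call,
   non-debug use would force IPA_UNDESCRIBED_USE instead.  */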
2361
2362 /* Free stuff in BI. */
2363
2364 static void
2365 free_ipa_bb_info (struct ipa_bb_info *bi)
2366 {
2367 bi->cg_edges.release ();
2368 bi->param_aa_statuses.release ();
2369 }
2370
2371 /* Dominator walker driving the analysis. */
2372
2373 class analysis_dom_walker : public dom_walker
2374 {
2375 public:
2376 analysis_dom_walker (struct ipa_func_body_info *fbi)
2377 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2378
2379 virtual edge before_dom_children (basic_block);
2380
2381 private:
2382 struct ipa_func_body_info *m_fbi;
2383 };
2384
2385 edge
2386 analysis_dom_walker::before_dom_children (basic_block bb)
2387 {
2388 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2389 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2390 return NULL;
2391 }
2392
2393 /* Release body info FBI. */
2394
2395 void
2396 ipa_release_body_info (struct ipa_func_body_info *fbi)
2397 {
2398 int i;
2399 struct ipa_bb_info *bi;
2400
2401 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2402 free_ipa_bb_info (bi);
2403 fbi->bb_infos.release ();
2404 }
2405
2406 /* Initialize the array describing properties of formal parameters
2407 of NODE, analyze their uses and compute jump functions associated
2408 with actual arguments of calls from within NODE. */
2409
2410 void
2411 ipa_analyze_node (struct cgraph_node *node)
2412 {
2413 struct ipa_func_body_info fbi;
2414 struct ipa_node_params *info;
2415
2416 ipa_check_create_node_params ();
2417 ipa_check_create_edge_args ();
2418 info = IPA_NODE_REF (node);
2419
2420 if (info->analysis_done)
2421 return;
2422 info->analysis_done = 1;
2423
2424 if (ipa_func_spec_opts_forbid_analysis_p (node))
2425 {
2426 for (int i = 0; i < ipa_get_param_count (info); i++)
2427 {
2428 ipa_set_param_used (info, i, true);
2429 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2430 }
2431 return;
2432 }
2433
2434 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2435 push_cfun (func);
2436 calculate_dominance_info (CDI_DOMINATORS);
2437 ipa_initialize_node_params (node);
2438 ipa_analyze_controlled_uses (node);
2439
2440 fbi.node = node;
2441 fbi.info = IPA_NODE_REF (node);
2442 fbi.bb_infos = vNULL;
2443 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2444 fbi.param_count = ipa_get_param_count (info);
2445 fbi.aa_walked = 0;
2446
2447 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2448 {
2449 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2450 bi->cg_edges.safe_push (cs);
2451 }
2452
2453 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2454 {
2455 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2456 bi->cg_edges.safe_push (cs);
2457 }
2458
2459 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2460
2461 ipa_release_body_info (&fbi);
2462 free_dominance_info (CDI_DOMINATORS);
2463 pop_cfun ();
2464 }
2465
2466 /* Update the jump functions associated with call graph edge E when the call
2467 graph edge CS is being inlined, assuming that E->caller is already (possibly
2468 indirectly) inlined into CS->callee and that E has not been inlined. */
2469
2470 static void
2471 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2472 struct cgraph_edge *e)
2473 {
2474 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2475 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2476 int count = ipa_get_cs_argument_count (args);
2477 int i;
2478
2479 for (i = 0; i < count; i++)
2480 {
2481 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2482 struct ipa_polymorphic_call_context *dst_ctx
2483 = ipa_get_ith_polymorhic_call_context (args, i);
2484
2485 if (dst->type == IPA_JF_ANCESTOR)
2486 {
2487 struct ipa_jump_func *src;
2488 int dst_fid = dst->value.ancestor.formal_id;
2489 struct ipa_polymorphic_call_context *src_ctx
2490 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2491
2492 /* A variable number of arguments can cause havoc if we try to access
2493 one that does not exist in the inlined edge. So make sure we
2494 don't. */
2495 if (dst_fid >= ipa_get_cs_argument_count (top))
2496 {
2497 ipa_set_jf_unknown (dst);
2498 continue;
2499 }
2500
2501 src = ipa_get_ith_jump_func (top, dst_fid);
2502
2503 if (src_ctx && !src_ctx->useless_p ())
2504 {
2505 struct ipa_polymorphic_call_context ctx = *src_ctx;
2506
2507 /* TODO: Make type preserved safe WRT contexts. */
2508 if (!ipa_get_jf_ancestor_type_preserved (dst))
2509 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2510 ctx.offset_by (dst->value.ancestor.offset);
2511 if (!ctx.useless_p ())
2512 {
2513 if (!dst_ctx)
2514 {
2515 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2516 count);
2517 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2518 }
2519
2520 dst_ctx->combine_with (ctx);
2521 }
2522 }
2523
2524 if (src->agg.items
2525 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2526 {
2527 struct ipa_agg_jf_item *item;
2528 int j;
2529
2530 /* Currently we do not produce clobber aggregate jump functions,
2531 replace with merging when we do. */
2532 gcc_assert (!dst->agg.items);
2533
2534 dst->agg.items = vec_safe_copy (src->agg.items);
2535 dst->agg.by_ref = src->agg.by_ref;
2536 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2537 item->offset -= dst->value.ancestor.offset;
2538 }
2539
2540 if (src->type == IPA_JF_PASS_THROUGH
2541 && src->value.pass_through.operation == NOP_EXPR)
2542 {
2543 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2544 dst->value.ancestor.agg_preserved &=
2545 src->value.pass_through.agg_preserved;
2546 }
2547 else if (src->type == IPA_JF_PASS_THROUGH
2548 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2549 {
2550 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2551 dst->value.ancestor.agg_preserved = false;
2552 }
2553 else if (src->type == IPA_JF_ANCESTOR)
2554 {
2555 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2556 dst->value.ancestor.offset += src->value.ancestor.offset;
2557 dst->value.ancestor.agg_preserved &=
2558 src->value.ancestor.agg_preserved;
2559 }
2560 else
2561 ipa_set_jf_unknown (dst);
2562 }
2563 else if (dst->type == IPA_JF_PASS_THROUGH)
2564 {
2565 struct ipa_jump_func *src;
2566 /* We must check the range because of calls with a variable number of
2567 arguments, and we cannot combine jump functions with operations. */
2568 if (dst->value.pass_through.operation == NOP_EXPR
2569 && (dst->value.pass_through.formal_id
2570 < ipa_get_cs_argument_count (top)))
2571 {
2572 int dst_fid = dst->value.pass_through.formal_id;
2573 src = ipa_get_ith_jump_func (top, dst_fid);
2574 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2575 struct ipa_polymorphic_call_context *src_ctx
2576 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2577
2578 if (src_ctx && !src_ctx->useless_p ())
2579 {
2580 struct ipa_polymorphic_call_context ctx = *src_ctx;
2581
2582 /* TODO: Make type preserved safe WRT contexts. */
2583 if (!ipa_get_jf_pass_through_type_preserved (dst))
2584 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2585 if (!ctx.useless_p ())
2586 {
2587 if (!dst_ctx)
2588 {
2589 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2590 count);
2591 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2592 }
2593 dst_ctx->combine_with (ctx);
2594 }
2595 }
2596 switch (src->type)
2597 {
2598 case IPA_JF_UNKNOWN:
2599 ipa_set_jf_unknown (dst);
2600 break;
2601 case IPA_JF_CONST:
2602 ipa_set_jf_cst_copy (dst, src);
2603 break;
2604
2605 case IPA_JF_PASS_THROUGH:
2606 {
2607 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2608 enum tree_code operation;
2609 operation = ipa_get_jf_pass_through_operation (src);
2610
2611 if (operation == NOP_EXPR)
2612 {
2613 bool agg_p;
2614 agg_p = dst_agg_p
2615 && ipa_get_jf_pass_through_agg_preserved (src);
2616 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2617 }
2618 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2619 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2620 else
2621 {
2622 tree operand = ipa_get_jf_pass_through_operand (src);
2623 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2624 operation);
2625 }
2626 break;
2627 }
2628 case IPA_JF_ANCESTOR:
2629 {
2630 bool agg_p;
2631 agg_p = dst_agg_p
2632 && ipa_get_jf_ancestor_agg_preserved (src);
2633 ipa_set_ancestor_jf (dst,
2634 ipa_get_jf_ancestor_offset (src),
2635 ipa_get_jf_ancestor_formal_id (src),
2636 agg_p);
2637 break;
2638 }
2639 default:
2640 gcc_unreachable ();
2641 }
2642
2643 if (src->agg.items
2644 && (dst_agg_p || !src->agg.by_ref))
2645 {
2646 /* Currently we do not produce clobber aggregate jump
2647 functions, replace with merging when we do. */
2648 gcc_assert (!dst->agg.items);
2649
2650 dst->agg.by_ref = src->agg.by_ref;
2651 dst->agg.items = vec_safe_copy (src->agg.items);
2652 }
2653 }
2654 else
2655 ipa_set_jf_unknown (dst);
2656 }
2657 }
2658 }
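
/* A sketch of the composition performed above (an assumed scenario): if B
   has been inlined into A, and the edge A->B passed A's parameter 1
   unchanged to B's parameter 0, then an edge B->C whose jump function was
   "simple pass-through of B's parameter 0" is rewritten to "simple
   pass-through of A's parameter 1", so that C's argument is now described
   relative to the surviving root A.  */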
2659
2660 /* If TARGET is an addr_expr of a function declaration, make it the
2661 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2662 Otherwise, return NULL. */
2663
2664 struct cgraph_edge *
2665 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2666 bool speculative)
2667 {
2668 struct cgraph_node *callee;
2669 struct inline_edge_summary *es = inline_edge_summary (ie);
2670 bool unreachable = false;
2671
2672 if (TREE_CODE (target) == ADDR_EXPR)
2673 target = TREE_OPERAND (target, 0);
2674 if (TREE_CODE (target) != FUNCTION_DECL)
2675 {
2676 target = canonicalize_constructor_val (target, NULL);
2677 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2678 {
2679 /* Member pointer call that goes through a VMT lookup. */
2680 if (ie->indirect_info->member_ptr
2681 /* Or if target is not an invariant expression and we do not
2682 know if it will evaluate to a function at runtime.
2683 This can happen when folding through &VAR, where &VAR
2684 is IP invariant, but VAR itself is not.
2685
2686 TODO: Revisit this when GCC 5 is branched. It seems that
2687 member_ptr check is not needed and that we may try to fold
2688 the expression and see if VAR is readonly. */
2689 || !is_gimple_ip_invariant (target))
2690 {
2691 if (dump_enabled_p ())
2692 {
2693 location_t loc = gimple_location_safe (ie->call_stmt);
2694 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2695 "discovered direct call non-invariant "
2696 "%s/%i\n",
2697 ie->caller->name (), ie->caller->order);
2698 }
2699 return NULL;
2700 }
2701
2702
2703 if (dump_enabled_p ())
2704 {
2705 location_t loc = gimple_location_safe (ie->call_stmt);
2706 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2707 "discovered direct call to non-function in %s/%i, "
2708 "making it __builtin_unreachable\n",
2709 ie->caller->name (), ie->caller->order);
2710 }
2711
2712 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2713 callee = cgraph_node::get_create (target);
2714 unreachable = true;
2715 }
2716 else
2717 callee = cgraph_node::get (target);
2718 }
2719 else
2720 callee = cgraph_node::get (target);
2721
2722 /* Because may-edges are not explicitly represented and the vtable may be external,
2723 we may create the first reference to the object in the unit. */
2724 if (!callee || callee->global.inlined_to)
2725 {
2726
2727 /* We had better make sure we can refer to it.
2728 In the case of static functions we are out of luck, since we have
2729 already removed their bodies. In the case of public functions we may or may
2730 not introduce the reference. */
2731 if (!canonicalize_constructor_val (target, NULL)
2732 || !TREE_PUBLIC (target))
2733 {
2734 if (dump_file)
2735 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2736 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2737 xstrdup_for_dump (ie->caller->name ()),
2738 ie->caller->order,
2739 xstrdup_for_dump (ie->callee->name ()),
2740 ie->callee->order);
2741 return NULL;
2742 }
2743 callee = cgraph_node::get_create (target);
2744 }
2745
2746 /* If the edge has already been made speculative. */
2747 if (speculative && ie->speculative)
2748 {
2749 struct cgraph_edge *e2;
2750 struct ipa_ref *ref;
2751 ie->speculative_call_info (e2, ie, ref);
2752 if (e2->callee->ultimate_alias_target ()
2753 != callee->ultimate_alias_target ())
2754 {
2755 if (dump_file)
2756 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2757 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2758 xstrdup_for_dump (ie->caller->name ()),
2759 ie->caller->order,
2760 xstrdup_for_dump (callee->name ()),
2761 callee->order,
2762 xstrdup_for_dump (e2->callee->name ()),
2763 e2->callee->order);
2764 }
2765 else
2766 {
2767 if (dump_file)
2768 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2769 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2770 xstrdup_for_dump (ie->caller->name ()),
2771 ie->caller->order,
2772 xstrdup_for_dump (callee->name ()),
2773 callee->order);
2774 }
2775 return NULL;
2776 }
2777
2778 if (!dbg_cnt (devirt))
2779 return NULL;
2780
2781 ipa_check_create_node_params ();
2782
2783 /* We cannot make edges to inline clones. It is a bug if someone removed
2784 the cgraph node too early. */
2785 gcc_assert (!callee->global.inlined_to);
2786
2787 if (dump_file && !unreachable)
2788 {
2789 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2790 "(%s/%i -> %s/%i), for stmt ",
2791 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2792 speculative ? "speculative" : "known",
2793 xstrdup_for_dump (ie->caller->name ()),
2794 ie->caller->order,
2795 xstrdup_for_dump (callee->name ()),
2796 callee->order);
2797 if (ie->call_stmt)
2798 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2799 else
2800 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2801 }
2802 if (dump_enabled_p ())
2803 {
2804 location_t loc = gimple_location_safe (ie->call_stmt);
2805
2806 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2807 "converting indirect call in %s to direct call to %s\n",
2808 ie->caller->name (), callee->name ());
2809 }
2810 if (!speculative)
2811 {
2812 struct cgraph_edge *orig = ie;
2813 ie = ie->make_direct (callee);
2814 /* If we resolved a speculative edge, the cost is already up to date
2815 for the direct call (adjusted by inline_edge_duplication_hook). */
2816 if (ie == orig)
2817 {
2818 es = inline_edge_summary (ie);
2819 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2820 - eni_size_weights.call_cost);
2821 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2822 - eni_time_weights.call_cost);
2823 }
2824 }
2825 else
2826 {
2827 if (!callee->can_be_discarded_p ())
2828 {
2829 cgraph_node *alias;
2830 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2831 if (alias)
2832 callee = alias;
2833 }
2834 /* make_speculative will update ie's cost to direct call cost. */
2835 ie = ie->make_speculative
2836 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2837 }
2838
2839 return ie;
2840 }
2841
2842 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2843 CONSTRUCTOR and return it. Return NULL if the search fails for some
2844 reason. */
2845
2846 static tree
2847 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2848 {
2849 tree type = TREE_TYPE (constructor);
2850 if (TREE_CODE (type) != ARRAY_TYPE
2851 && TREE_CODE (type) != RECORD_TYPE)
2852 return NULL;
2853
2854 unsigned ix;
2855 tree index, val;
2856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2857 {
2858 HOST_WIDE_INT elt_offset;
2859 if (TREE_CODE (type) == ARRAY_TYPE)
2860 {
2861 offset_int off;
2862 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2863 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2864
2865 if (index)
2866 {
2867 off = wi::to_offset (index);
2868 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2869 {
2870 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2871 gcc_assert (TREE_CODE (low_bound) == INTEGER_CST);
2872 off = wi::sext (off - wi::to_offset (low_bound),
2873 TYPE_PRECISION (TREE_TYPE (index)));
2874 }
2875 off *= wi::to_offset (unit_size);
2876 }
2877 else
2878 off = wi::to_offset (unit_size) * ix;
2879
2880 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2881 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2882 continue;
2883 elt_offset = off.to_shwi ();
2884 }
2885 else if (TREE_CODE (type) == RECORD_TYPE)
2886 {
2887 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2888 if (DECL_BIT_FIELD (index))
2889 continue;
2890 elt_offset = int_bit_position (index);
2891 }
2892 else
2893 gcc_unreachable ();
2894
2895 if (elt_offset > req_offset)
2896 return NULL;
2897
2898 if (TREE_CODE (val) == CONSTRUCTOR)
2899 return find_constructor_constant_at_offset (val,
2900 req_offset - elt_offset);
2901
2902 if (elt_offset == req_offset
2903 && is_gimple_reg_type (TREE_TYPE (val))
2904 && is_gimple_ip_invariant (val))
2905 return val;
2906 }
2907 return NULL;
2908 }
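
/* Illustration (assuming a 32-bit int): for

     static const int arr[3] = { 10, 20, 30 };

   a query with REQ_OFFSET 64 matches the element at index 2 and returns
   the INTEGER_CST 30, while an offset that does not coincide with the
   start of a scalar element returns NULL.  */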
2909
2910 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2911 invariant from a static constructor and if so, return it. Otherwise return
2912 NULL. */
2913
2914 static tree
2915 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2916 {
2917 if (by_ref)
2918 {
2919 if (TREE_CODE (scalar) != ADDR_EXPR)
2920 return NULL;
2921 scalar = TREE_OPERAND (scalar, 0);
2922 }
2923
2924 if (!VAR_P (scalar)
2925 || !is_global_var (scalar)
2926 || !TREE_READONLY (scalar)
2927 || !DECL_INITIAL (scalar)
2928 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2929 return NULL;
2930
2931 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2932 }
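
/* For example (hypothetical): for a call foo (&cst_var) analyzed with
   BY_REF true, SCALAR is the ADDR_EXPR &cst_var; the address is stripped
   and, provided cst_var is a read-only global with a CONSTRUCTOR
   initializer, the constant at OFFSET is looked up in that
   initializer.  */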
2933
2934 /* Retrieve value from aggregate jump function AGG or static initializer of
2935 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2936 none. BY_REF specifies whether the value has to be passed by reference or
2937 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2938 to is set to true if the value comes from an initializer of a constant. */
2939
2940 tree
2941 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2942 HOST_WIDE_INT offset, bool by_ref,
2943 bool *from_global_constant)
2944 {
2945 struct ipa_agg_jf_item *item;
2946 int i;
2947
2948 if (scalar)
2949 {
2950 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2951 if (res)
2952 {
2953 if (from_global_constant)
2954 *from_global_constant = true;
2955 return res;
2956 }
2957 }
2958
2959 if (!agg
2960 || by_ref != agg->by_ref)
2961 return NULL;
2962
2963 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2964 if (item->offset == offset)
2965 {
2966 /* Currently we do not have clobber values, return NULL for them once
2967 we do. */
2968 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2969 if (from_global_constant)
2970 *from_global_constant = false;
2971 return item->value;
2972 }
2973 return NULL;
2974 }
2975
2976 /* Remove a reference to SYMBOL from the list of references of a node given by
2977 reference description RDESC. Return true if the reference has been
2978 successfully found and removed. */
2979
2980 static bool
2981 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2982 {
2983 struct ipa_ref *to_del;
2984 struct cgraph_edge *origin;
2985
2986 origin = rdesc->cs;
2987 if (!origin)
2988 return false;
2989 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2990 origin->lto_stmt_uid);
2991 if (!to_del)
2992 return false;
2993
2994 to_del->remove_reference ();
2995 if (dump_file)
2996 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2997 xstrdup_for_dump (origin->caller->name ()),
2998 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2999 return true;
3000 }
3001
3002 /* If JFUNC has a reference description with refcount different from
3003 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3004 NULL. JFUNC must be a constant jump function. */
3005
3006 static struct ipa_cst_ref_desc *
3007 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3008 {
3009 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3010 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3011 return rdesc;
3012 else
3013 return NULL;
3014 }
3015
3016 /* If the value of constant jump function JFUNC is an address of a function
3017 declaration, return the associated call graph node. Otherwise return
3018 NULL. */
3019
3020 static cgraph_node *
3021 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3022 {
3023 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3024 tree cst = ipa_get_jf_constant (jfunc);
3025 if (TREE_CODE (cst) != ADDR_EXPR
3026 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3027 return NULL;
3028
3029 return cgraph_node::get (TREE_OPERAND (cst, 0));
3030 }
3031
3032
3033 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3034 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3035 the edge specified in the rdesc. Return false if either the symbol or the
3036 reference could not be found, otherwise return true. */
3037
3038 static bool
3039 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3040 {
3041 struct ipa_cst_ref_desc *rdesc;
3042 if (jfunc->type == IPA_JF_CONST
3043 && (rdesc = jfunc_rdesc_usable (jfunc))
3044 && --rdesc->refcount == 0)
3045 {
3046 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3047 if (!symbol)
3048 return false;
3049
3050 return remove_described_reference (symbol, rdesc);
3051 }
3052 return true;
3053 }
3054
3055 /* Try to find a destination for indirect edge IE that corresponds to a simple
3056 call or a call of a member function pointer and where the destination is a
3057 pointer formal parameter described by jump function JFUNC. If it can be
3058 determined, return the newly direct edge, otherwise return NULL.
3059 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3060
3061 static struct cgraph_edge *
3062 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3063 struct ipa_jump_func *jfunc,
3064 struct ipa_node_params *new_root_info)
3065 {
3066 struct cgraph_edge *cs;
3067 tree target;
3068 bool agg_contents = ie->indirect_info->agg_contents;
3069 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3070 if (agg_contents)
3071 {
3072 bool from_global_constant;
3073 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3074 ie->indirect_info->offset,
3075 ie->indirect_info->by_ref,
3076 &from_global_constant);
3077 if (target
3078 && !from_global_constant
3079 && !ie->indirect_info->guaranteed_unmodified)
3080 return NULL;
3081 }
3082 else
3083 target = scalar;
3084 if (!target)
3085 return NULL;
3086 cs = ipa_make_edge_direct_to_target (ie, target);
3087
3088 if (cs && !agg_contents)
3089 {
3090 bool ok;
3091 gcc_checking_assert (cs->callee
3092 && (cs != ie
3093 || jfunc->type != IPA_JF_CONST
3094 || !cgraph_node_for_jfunc (jfunc)
3095 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3096 ok = try_decrement_rdesc_refcount (jfunc);
3097 gcc_checking_assert (ok);
3098 }
3099
3100 return cs;
3101 }
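
/* E.g. (an assumed situation): if IE is an indirect call through the
   caller's formal parameter fn, and JFUNC at the inlined call site CS
   says the corresponding argument is always &some_fn (a hypothetical
   function), the edge is redirected to some_fn by
   ipa_make_edge_direct_to_target above.  */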
3102
3103 /* Return the target to be used in cases of impossible devirtualization. IE
3104 and target (the latter can be NULL) are dumped when dumping is enabled. */
3105
3106 tree
3107 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3108 {
3109 if (dump_file)
3110 {
3111 if (target)
3112 fprintf (dump_file,
3113 "Type inconsistent devirtualization: %s/%i->%s\n",
3114 ie->caller->name (), ie->caller->order,
3115 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3116 else
3117 fprintf (dump_file,
3118 "No devirtualization target in %s/%i\n",
3119 ie->caller->name (), ie->caller->order);
3120 }
3121 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3122 cgraph_node::get_create (new_target);
3123 return new_target;
3124 }
3125
3126 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3127 call based on a formal parameter which is described by jump function JFUNC
3128 and if it can be determined, make it direct and return the direct edge.
3129 Otherwise, return NULL. CTX describes the polymorphic context that the
3130 parameter the call is based on brings along with it. */
3131
3132 static struct cgraph_edge *
3133 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3134 struct ipa_jump_func *jfunc,
3135 struct ipa_polymorphic_call_context ctx)
3136 {
3137 tree target = NULL;
3138 bool speculative = false;
3139
3140 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3141 return NULL;
3142
3143 gcc_assert (!ie->indirect_info->by_ref);
3144
3145 /* Try to do lookup via known virtual table pointer value. */
3146 if (!ie->indirect_info->vptr_changed
3147 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3148 {
3149 tree vtable;
3150 unsigned HOST_WIDE_INT offset;
3151 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3152 : NULL;
3153 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3154 ie->indirect_info->offset,
3155 true);
3156 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3157 {
3158 bool can_refer;
3159 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3160 vtable, offset, &can_refer);
3161 if (can_refer)
3162 {
3163 if (!t
3164 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3165 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3166 || !possible_polymorphic_call_target_p
3167 (ie, cgraph_node::get (t)))
3168 {
3169 /* Do not speculate builtin_unreachable, it is stupid! */
3170 if (!ie->indirect_info->vptr_changed)
3171 target = ipa_impossible_devirt_target (ie, target);
3172 else
3173 target = NULL;
3174 }
3175 else
3176 {
3177 target = t;
3178 speculative = ie->indirect_info->vptr_changed;
3179 }
3180 }
3181 }
3182 }
3183
3184 ipa_polymorphic_call_context ie_context (ie);
3185 vec <cgraph_node *>targets;
3186 bool final;
3187
3188 ctx.offset_by (ie->indirect_info->offset);
3189 if (ie->indirect_info->vptr_changed)
3190 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3191 ie->indirect_info->otr_type);
3192 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3193 targets = possible_polymorphic_call_targets
3194 (ie->indirect_info->otr_type,
3195 ie->indirect_info->otr_token,
3196 ctx, &final);
3197 if (final && targets.length () <= 1)
3198 {
3199 speculative = false;
3200 if (targets.length () == 1)
3201 target = targets[0]->decl;
3202 else
3203 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3204 }
3205 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3206 && !ie->speculative && ie->maybe_hot_p ())
3207 {
3208 cgraph_node *n;
3209 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3210 ie->indirect_info->otr_token,
3211 ie->indirect_info->context);
3212 if (n)
3213 {
3214 target = n->decl;
3215 speculative = true;
3216 }
3217 }
3218
3219 if (target)
3220 {
3221 if (!possible_polymorphic_call_target_p
3222 (ie, cgraph_node::get_create (target)))
3223 {
3224 if (speculative)
3225 return NULL;
3226 target = ipa_impossible_devirt_target (ie, target);
3227 }
3228 return ipa_make_edge_direct_to_target (ie, target, speculative);
3229 }
3230 else
3231 return NULL;
3232 }
3233
3234 /* Update the param called notes associated with NODE when CS is being inlined,
3235 assuming NODE is (potentially indirectly) inlined into CS->callee.
3236 Moreover, if the callee is discovered to be constant, create a new cgraph
3237 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3238 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3239
3240 static bool
3241 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3242 struct cgraph_node *node,
3243 vec<cgraph_edge *> *new_edges)
3244 {
3245 struct ipa_edge_args *top;
3246 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3247 struct ipa_node_params *new_root_info;
3248 bool res = false;
3249
3250 ipa_check_create_edge_args ();
3251 top = IPA_EDGE_REF (cs);
3252 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3253 ? cs->caller->global.inlined_to
3254 : cs->caller);
3255
3256 for (ie = node->indirect_calls; ie; ie = next_ie)
3257 {
3258 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3259 struct ipa_jump_func *jfunc;
3260 int param_index;
3261 cgraph_node *spec_target = NULL;
3262
3263 next_ie = ie->next_callee;
3264
3265 if (ici->param_index == -1)
3266 continue;
3267
3268 /* We must check the range because of calls with a variable number of arguments: */
3269 if (ici->param_index >= ipa_get_cs_argument_count (top))
3270 {
3271 ici->param_index = -1;
3272 continue;
3273 }
3274
3275 param_index = ici->param_index;
3276 jfunc = ipa_get_ith_jump_func (top, param_index);
3277
3278 if (ie->speculative)
3279 {
3280 struct cgraph_edge *de;
3281 struct ipa_ref *ref;
3282 ie->speculative_call_info (de, ie, ref);
3283 spec_target = de->callee;
3284 }
3285
3286 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3287 new_direct_edge = NULL;
3288 else if (ici->polymorphic)
3289 {
3290 ipa_polymorphic_call_context ctx;
3291 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3292 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3293 }
3294 else
3295 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3296 new_root_info);
3297 /* If speculation was removed, then we need to do nothing. */
3298 if (new_direct_edge && new_direct_edge != ie
3299 && new_direct_edge->callee == spec_target)
3300 {
3301 new_direct_edge->indirect_inlining_edge = 1;
3302 top = IPA_EDGE_REF (cs);
3303 res = true;
3304 if (!new_direct_edge->speculative)
3305 continue;
3306 }
3307 else if (new_direct_edge)
3308 {
3309 new_direct_edge->indirect_inlining_edge = 1;
3310 if (new_direct_edge->call_stmt)
3311 new_direct_edge->call_stmt_cannot_inline_p
3312 = !gimple_check_call_matching_types (
3313 new_direct_edge->call_stmt,
3314 new_direct_edge->callee->decl, false);
3315 if (new_edges)
3316 {
3317 new_edges->safe_push (new_direct_edge);
3318 res = true;
3319 }
3320 top = IPA_EDGE_REF (cs);
3321 /* If speculative edge was introduced we still need to update
3322 call info of the indirect edge. */
3323 if (!new_direct_edge->speculative)
3324 continue;
3325 }
3326 if (jfunc->type == IPA_JF_PASS_THROUGH
3327 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3328 {
3329 if (ici->agg_contents
3330 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3331 && !ici->polymorphic)
3332 ici->param_index = -1;
3333 else
3334 {
3335 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3336 if (ici->polymorphic
3337 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3338 ici->vptr_changed = true;
3339 }
3340 }
3341 else if (jfunc->type == IPA_JF_ANCESTOR)
3342 {
3343 if (ici->agg_contents
3344 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3345 && !ici->polymorphic)
3346 ici->param_index = -1;
3347 else
3348 {
3349 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3350 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3351 if (ici->polymorphic
3352 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3353 ici->vptr_changed = true;
3354 }
3355 }
3356 else
3357 /* Either we can find a destination for this edge now or never. */
3358 ici->param_index = -1;
3359 }
3360
3361 return res;
3362 }
3363
3364 /* Recursively traverse subtree of NODE (including node) made of inlined
3365 cgraph_edges when CS has been inlined and invoke
3366 update_indirect_edges_after_inlining on all nodes and
3367 update_jump_functions_after_inlining on all non-inlined edges that lead out
3368 of this subtree. Newly discovered indirect edges will be added to
3369 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3370 created. */
3371
3372 static bool
3373 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3374 struct cgraph_node *node,
3375 vec<cgraph_edge *> *new_edges)
3376 {
3377 struct cgraph_edge *e;
3378 bool res;
3379
3380 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3381
3382 for (e = node->callees; e; e = e->next_callee)
3383 if (!e->inline_failed)
3384 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3385 else
3386 update_jump_functions_after_inlining (cs, e);
3387 for (e = node->indirect_calls; e; e = e->next_callee)
3388 update_jump_functions_after_inlining (cs, e);
3389
3390 return res;
3391 }
3392
3393 /* Combine two controlled uses counts as done during inlining. */
3394
3395 static int
3396 combine_controlled_uses_counters (int c, int d)
3397 {
3398 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3399 return IPA_UNDESCRIBED_USE;
3400 else
3401 return c + d - 1;
3402 }
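
/* Example: the inlined call itself accounts for one of the uses, so
   combining C == 2 uses in the new root with D == 3 uses in the old root
   yields 2 + 3 - 1 == 4 described uses; IPA_UNDESCRIBED_USE on either
   side is absorbing.  */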
3403
3404 /* Propagate the number of controlled uses from CS->callee to the new root of the
3405 tree of inlined nodes. */
3406
3407 static void
3408 propagate_controlled_uses (struct cgraph_edge *cs)
3409 {
3410 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3411 struct cgraph_node *new_root = cs->caller->global.inlined_to
3412 ? cs->caller->global.inlined_to : cs->caller;
3413 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3414 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3415 int count, i;
3416
3417 count = MIN (ipa_get_cs_argument_count (args),
3418 ipa_get_param_count (old_root_info));
3419 for (i = 0; i < count; i++)
3420 {
3421 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3422 struct ipa_cst_ref_desc *rdesc;
3423
3424 if (jf->type == IPA_JF_PASS_THROUGH)
3425 {
3426 int src_idx, c, d;
3427 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3428 c = ipa_get_controlled_uses (new_root_info, src_idx);
3429 d = ipa_get_controlled_uses (old_root_info, i);
3430
3431 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3432 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3433 c = combine_controlled_uses_counters (c, d);
3434 ipa_set_controlled_uses (new_root_info, src_idx, c);
3435 if (c == 0 && new_root_info->ipcp_orig_node)
3436 {
3437 struct cgraph_node *n;
3438 struct ipa_ref *ref;
3439 tree t = new_root_info->known_csts[src_idx];
3440
3441 if (t && TREE_CODE (t) == ADDR_EXPR
3442 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3443 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3444 && (ref = new_root->find_reference (n, NULL, 0)))
3445 {
3446 if (dump_file)
3447 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3448 "reference from %s/%i to %s/%i.\n",
3449 xstrdup_for_dump (new_root->name ()),
3450 new_root->order,
3451 xstrdup_for_dump (n->name ()), n->order);
3452 ref->remove_reference ();
3453 }
3454 }
3455 }
3456 else if (jf->type == IPA_JF_CONST
3457 && (rdesc = jfunc_rdesc_usable (jf)))
3458 {
3459 int d = ipa_get_controlled_uses (old_root_info, i);
3460 int c = rdesc->refcount;
3461 rdesc->refcount = combine_controlled_uses_counters (c, d);
3462 if (rdesc->refcount == 0)
3463 {
3464 tree cst = ipa_get_jf_constant (jf);
3465 struct cgraph_node *n;
3466 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3467 && TREE_CODE (TREE_OPERAND (cst, 0))
3468 == FUNCTION_DECL);
3469 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3470 if (n)
3471 {
3472 struct cgraph_node *clone;
3473 bool ok;
3474 ok = remove_described_reference (n, rdesc);
3475 gcc_checking_assert (ok);
3476
3477 clone = cs->caller;
3478 while (clone->global.inlined_to
3479 && clone != rdesc->cs->caller
3480 && IPA_NODE_REF (clone)->ipcp_orig_node)
3481 {
3482 struct ipa_ref *ref;
3483 ref = clone->find_reference (n, NULL, 0);
3484 if (ref)
3485 {
3486 if (dump_file)
3487 fprintf (dump_file, "ipa-prop: Removing "
3488 "cloning-created reference "
3489 "from %s/%i to %s/%i.\n",
3490 xstrdup_for_dump (clone->name ()),
3491 clone->order,
3492 xstrdup_for_dump (n->name ()),
3493 n->order);
3494 ref->remove_reference ();
3495 }
3496 clone = clone->callers->caller;
3497 }
3498 }
3499 }
3500 }
3501 }
3502
3503 for (i = ipa_get_param_count (old_root_info);
3504 i < ipa_get_cs_argument_count (args);
3505 i++)
3506 {
3507 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3508
3509 if (jf->type == IPA_JF_CONST)
3510 {
3511 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3512 if (rdesc)
3513 rdesc->refcount = IPA_UNDESCRIBED_USE;
3514 }
3515 else if (jf->type == IPA_JF_PASS_THROUGH)
3516 ipa_set_controlled_uses (new_root_info,
3517 jf->value.pass_through.formal_id,
3518 IPA_UNDESCRIBED_USE);
3519 }
3520 }
3521
3522 /* Update jump functions and call note functions on inlining the call site CS.
3523 CS is expected to lead to a node already cloned by
3524 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3525 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3526 created. */
3527
3528 bool
3529 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3530 vec<cgraph_edge *> *new_edges)
3531 {
3532 bool changed;
3533 /* Do nothing if the preparation phase has not been carried out yet
3534 (i.e. during early inlining). */
3535 if (!ipa_node_params_sum)
3536 return false;
3537 gcc_assert (ipa_edge_args_vector);
3538
3539 propagate_controlled_uses (cs);
3540 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3541
3542 return changed;
3543 }
3544
3545 /* Frees all dynamically allocated structures that the argument info points
3546 to. */
3547
3548 void
3549 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3550 {
3551 vec_free (args->jump_functions);
3552 memset (args, 0, sizeof (*args));
3553 }
3554
3555 /* Free all ipa_edge structures. */
3556
3557 void
3558 ipa_free_all_edge_args (void)
3559 {
3560 int i;
3561 struct ipa_edge_args *args;
3562
3563 if (!ipa_edge_args_vector)
3564 return;
3565
3566 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3567 ipa_free_edge_args_substructures (args);
3568
3569 vec_free (ipa_edge_args_vector);
3570 }
3571
3572 /* Free all ipa_node_params structures. */
3573
3574 void
3575 ipa_free_all_node_params (void)
3576 {
3577 ipa_node_params_sum = NULL;
3578 }
3579
3580 /* Grow ipcp_transformations if necessary. */
3581
3582 void
3583 ipcp_grow_transformations_if_necessary (void)
3584 {
3585 if (vec_safe_length (ipcp_transformations)
3586 <= (unsigned) symtab->cgraph_max_uid)
3587 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3588 }
3589
3590 /* Set the aggregate replacements of NODE to be AGGVALS. */
3591
3592 void
3593 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3594 struct ipa_agg_replacement_value *aggvals)
3595 {
3596 ipcp_grow_transformations_if_necessary ();
3597 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3598 }
3599
3600 /* Hook that is called by cgraph.c when an edge is removed. */
3601
3602 static void
3603 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3604 {
3605 struct ipa_edge_args *args;
3606
3607 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3608 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3609 return;
3610
3611 args = IPA_EDGE_REF (cs);
3612 if (args->jump_functions)
3613 {
3614 struct ipa_jump_func *jf;
3615 int i;
3616 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3617 {
3618 struct ipa_cst_ref_desc *rdesc;
3619 try_decrement_rdesc_refcount (jf);
3620 if (jf->type == IPA_JF_CONST
3621 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3622 && rdesc->cs == cs)
3623 rdesc->cs = NULL;
3624 }
3625 }
3626
3627 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3628 }
3629
3630 /* Hook that is called by cgraph.c when an edge is duplicated. */
3631
3632 static void
3633 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3634 void *)
3635 {
3636 struct ipa_edge_args *old_args, *new_args;
3637 unsigned int i;
3638
3639 ipa_check_create_edge_args ();
3640
3641 old_args = IPA_EDGE_REF (src);
3642 new_args = IPA_EDGE_REF (dst);
3643
3644 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3645 if (old_args->polymorphic_call_contexts)
3646 new_args->polymorphic_call_contexts
3647 = vec_safe_copy (old_args->polymorphic_call_contexts);
3648
3649 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3650 {
3651 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3652 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3653
3654 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3655
3656 if (src_jf->type == IPA_JF_CONST)
3657 {
3658 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3659
3660 if (!src_rdesc)
3661 dst_jf->value.constant.rdesc = NULL;
3662 else if (src->caller == dst->caller)
3663 {
3664 struct ipa_ref *ref;
3665 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3666 gcc_checking_assert (n);
3667 ref = src->caller->find_reference (n, src->call_stmt,
3668 src->lto_stmt_uid);
3669 gcc_checking_assert (ref);
3670 dst->caller->clone_reference (ref, ref->stmt);
3671
3672 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3673 dst_rdesc->cs = dst;
3674 dst_rdesc->refcount = src_rdesc->refcount;
3675 dst_rdesc->next_duplicate = NULL;
3676 dst_jf->value.constant.rdesc = dst_rdesc;
3677 }
3678 else if (src_rdesc->cs == src)
3679 {
3680 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3681 dst_rdesc->cs = dst;
3682 dst_rdesc->refcount = src_rdesc->refcount;
3683 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3684 src_rdesc->next_duplicate = dst_rdesc;
3685 dst_jf->value.constant.rdesc = dst_rdesc;
3686 }
3687 else
3688 {
3689 struct ipa_cst_ref_desc *dst_rdesc;
3690 /* This can happen during inlining, when a JFUNC can refer to a
3691 reference taken in a function up in the tree of inline clones.
3692 We need to find the duplicate that refers to our tree of
3693 inline clones. */
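/* For instance (a sketch of the situation, not code): if the reference
was taken in function A, and A was inlined both into B and into C,
SRC_RDESC carries one duplicate per tree of inline clones on its
next_duplicate chain; the loop below selects the duplicate whose
root caller (the inlined_to node) matches the tree DST belongs to. */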
3694
3695 gcc_assert (dst->caller->global.inlined_to);
3696 for (dst_rdesc = src_rdesc->next_duplicate;
3697 dst_rdesc;
3698 dst_rdesc = dst_rdesc->next_duplicate)
3699 {
3700 struct cgraph_node *top;
3701 top = dst_rdesc->cs->caller->global.inlined_to
3702 ? dst_rdesc->cs->caller->global.inlined_to
3703 : dst_rdesc->cs->caller;
3704 if (dst->caller->global.inlined_to == top)
3705 break;
3706 }
3707 gcc_assert (dst_rdesc);
3708 dst_jf->value.constant.rdesc = dst_rdesc;
3709 }
3710 }
3711 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3712 && src->caller == dst->caller)
3713 {
3714 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3715 ? dst->caller->global.inlined_to : dst->caller;
3716 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3717 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3718
3719 int c = ipa_get_controlled_uses (root_info, idx);
3720 if (c != IPA_UNDESCRIBED_USE)
3721 {
3722 c++;
3723 ipa_set_controlled_uses (root_info, idx, c);
3724 }
3725 }
3726 }
3727 }
3728
3729 /* Analyze a function newly added to the callgraph. */
3730
3731 static void
3732 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3733 {
3734 if (node->has_gimple_body_p ())
3735 ipa_analyze_node (node);
3736 }
3737
3738 /* Initialize a newly created param info. */
3739
3740 void
3741 ipa_node_params_t::insert (cgraph_node *, ipa_node_params *info)
3742 {
3743 info->lattices = NULL;
3744 info->ipcp_orig_node = NULL;
3745 info->analysis_done = 0;
3746 info->node_enqueued = 0;
3747 info->do_clone_for_all_contexts = 0;
3748 info->is_all_contexts_clone = 0;
3749 info->node_dead = 0;
3750 info->node_within_scc = 0;
3751 info->node_calling_single_call = 0;
3752 info->versionable = 0;
3753 }
3754
3755 /* Frees all dynamically allocated structures that the param info points
3756 to. */
3757
3758 void
3759 ipa_node_params_t::remove (cgraph_node *, ipa_node_params *info)
3760 {
3761 free (info->lattices);
3762 /* Lattice values and their sources are deallocated with their allocation
3763 pool. */
3764 info->known_csts.release ();
3765 info->known_contexts.release ();
3766 }
3767
3768 /* Hook that is called by summary when a node is duplicated. */
3769
3770 void
3771 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3772 ipa_node_params *old_info,
3773 ipa_node_params *new_info)
3774 {
3775 ipa_agg_replacement_value *old_av, *new_av;
3776
3777 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3778 new_info->lattices = NULL;
3779 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3780 new_info->known_csts = old_info->known_csts.copy ();
3781 new_info->known_contexts = old_info->known_contexts.copy ();
3782
3783 new_info->analysis_done = old_info->analysis_done;
3784 new_info->node_enqueued = old_info->node_enqueued;
3785 new_info->versionable = old_info->versionable;
3786
3787 old_av = ipa_get_agg_replacements_for_node (src);
3788 if (old_av)
3789 {
3790 new_av = NULL;
3791 while (old_av)
3792 {
3793 struct ipa_agg_replacement_value *v;
3794
3795 v = ggc_alloc<ipa_agg_replacement_value> ();
3796 memcpy (v, old_av, sizeof (*v));
3797 v->next = new_av;
3798 new_av = v;
3799 old_av = old_av->next;
3800 }
3801 ipa_set_node_agg_value_chain (dst, new_av);
3802 }
3803
3804 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3805
3806 if (src_trans)
3807 {
3808 ipcp_grow_transformations_if_necessary ();
3809 src_trans = ipcp_get_transformation_summary (src);
3810 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3811 vec<ipa_vr, va_gc> *&dst_vr
3812 = ipcp_get_transformation_summary (dst)->m_vr;
3813 if (vec_safe_length (src_trans->m_vr) > 0)
3814 {
3815 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3816 for (unsigned i = 0; i < src_vr->length (); ++i)
3817 dst_vr->quick_push ((*src_vr)[i]);
3818 }
3819 }
3820
3821 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3822 {
3823 ipcp_grow_transformations_if_necessary ();
3824 src_trans = ipcp_get_transformation_summary (src);
3825 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3826 vec<ipa_bits, va_gc> *&dst_bits
3827 = ipcp_get_transformation_summary (dst)->bits;
3828 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3829 for (unsigned i = 0; i < src_bits->length (); ++i)
3830 dst_bits->quick_push ((*src_bits)[i]);
3831 }
3832 }
3833
3834 /* Register our cgraph hooks if they are not already there. */
3835
3836 void
3837 ipa_register_cgraph_hooks (void)
3838 {
3839 ipa_check_create_node_params ();
3840
3841 if (!edge_removal_hook_holder)
3842 edge_removal_hook_holder =
3843 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3844 if (!edge_duplication_hook_holder)
3845 edge_duplication_hook_holder =
3846 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3847 function_insertion_hook_holder =
3848 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3849 }
3850
3851 /* Unregister our cgraph hooks. */
3852
3853 static void
3854 ipa_unregister_cgraph_hooks (void)
3855 {
3856 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3857 edge_removal_hook_holder = NULL;
3858 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3859 edge_duplication_hook_holder = NULL;
3860 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3861 function_insertion_hook_holder = NULL;
3862 }
3863
3864 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3865 longer needed after ipa-cp. */
3866
3867 void
3868 ipa_free_all_structures_after_ipa_cp (void)
3869 {
3870 if (!optimize && !in_lto_p)
3871 {
3872 ipa_free_all_edge_args ();
3873 ipa_free_all_node_params ();
3874 ipcp_sources_pool.release ();
3875 ipcp_cst_values_pool.release ();
3876 ipcp_poly_ctx_values_pool.release ();
3877 ipcp_agg_lattice_pool.release ();
3878 ipa_unregister_cgraph_hooks ();
3879 ipa_refdesc_pool.release ();
3880 }
3881 }
3882
3883 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3884 longer needed after indirect inlining. */
3885
3886 void
3887 ipa_free_all_structures_after_iinln (void)
3888 {
3889 ipa_free_all_edge_args ();
3890 ipa_free_all_node_params ();
3891 ipa_unregister_cgraph_hooks ();
3892 ipcp_sources_pool.release ();
3893 ipcp_cst_values_pool.release ();
3894 ipcp_poly_ctx_values_pool.release ();
3895 ipcp_agg_lattice_pool.release ();
3896 ipa_refdesc_pool.release ();
3897 }
3898
3899 /* Print the parameter descriptors of function NODE, including use and
3900 controlled-use information, to F. */
3901
3902 void
3903 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3904 {
3905 int i, count;
3906 struct ipa_node_params *info;
3907
3908 if (!node->definition)
3909 return;
3910 info = IPA_NODE_REF (node);
3911 fprintf (f, " function %s/%i parameter descriptors:\n",
3912 node->name (), node->order);
3913 count = ipa_get_param_count (info);
3914 for (i = 0; i < count; i++)
3915 {
3916 int c;
3917
3918 fprintf (f, " ");
3919 ipa_dump_param (f, info, i);
3920 if (ipa_is_param_used (info, i))
3921 fprintf (f, " used");
3922 c = ipa_get_controlled_uses (info, i);
3923 if (c == IPA_UNDESCRIBED_USE)
3924 fprintf (f, " undescribed_use");
3925 else
3926 fprintf (f, " controlled_uses=%i", c);
3927 fprintf (f, "\n");
3928 }
3929 }
3930
3931 /* Print the parameter descriptors of all functions in the
3932 callgraph to F. */
3933
3934 void
3935 ipa_print_all_params (FILE * f)
3936 {
3937 struct cgraph_node *node;
3938
3939 fprintf (f, "\nFunction parameters:\n");
3940 FOR_EACH_FUNCTION (node)
3941 ipa_print_node_params (f, node);
3942 }
3943
3944 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3945
3946 vec<tree>
3947 ipa_get_vector_of_formal_parms (tree fndecl)
3948 {
3949 vec<tree> args;
3950 int count;
3951 tree parm;
3952
3953 gcc_assert (!flag_wpa);
3954 count = count_formal_params (fndecl);
3955 args.create (count);
3956 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3957 args.quick_push (parm);
3958
3959 return args;
3960 }
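/* A minimal usage sketch (the caller owns the vector and must release it,
as ipa_dump_param_adjustments below does):

vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
for (unsigned i = 0; i < parms.length (); i++)
print_generic_expr (file, parms[i], 0);
parms.release (); */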
3961
3962 /* Return a heap-allocated vector containing the types of the formal
3963 parameters of function type FNTYPE. */
3964
3965 vec<tree>
3966 ipa_get_vector_of_formal_parm_types (tree fntype)
3967 {
3968 vec<tree> types;
3969 int count = 0;
3970 tree t;
3971
3972 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3973 count++;
3974
3975 types.create (count);
3976 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3977 types.quick_push (TREE_VALUE (t));
3978
3979 return types;
3980 }
3981
3982 /* Modify the function declaration FNDECL and its type according to the plan in
3983 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3984 to reflect the actual parameters being modified, which are determined by the
3985 base_index field. */
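/* For example (a hypothetical plan, for illustration only): given
void foo (int a, struct S *p), the adjustment vector
{ { op=IPA_PARM_OP_COPY, base_index=0 },
{ op=IPA_PARM_OP_REMOVE, base_index=1 } }
keeps A and drops P, producing the new signature void foo (int a). */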
3986
3987 void
3988 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3989 {
3990 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3991 tree orig_type = TREE_TYPE (fndecl);
3992 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3993
3994 /* The following test is an ugly hack; some functions simply don't have any
3995 arguments in their type. This is probably a bug but well... */
3996 bool care_for_types = (old_arg_types != NULL_TREE);
3997 bool last_parm_void;
3998 vec<tree> otypes;
3999 if (care_for_types)
4000 {
4001 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4002 == void_type_node);
4003 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4004 if (last_parm_void)
4005 gcc_assert (oparms.length () + 1 == otypes.length ());
4006 else
4007 gcc_assert (oparms.length () == otypes.length ());
4008 }
4009 else
4010 {
4011 last_parm_void = false;
4012 otypes.create (0);
4013 }
4014
4015 int len = adjustments.length ();
4016 tree *link = &DECL_ARGUMENTS (fndecl);
4017 tree new_arg_types = NULL;
4018 for (int i = 0; i < len; i++)
4019 {
4020 struct ipa_parm_adjustment *adj;
4021 gcc_assert (link);
4022
4023 adj = &adjustments[i];
4024 tree parm;
4025 if (adj->op == IPA_PARM_OP_NEW)
4026 parm = NULL;
4027 else
4028 parm = oparms[adj->base_index];
4029 adj->base = parm;
4030
4031 if (adj->op == IPA_PARM_OP_COPY)
4032 {
4033 if (care_for_types)
4034 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4035 new_arg_types);
4036 *link = parm;
4037 link = &DECL_CHAIN (parm);
4038 }
4039 else if (adj->op != IPA_PARM_OP_REMOVE)
4040 {
4041 tree new_parm;
4042 tree ptype;
4043
4044 if (adj->by_ref)
4045 ptype = build_pointer_type (adj->type);
4046 else
4047 {
4048 ptype = adj->type;
4049 if (is_gimple_reg_type (ptype))
4050 {
4051 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4052 if (TYPE_ALIGN (ptype) != malign)
4053 ptype = build_aligned_type (ptype, malign);
4054 }
4055 }
4056
4057 if (care_for_types)
4058 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4059
4060 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4061 ptype);
4062 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4063 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4064 DECL_ARTIFICIAL (new_parm) = 1;
4065 DECL_ARG_TYPE (new_parm) = ptype;
4066 DECL_CONTEXT (new_parm) = fndecl;
4067 TREE_USED (new_parm) = 1;
4068 DECL_IGNORED_P (new_parm) = 1;
4069 layout_decl (new_parm, 0);
4070
4071 if (adj->op == IPA_PARM_OP_NEW)
4072 adj->base = NULL;
4073 else
4074 adj->base = parm;
4075 adj->new_decl = new_parm;
4076
4077 *link = new_parm;
4078 link = &DECL_CHAIN (new_parm);
4079 }
4080 }
4081
4082 *link = NULL_TREE;
4083
4084 tree new_reversed = NULL;
4085 if (care_for_types)
4086 {
4087 new_reversed = nreverse (new_arg_types);
4088 if (last_parm_void)
4089 {
4090 if (new_reversed)
4091 TREE_CHAIN (new_arg_types) = void_list_node;
4092 else
4093 new_reversed = void_list_node;
4094 }
4095 }
4096
4097 /* Use copy_node to preserve as much as possible from the original type
4098 (debug info, attribute lists etc.).
4099 The exception is METHOD_TYPE, which must have a THIS argument; when we
4100 are asked to remove it, we need to build a new FUNCTION_TYPE
4101 instead. */
4102 tree new_type = NULL;
4103 if (TREE_CODE (orig_type) != METHOD_TYPE
4104 || (adjustments[0].op == IPA_PARM_OP_COPY
4105 && adjustments[0].base_index == 0))
4106 {
4107 new_type = build_distinct_type_copy (orig_type);
4108 TYPE_ARG_TYPES (new_type) = new_reversed;
4109 }
4110 else
4111 {
4112 new_type
4113 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4114 new_reversed));
4115 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4116 DECL_VINDEX (fndecl) = NULL_TREE;
4117 }
4118
4119 /* When signature changes, we need to clear builtin info. */
4120 if (DECL_BUILT_IN (fndecl))
4121 {
4122 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4123 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4124 }
4125
4126 TREE_TYPE (fndecl) = new_type;
4127 DECL_VIRTUAL_P (fndecl) = 0;
4128 DECL_LANG_SPECIFIC (fndecl) = NULL;
4129 otypes.release ();
4130 oparms.release ();
4131 }
4132
4133 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4134 If this is a directly recursive call, CS must be NULL. Otherwise it must
4135 contain the corresponding call graph edge. */
4136
4137 void
4138 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4139 ipa_parm_adjustment_vec adjustments)
4140 {
4141 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4142 vec<tree> vargs;
4143 vec<tree, va_gc> **debug_args = NULL;
4144 gcall *new_stmt;
4145 gimple_stmt_iterator gsi, prev_gsi;
4146 tree callee_decl;
4147 int i, len;
4148
4149 len = adjustments.length ();
4150 vargs.create (len);
4151 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4152 current_node->remove_stmt_references (stmt);
4153
4154 gsi = gsi_for_stmt (stmt);
4155 prev_gsi = gsi;
4156 gsi_prev (&prev_gsi);
4157 for (i = 0; i < len; i++)
4158 {
4159 struct ipa_parm_adjustment *adj;
4160
4161 adj = &adjustments[i];
4162
4163 if (adj->op == IPA_PARM_OP_COPY)
4164 {
4165 tree arg = gimple_call_arg (stmt, adj->base_index);
4166
4167 vargs.quick_push (arg);
4168 }
4169 else if (adj->op != IPA_PARM_OP_REMOVE)
4170 {
4171 tree expr, base, off;
4172 location_t loc;
4173 unsigned int deref_align = 0;
4174 bool deref_base = false;
4175
4176 /* We create a new parameter out of the value of the old one; we can
4177 do the following kinds of transformations:
4178
4179 - A scalar passed by reference is converted to a scalar passed by
4180 value. (adj->by_ref is false and the type of the original
4181 actual argument is a pointer to a scalar).
4182
4183 - A part of an aggregate is passed instead of the whole aggregate.
4184 The part can be passed either by value or by reference, this is
4185 determined by value of adj->by_ref. Moreover, the code below
4186 handles both situations when the original aggregate is passed by
4187 value (its type is not a pointer) and when it is passed by
4188 reference (it is a pointer to an aggregate).
4189
4190 When the new argument is passed by reference (adj->by_ref is true)
4191 it must be a part of an aggregate and therefore we form it by
4192 simply taking the address of a reference inside the original
4193 aggregate. */
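/* A concrete sketch (hypothetical values, for illustration): for a call
foo (&s) where only the scalar field at byte offset 4 of S is used, a
by-value adjustment loads that field through a MEM_REF and passes the
loaded value, whereas a by-ref adjustment passes the address of the
field, i.e. the original pointer plus an offset of 4 bytes. */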
4194
4195 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4196 base = gimple_call_arg (stmt, adj->base_index);
4197 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4198 : EXPR_LOCATION (base);
4199
4200 if (TREE_CODE (base) != ADDR_EXPR
4201 && POINTER_TYPE_P (TREE_TYPE (base)))
4202 off = build_int_cst (adj->alias_ptr_type,
4203 adj->offset / BITS_PER_UNIT);
4204 else
4205 {
4206 HOST_WIDE_INT base_offset;
4207 tree prev_base;
4208 bool addrof;
4209
4210 if (TREE_CODE (base) == ADDR_EXPR)
4211 {
4212 base = TREE_OPERAND (base, 0);
4213 addrof = true;
4214 }
4215 else
4216 addrof = false;
4217 prev_base = base;
4218 base = get_addr_base_and_unit_offset (base, &base_offset);
4219 /* Aggregate arguments can have non-invariant addresses. */
4220 if (!base)
4221 {
4222 base = build_fold_addr_expr (prev_base);
4223 off = build_int_cst (adj->alias_ptr_type,
4224 adj->offset / BITS_PER_UNIT);
4225 }
4226 else if (TREE_CODE (base) == MEM_REF)
4227 {
4228 if (!addrof)
4229 {
4230 deref_base = true;
4231 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4232 }
4233 off = build_int_cst (adj->alias_ptr_type,
4234 base_offset
4235 + adj->offset / BITS_PER_UNIT);
4236 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4237 off);
4238 base = TREE_OPERAND (base, 0);
4239 }
4240 else
4241 {
4242 off = build_int_cst (adj->alias_ptr_type,
4243 base_offset
4244 + adj->offset / BITS_PER_UNIT);
4245 base = build_fold_addr_expr (base);
4246 }
4247 }
4248
4249 if (!adj->by_ref)
4250 {
4251 tree type = adj->type;
4252 unsigned int align;
4253 unsigned HOST_WIDE_INT misalign;
4254
4255 if (deref_base)
4256 {
4257 align = deref_align;
4258 misalign = 0;
4259 }
4260 else
4261 {
4262 get_pointer_alignment_1 (base, &align, &misalign);
4263 if (TYPE_ALIGN (type) > align)
4264 align = TYPE_ALIGN (type);
4265 }
4266 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4267 * BITS_PER_UNIT);
4268 misalign = misalign & (align - 1);
4269 if (misalign != 0)
4270 align = least_bit_hwi (misalign);
4271 if (align < TYPE_ALIGN (type))
4272 type = build_aligned_type (type, align);
4273 base = force_gimple_operand_gsi (&gsi, base,
4274 true, NULL, true, GSI_SAME_STMT);
4275 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4276 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4277 /* If EXPR is not a valid gimple call argument, emit
4278 a load into a temporary. */
4279 if (is_gimple_reg_type (TREE_TYPE (expr)))
4280 {
4281 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4282 if (gimple_in_ssa_p (cfun))
4283 {
4284 gimple_set_vuse (tem, gimple_vuse (stmt));
4285 expr = make_ssa_name (TREE_TYPE (expr), tem);
4286 }
4287 else
4288 expr = create_tmp_reg (TREE_TYPE (expr));
4289 gimple_assign_set_lhs (tem, expr);
4290 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4291 }
4292 }
4293 else
4294 {
4295 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4296 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4297 expr = build_fold_addr_expr (expr);
4298 expr = force_gimple_operand_gsi (&gsi, expr,
4299 true, NULL, true, GSI_SAME_STMT);
4300 }
4301 vargs.quick_push (expr);
4302 }
4303 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4304 {
4305 unsigned int ix;
4306 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4307 gimple *def_temp;
4308
4309 arg = gimple_call_arg (stmt, adj->base_index);
4310 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4311 {
4312 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4313 continue;
4314 arg = fold_convert_loc (gimple_location (stmt),
4315 TREE_TYPE (origin), arg);
4316 }
4317 if (debug_args == NULL)
4318 debug_args = decl_debug_args_insert (callee_decl);
4319 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4320 if (ddecl == origin)
4321 {
4322 ddecl = (**debug_args)[ix + 1];
4323 break;
4324 }
4325 if (ddecl == NULL)
4326 {
4327 ddecl = make_node (DEBUG_EXPR_DECL);
4328 DECL_ARTIFICIAL (ddecl) = 1;
4329 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4330 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4331
4332 vec_safe_push (*debug_args, origin);
4333 vec_safe_push (*debug_args, ddecl);
4334 }
4335 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4336 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4337 }
4338 }
4339
4340 if (dump_file && (dump_flags & TDF_DETAILS))
4341 {
4342 fprintf (dump_file, "replacing stmt:");
4343 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4344 }
4345
4346 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4347 vargs.release ();
4348 if (gimple_call_lhs (stmt))
4349 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4350
4351 gimple_set_block (new_stmt, gimple_block (stmt));
4352 if (gimple_has_location (stmt))
4353 gimple_set_location (new_stmt, gimple_location (stmt));
4354 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4355 gimple_call_copy_flags (new_stmt, stmt);
4356 if (gimple_in_ssa_p (cfun))
4357 {
4358 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4359 if (gimple_vdef (stmt))
4360 {
4361 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4362 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4363 }
4364 }
4365
4366 if (dump_file && (dump_flags & TDF_DETAILS))
4367 {
4368 fprintf (dump_file, "with stmt:");
4369 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4370 fprintf (dump_file, "\n");
4371 }
4372 gsi_replace (&gsi, new_stmt, true);
4373 if (cs)
4374 cs->set_call_stmt (new_stmt);
4375 do
4376 {
4377 current_node->record_stmt_references (gsi_stmt (gsi));
4378 gsi_prev (&gsi);
4379 }
4380 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4381 }
4382
4383 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4384 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4385 specifies whether the function should care about type incompatibility between
4386 the current and new expressions. If it is false, the function will leave
4387 incompatibility issues to the caller. Return true iff the expression
4388 was modified. */
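/* For example (a sketch): if parameter P of an aggregate type was replaced
by a new scalar parameter for its field F, an occurrence of the field in
the function body has *EXPR rewritten to the new decl (or to a
dereference of it when the replacement is passed by reference), wrapped
in a VIEW_CONVERT_EXPR when CONVERT is true and the types differ. */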
4389
4390 bool
4391 ipa_modify_expr (tree *expr, bool convert,
4392 ipa_parm_adjustment_vec adjustments)
4393 {
4394 struct ipa_parm_adjustment *cand
4395 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4396 if (!cand)
4397 return false;
4398
4399 tree src;
4400 if (cand->by_ref)
4401 {
4402 src = build_simple_mem_ref (cand->new_decl);
4403 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4404 }
4405 else
4406 src = cand->new_decl;
4407
4408 if (dump_file && (dump_flags & TDF_DETAILS))
4409 {
4410 fprintf (dump_file, "About to replace expr ");
4411 print_generic_expr (dump_file, *expr, 0);
4412 fprintf (dump_file, " with ");
4413 print_generic_expr (dump_file, src, 0);
4414 fprintf (dump_file, "\n");
4415 }
4416
4417 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4418 {
4419 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4420 *expr = vce;
4421 }
4422 else
4423 *expr = src;
4424 return true;
4425 }
4426
4427 /* If T is an SSA_NAME, return NULL if it is not a default def or
4428 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4429 the base variable is always returned, regardless if it is a default
4430 def. Return T if it is not an SSA_NAME. */
4431
4432 static tree
4433 get_ssa_base_param (tree t, bool ignore_default_def)
4434 {
4435 if (TREE_CODE (t) == SSA_NAME)
4436 {
4437 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4438 return SSA_NAME_VAR (t);
4439 else
4440 return NULL_TREE;
4441 }
4442 return t;
4443 }
4444
4445 /* Given an expression, return an adjustment entry specifying the
4446 transformation to be done on EXPR. If no suitable adjustment entry
4447 was found, returns NULL.
4448
4449 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4450 default def, otherwise bail on them.
4451
4452 If CONVERT is non-NULL, this function will set *CONVERT if the
4453 expression provided is a component reference. ADJUSTMENTS is the
4454 adjustments vector. */
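/* For example (a sketch): for **EXPR of the form parm.fld, the bit offset
of FLD within PARM is computed with get_ref_base_and_extent and the
adjustment with a matching base and offset is returned; COPY and REMOVE
entries never yield a candidate. */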
4455
4456 ipa_parm_adjustment *
4457 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4458 ipa_parm_adjustment_vec adjustments,
4459 bool ignore_default_def)
4460 {
4461 if (TREE_CODE (**expr) == BIT_FIELD_REF
4462 || TREE_CODE (**expr) == IMAGPART_EXPR
4463 || TREE_CODE (**expr) == REALPART_EXPR)
4464 {
4465 *expr = &TREE_OPERAND (**expr, 0);
4466 if (convert)
4467 *convert = true;
4468 }
4469
4470 HOST_WIDE_INT offset, size, max_size;
4471 bool reverse;
4472 tree base
4473 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4474 if (!base || size == -1 || max_size == -1)
4475 return NULL;
4476
4477 if (TREE_CODE (base) == MEM_REF)
4478 {
4479 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4480 base = TREE_OPERAND (base, 0);
4481 }
4482
4483 base = get_ssa_base_param (base, ignore_default_def);
4484 if (!base || TREE_CODE (base) != PARM_DECL)
4485 return NULL;
4486
4487 struct ipa_parm_adjustment *cand = NULL;
4488 unsigned int len = adjustments.length ();
4489 for (unsigned i = 0; i < len; i++)
4490 {
4491 struct ipa_parm_adjustment *adj = &adjustments[i];
4492
4493 if (adj->base == base
4494 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4495 {
4496 cand = adj;
4497 break;
4498 }
4499 }
4500
4501 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4502 return NULL;
4503 return cand;
4504 }
4505
4506 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4507
4508 static bool
4509 index_in_adjustments_multiple_times_p (int base_index,
4510 ipa_parm_adjustment_vec adjustments)
4511 {
4512 int i, len = adjustments.length ();
4513 bool one = false;
4514
4515 for (i = 0; i < len; i++)
4516 {
4517 struct ipa_parm_adjustment *adj;
4518 adj = &adjustments[i];
4519
4520 if (adj->base_index == base_index)
4521 {
4522 if (one)
4523 return true;
4524 else
4525 one = true;
4526 }
4527 }
4528 return false;
4529 }
4530
4531
4532 /* Return adjustments that should have the same effect on function parameters
4533 and call arguments as if they were first changed according to adjustments in
4534 INNER and then by adjustments in OUTER. */
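/* A small worked example (hypothetical): if INNER copies parameters 0 and
2 and removes parameter 1, and OUTER (which sees only the two surviving
parameters) removes its parameter 1, the combined vector copies original
parameter 0 and removes original parameters 1 and 2. */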
4535
4536 ipa_parm_adjustment_vec
4537 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4538 ipa_parm_adjustment_vec outer)
4539 {
4540 int i, outlen = outer.length ();
4541 int inlen = inner.length ();
4542 int removals = 0;
4543 ipa_parm_adjustment_vec adjustments, tmp;
4544
4545 tmp.create (inlen);
4546 for (i = 0; i < inlen; i++)
4547 {
4548 struct ipa_parm_adjustment *n;
4549 n = &inner[i];
4550
4551 if (n->op == IPA_PARM_OP_REMOVE)
4552 removals++;
4553 else
4554 {
4555 /* FIXME: Handling of new arguments is not implemented yet. */
4556 gcc_assert (n->op != IPA_PARM_OP_NEW);
4557 tmp.quick_push (*n);
4558 }
4559 }
4560
4561 adjustments.create (outlen + removals);
4562 for (i = 0; i < outlen; i++)
4563 {
4564 struct ipa_parm_adjustment r;
4565 struct ipa_parm_adjustment *out = &outer[i];
4566 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4567
4568 memset (&r, 0, sizeof (r));
4569 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4570 if (out->op == IPA_PARM_OP_REMOVE)
4571 {
4572 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4573 {
4574 r.op = IPA_PARM_OP_REMOVE;
4575 adjustments.quick_push (r);
4576 }
4577 continue;
4578 }
4579 else
4580 {
4581 /* FIXME: Handling of new arguments is not implemented yet. */
4582 gcc_assert (out->op != IPA_PARM_OP_NEW);
4583 }
4584
4585 r.base_index = in->base_index;
4586 r.type = out->type;
4587
4588 /* FIXME: Create nonlocal value too. */
4589
4590 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4591 r.op = IPA_PARM_OP_COPY;
4592 else if (in->op == IPA_PARM_OP_COPY)
4593 r.offset = out->offset;
4594 else if (out->op == IPA_PARM_OP_COPY)
4595 r.offset = in->offset;
4596 else
4597 r.offset = in->offset + out->offset;
4598 adjustments.quick_push (r);
4599 }
4600
4601 for (i = 0; i < inlen; i++)
4602 {
4603 struct ipa_parm_adjustment *n = &inner[i];
4604
4605 if (n->op == IPA_PARM_OP_REMOVE)
4606 adjustments.quick_push (*n);
4607 }
4608
4609 tmp.release ();
4610 return adjustments;
4611 }
4612
4613 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4614 human-friendly way, assuming they are meant to be applied to FNDECL. */
4615
4616 void
4617 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4618 tree fndecl)
4619 {
4620 int i, len = adjustments.length ();
4621 bool first = true;
4622 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4623
4624 fprintf (file, "IPA param adjustments: ");
4625 for (i = 0; i < len; i++)
4626 {
4627 struct ipa_parm_adjustment *adj;
4628 adj = &adjustments[i];
4629
4630 if (!first)
4631 fprintf (file, " ");
4632 else
4633 first = false;
4634
4635 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4636 print_generic_expr (file, parms[adj->base_index], 0);
4637 if (adj->base)
4638 {
4639 fprintf (file, ", base: ");
4640 print_generic_expr (file, adj->base, 0);
4641 }
4642 if (adj->new_decl)
4643 {
4644 fprintf (file, ", new_decl: ");
4645 print_generic_expr (file, adj->new_decl, 0);
4646 }
4647 if (adj->new_ssa_base)
4648 {
4649 fprintf (file, ", new_ssa_base: ");
4650 print_generic_expr (file, adj->new_ssa_base, 0);
4651 }
4652
4653 if (adj->op == IPA_PARM_OP_COPY)
4654 fprintf (file, ", copy_param");
4655 else if (adj->op == IPA_PARM_OP_REMOVE)
4656 fprintf (file, ", remove_param");
4657 else
4658 fprintf (file, ", offset %li", (long) adj->offset);
4659 if (adj->by_ref)
4660 fprintf (file, ", by_ref");
4661 print_node_brief (file, ", type: ", adj->type, 0);
4662 fprintf (file, "\n");
4663 }
4664 parms.release ();
4665 }
4666
4667 /* Dump the AV linked list. */
4668
4669 void
4670 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4671 {
4672 bool comma = false;
4673 fprintf (f, " Aggregate replacements:");
4674 for (; av; av = av->next)
4675 {
4676 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4677 av->index, av->offset);
4678 print_generic_expr (f, av->value, 0);
4679 comma = true;
4680 }
4681 fprintf (f, "\n");
4682 }
4683
4684 /* Stream out jump function JUMP_FUNC to OB. */
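/* The record written below has the following shape (a summary of the code,
not a format guarantee): the jump function type tag, a type-specific
payload (e.g. the constant tree for IPA_JF_CONST, or the operation,
operand and formal_id for IPA_JF_PASS_THROUGH), the count of aggregate
items followed by the items themselves, and finally the known-bits and
value-range blocks, each guarded by a single presence bit. */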
4685
4686 static void
4687 ipa_write_jump_function (struct output_block *ob,
4688 struct ipa_jump_func *jump_func)
4689 {
4690 struct ipa_agg_jf_item *item;
4691 struct bitpack_d bp;
4692 int i, count;
4693
4694 streamer_write_uhwi (ob, jump_func->type);
4695 switch (jump_func->type)
4696 {
4697 case IPA_JF_UNKNOWN:
4698 break;
4699 case IPA_JF_CONST:
4700 gcc_assert (
4701 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4702 stream_write_tree (ob, jump_func->value.constant.value, true);
4703 break;
4704 case IPA_JF_PASS_THROUGH:
4705 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4706 if (jump_func->value.pass_through.operation == NOP_EXPR)
4707 {
4708 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4709 bp = bitpack_create (ob->main_stream);
4710 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4711 streamer_write_bitpack (&bp);
4712 }
4713 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4714 == tcc_unary)
4715 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4716 else
4717 {
4718 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4719 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4720 }
4721 break;
4722 case IPA_JF_ANCESTOR:
4723 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4724 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4725 bp = bitpack_create (ob->main_stream);
4726 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4727 streamer_write_bitpack (&bp);
4728 break;
4729 }
4730
4731 count = vec_safe_length (jump_func->agg.items);
4732 streamer_write_uhwi (ob, count);
4733 if (count)
4734 {
4735 bp = bitpack_create (ob->main_stream);
4736 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4737 streamer_write_bitpack (&bp);
4738 }
4739
4740 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4741 {
4742 streamer_write_uhwi (ob, item->offset);
4743 stream_write_tree (ob, item->value, true);
4744 }
4745
4746 bp = bitpack_create (ob->main_stream);
4747 bp_pack_value (&bp, jump_func->bits.known, 1);
4748 streamer_write_bitpack (&bp);
4749 if (jump_func->bits.known)
4750 {
4751 streamer_write_widest_int (ob, jump_func->bits.value);
4752 streamer_write_widest_int (ob, jump_func->bits.mask);
4753 }
4754 bp_pack_value (&bp, jump_func->vr_known, 1);
4755 streamer_write_bitpack (&bp);
4756 if (jump_func->vr_known)
4757 {
4758 streamer_write_enum (ob->main_stream, value_range_type,
4759 VR_LAST, jump_func->m_vr.type);
4760 stream_write_tree (ob, jump_func->m_vr.min, true);
4761 stream_write_tree (ob, jump_func->m_vr.max, true);
4762 }
4763 }
4764
4765 /* Read in jump function JUMP_FUNC from IB. */
4766
4767 static void
4768 ipa_read_jump_function (struct lto_input_block *ib,
4769 struct ipa_jump_func *jump_func,
4770 struct cgraph_edge *cs,
4771 struct data_in *data_in)
4772 {
4773 enum jump_func_type jftype;
4774 enum tree_code operation;
4775 int i, count;
4776
4777 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4778 switch (jftype)
4779 {
4780 case IPA_JF_UNKNOWN:
4781 ipa_set_jf_unknown (jump_func);
4782 break;
4783 case IPA_JF_CONST:
4784 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4785 break;
4786 case IPA_JF_PASS_THROUGH:
4787 operation = (enum tree_code) streamer_read_uhwi (ib);
4788 if (operation == NOP_EXPR)
4789 {
4790 int formal_id = streamer_read_uhwi (ib);
4791 struct bitpack_d bp = streamer_read_bitpack (ib);
4792 bool agg_preserved = bp_unpack_value (&bp, 1);
4793 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4794 }
4795 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4796 {
4797 int formal_id = streamer_read_uhwi (ib);
4798 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4799 }
4800 else
4801 {
4802 tree operand = stream_read_tree (ib, data_in);
4803 int formal_id = streamer_read_uhwi (ib);
4804 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4805 operation);
4806 }
4807 break;
4808 case IPA_JF_ANCESTOR:
4809 {
4810 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4811 int formal_id = streamer_read_uhwi (ib);
4812 struct bitpack_d bp = streamer_read_bitpack (ib);
4813 bool agg_preserved = bp_unpack_value (&bp, 1);
4814 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4815 break;
4816 }
4817 }
4818
4819 count = streamer_read_uhwi (ib);
4820 vec_alloc (jump_func->agg.items, count);
4821 if (count)
4822 {
4823 struct bitpack_d bp = streamer_read_bitpack (ib);
4824 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4825 }
4826 for (i = 0; i < count; i++)
4827 {
4828 struct ipa_agg_jf_item item;
4829 item.offset = streamer_read_uhwi (ib);
4830 item.value = stream_read_tree (ib, data_in);
4831 jump_func->agg.items->quick_push (item);
4832 }
4833
4834 struct bitpack_d bp = streamer_read_bitpack (ib);
4835 bool bits_known = bp_unpack_value (&bp, 1);
4836 if (bits_known)
4837 {
4838 jump_func->bits.known = true;
4839 jump_func->bits.value = streamer_read_widest_int (ib);
4840 jump_func->bits.mask = streamer_read_widest_int (ib);
4841 }
4842 else
4843 jump_func->bits.known = false;
4844
4845 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4846 bool vr_known = bp_unpack_value (&vr_bp, 1);
4847 if (vr_known)
4848 {
4849 jump_func->vr_known = true;
4850 jump_func->m_vr.type = streamer_read_enum (ib,
4851 value_range_type,
4852 VR_LAST);
4853 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4854 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4855 }
4856 else
4857 jump_func->vr_known = false;
4858 }
4859
4860 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4861 relevant to indirect inlining to OB. */
4862
4863 static void
4864 ipa_write_indirect_edge_info (struct output_block *ob,
4865 struct cgraph_edge *cs)
4866 {
4867 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4868 struct bitpack_d bp;
4869
4870 streamer_write_hwi (ob, ii->param_index);
4871 bp = bitpack_create (ob->main_stream);
4872 bp_pack_value (&bp, ii->polymorphic, 1);
4873 bp_pack_value (&bp, ii->agg_contents, 1);
4874 bp_pack_value (&bp, ii->member_ptr, 1);
4875 bp_pack_value (&bp, ii->by_ref, 1);
4876 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4877 bp_pack_value (&bp, ii->vptr_changed, 1);
4878 streamer_write_bitpack (&bp);
4879 if (ii->agg_contents || ii->polymorphic)
4880 streamer_write_hwi (ob, ii->offset);
4881 else
4882 gcc_assert (ii->offset == 0);
4883
4884 if (ii->polymorphic)
4885 {
4886 streamer_write_hwi (ob, ii->otr_token);
4887 stream_write_tree (ob, ii->otr_type, true);
4888 ii->context.stream_out (ob);
4889 }
4890 }
4891
4892 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4893 relevant to indirect inlining from IB. */
4894
4895 static void
4896 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4897 struct data_in *data_in,
4898 struct cgraph_edge *cs)
4899 {
4900 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4901 struct bitpack_d bp;
4902
4903 ii->param_index = (int) streamer_read_hwi (ib);
4904 bp = streamer_read_bitpack (ib);
4905 ii->polymorphic = bp_unpack_value (&bp, 1);
4906 ii->agg_contents = bp_unpack_value (&bp, 1);
4907 ii->member_ptr = bp_unpack_value (&bp, 1);
4908 ii->by_ref = bp_unpack_value (&bp, 1);
4909 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4910 ii->vptr_changed = bp_unpack_value (&bp, 1);
4911 if (ii->agg_contents || ii->polymorphic)
4912 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4913 else
4914 ii->offset = 0;
4915 if (ii->polymorphic)
4916 {
4917 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4918 ii->otr_type = stream_read_tree (ib, data_in);
4919 ii->context.stream_in (ib, data_in);
4920 }
4921 }
4922
4923 /* Stream out NODE info to OB. */
4924
4925 static void
4926 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4927 {
4928 int node_ref;
4929 lto_symtab_encoder_t encoder;
4930 struct ipa_node_params *info = IPA_NODE_REF (node);
4931 int j;
4932 struct cgraph_edge *e;
4933 struct bitpack_d bp;
4934
4935 encoder = ob->decl_state->symtab_node_encoder;
4936 node_ref = lto_symtab_encoder_encode (encoder, node);
4937 streamer_write_uhwi (ob, node_ref);
4938
4939 streamer_write_uhwi (ob, ipa_get_param_count (info));
4940 for (j = 0; j < ipa_get_param_count (info); j++)
4941 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4942 bp = bitpack_create (ob->main_stream);
4943 gcc_assert (info->analysis_done
4944 || ipa_get_param_count (info) == 0);
4945 gcc_assert (!info->node_enqueued);
4946 gcc_assert (!info->ipcp_orig_node);
4947 for (j = 0; j < ipa_get_param_count (info); j++)
4948 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4949 streamer_write_bitpack (&bp);
4950 for (j = 0; j < ipa_get_param_count (info); j++)
4951 {
4952 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4953 stream_write_tree (ob, ipa_get_type (info, j), true);
4954 }
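/* Note that for each outgoing edge the argument count is written doubled,
with the low bit flagging whether polymorphic call contexts follow each
jump function; ipa_read_node_info decodes this with "count & 1" and
"count /= 2". */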
4955 for (e = node->callees; e; e = e->next_callee)
4956 {
4957 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4958
4959 streamer_write_uhwi (ob,
4960 ipa_get_cs_argument_count (args) * 2
4961 + (args->polymorphic_call_contexts != NULL));
4962 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4963 {
4964 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4965 if (args->polymorphic_call_contexts != NULL)
4966 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4967 }
4968 }
4969 for (e = node->indirect_calls; e; e = e->next_callee)
4970 {
4971 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4972
4973 streamer_write_uhwi (ob,
4974 ipa_get_cs_argument_count (args) * 2
4975 + (args->polymorphic_call_contexts != NULL));
4976 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4977 {
4978 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4979 if (args->polymorphic_call_contexts != NULL)
4980 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4981 }
4982 ipa_write_indirect_edge_info (ob, e);
4983 }
4984 }
4985
4986 /* Stream in NODE info from IB. */
4987
4988 static void
4989 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4990 struct data_in *data_in)
4991 {
4992 struct ipa_node_params *info = IPA_NODE_REF (node);
4993 int k;
4994 struct cgraph_edge *e;
4995 struct bitpack_d bp;
4996
4997 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4998
4999 for (k = 0; k < ipa_get_param_count (info); k++)
5000 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5001
5002 bp = streamer_read_bitpack (ib);
5003 if (ipa_get_param_count (info) != 0)
5004 info->analysis_done = true;
5005 info->node_enqueued = false;
5006 for (k = 0; k < ipa_get_param_count (info); k++)
5007 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5008 for (k = 0; k < ipa_get_param_count (info); k++)
5009 {
5010 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5011 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5012 }
5013 for (e = node->callees; e; e = e->next_callee)
5014 {
5015 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5016 int count = streamer_read_uhwi (ib);
5017 bool contexts_computed = count & 1;
5018 count /= 2;
5019
5020 if (!count)
5021 continue;
5022 vec_safe_grow_cleared (args->jump_functions, count);
5023 if (contexts_computed)
5024 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5025
5026 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5027 {
5028 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5029 data_in);
5030 if (contexts_computed)
5031 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5032 }
5033 }
5034 for (e = node->indirect_calls; e; e = e->next_callee)
5035 {
5036 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5037 int count = streamer_read_uhwi (ib);
5038 bool contexts_computed = count & 1;
5039 count /= 2;
5040
5041 if (count)
5042 {
5043 vec_safe_grow_cleared (args->jump_functions, count);
5044 if (contexts_computed)
5045 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5046 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5047 {
5048 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5049 data_in);
5050 if (contexts_computed)
5051 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5052 }
5053 }
5054 ipa_read_indirect_edge_info (ib, data_in, e);
5055 }
5056 }
5057
5058 /* Write jump functions for all analyzed functions in the current partition. */
5059
5060 void
5061 ipa_prop_write_jump_functions (void)
5062 {
5063 struct cgraph_node *node;
5064 struct output_block *ob;
5065 unsigned int count = 0;
5066 lto_symtab_encoder_iterator lsei;
5067 lto_symtab_encoder_t encoder;
5068
5069 if (!ipa_node_params_sum)
5070 return;
5071
5072 ob = create_output_block (LTO_section_jump_functions);
5073 encoder = ob->decl_state->symtab_node_encoder;
5074 ob->symbol = NULL;
5075 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5076 lsei_next_function_in_partition (&lsei))
5077 {
5078 node = lsei_cgraph_node (lsei);
5079 if (node->has_gimple_body_p ()
5080 && IPA_NODE_REF (node) != NULL)
5081 count++;
5082 }
5083
5084 streamer_write_uhwi (ob, count);
5085
5086 /* Process all of the functions. */
5087 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5088 lsei_next_function_in_partition (&lsei))
5089 {
5090 node = lsei_cgraph_node (lsei);
5091 if (node->has_gimple_body_p ()
5092 && IPA_NODE_REF (node) != NULL)
5093 ipa_write_node_info (ob, node);
5094 }
5095 streamer_write_char_stream (ob->main_stream, 0);
5096 produce_asm (ob, NULL);
5097 destroy_output_block (ob);
5098 }
5099
5100 /* Read a jump-functions section in file FILE_DATA of length LEN with data DATA. */
5101
5102 static void
5103 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5104 size_t len)
5105 {
5106 const struct lto_function_header *header =
5107 (const struct lto_function_header *) data;
5108 const int cfg_offset = sizeof (struct lto_function_header);
5109 const int main_offset = cfg_offset + header->cfg_size;
5110 const int string_offset = main_offset + header->main_size;
5111 struct data_in *data_in;
5112 unsigned int i;
5113 unsigned int count;
5114
5115 lto_input_block ib_main ((const char *) data + main_offset,
5116 header->main_size, file_data->mode_table);
5117
5118 data_in =
5119 lto_data_in_create (file_data, (const char *) data + string_offset,
5120 header->string_size, vNULL);
5121 count = streamer_read_uhwi (&ib_main);
5122
5123 for (i = 0; i < count; i++)
5124 {
5125 unsigned int index;
5126 struct cgraph_node *node;
5127 lto_symtab_encoder_t encoder;
5128
5129 index = streamer_read_uhwi (&ib_main);
5130 encoder = file_data->symtab_node_encoder;
5131 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5132 index));
5133 gcc_assert (node->definition);
5134 ipa_read_node_info (&ib_main, node, data_in);
5135 }
5136 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5137 len);
5138 lto_data_in_delete (data_in);
5139 }
5140
5141 /* Read ipcp jump functions. */
5142
5143 void
5144 ipa_prop_read_jump_functions (void)
5145 {
5146 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5147 struct lto_file_decl_data *file_data;
5148 unsigned int j = 0;
5149
5150 ipa_check_create_node_params ();
5151 ipa_check_create_edge_args ();
5152 ipa_register_cgraph_hooks ();
5153
5154 while ((file_data = file_data_vec[j++]))
5155 {
5156 size_t len;
5157 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5158
5159 if (data)
5160 ipa_prop_read_section (file_data, data, len);
5161 }
5162 }
5163
5164 /* After merging units, we can get a mismatch in argument counts.
5165 Decl merging might also have rendered parameter lists obsolete.
5166 Also compute called_with_variable_arg info. */
5167
5168 void
5169 ipa_update_after_lto_read (void)
5170 {
5171 ipa_check_create_node_params ();
5172 ipa_check_create_edge_args ();
5173 }
5174
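/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
replacement values, known value ranges and known bits, to OB. */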
5175 void
5176 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5177 {
5178 int node_ref;
5179 unsigned int count = 0;
5180 lto_symtab_encoder_t encoder;
5181 struct ipa_agg_replacement_value *aggvals, *av;
5182
5183 aggvals = ipa_get_agg_replacements_for_node (node);
5184 encoder = ob->decl_state->symtab_node_encoder;
5185 node_ref = lto_symtab_encoder_encode (encoder, node);
5186 streamer_write_uhwi (ob, node_ref);
5187
5188 for (av = aggvals; av; av = av->next)
5189 count++;
5190 streamer_write_uhwi (ob, count);
5191
5192 for (av = aggvals; av; av = av->next)
5193 {
5194 struct bitpack_d bp;
5195
5196 streamer_write_uhwi (ob, av->offset);
5197 streamer_write_uhwi (ob, av->index);
5198 stream_write_tree (ob, av->value, true);
5199
5200 bp = bitpack_create (ob->main_stream);
5201 bp_pack_value (&bp, av->by_ref, 1);
5202 streamer_write_bitpack (&bp);
5203 }
5204
5205 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5206 if (ts && vec_safe_length (ts->m_vr) > 0)
5207 {
5208 count = ts->m_vr->length ();
5209 streamer_write_uhwi (ob, count);
5210 for (unsigned i = 0; i < count; ++i)
5211 {
5212 struct bitpack_d bp;
5213 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5214 bp = bitpack_create (ob->main_stream);
5215 bp_pack_value (&bp, parm_vr->known, 1);
5216 streamer_write_bitpack (&bp);
5217 if (parm_vr->known)
5218 {
5219 streamer_write_enum (ob->main_stream, value_range_type,
5220 VR_LAST, parm_vr->type);
5221 streamer_write_wide_int (ob, parm_vr->min);
5222 streamer_write_wide_int (ob, parm_vr->max);
5223 }
5224 }
5225 }
5226 else
5227 streamer_write_uhwi (ob, 0);
5228
5229 if (ts && vec_safe_length (ts->bits) > 0)
5230 {
5231 count = ts->bits->length ();
5232 streamer_write_uhwi (ob, count);
5233
5234 for (unsigned i = 0; i < count; ++i)
5235 {
5236 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5237 struct bitpack_d bp = bitpack_create (ob->main_stream);
5238 bp_pack_value (&bp, bits_jfunc.known, 1);
5239 streamer_write_bitpack (&bp);
5240 if (bits_jfunc.known)
5241 {
5242 streamer_write_widest_int (ob, bits_jfunc.value);
5243 streamer_write_widest_int (ob, bits_jfunc.mask);
5244 }
5245 }
5246 }
5247 else
5248 streamer_write_uhwi (ob, 0);
5249 }
5250
5251 /* Stream in the IPA-CP transformation summary for NODE from IB. */
5252
5253 static void
5254 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5255 data_in *data_in)
5256 {
5257 struct ipa_agg_replacement_value *aggvals = NULL;
5258 unsigned int count, i;
5259
5260 count = streamer_read_uhwi (ib);
5261 for (i = 0; i < count; i++)
5262 {
5263 struct ipa_agg_replacement_value *av;
5264 struct bitpack_d bp;
5265
5266 av = ggc_alloc<ipa_agg_replacement_value> ();
5267 av->offset = streamer_read_uhwi (ib);
5268 av->index = streamer_read_uhwi (ib);
5269 av->value = stream_read_tree (ib, data_in);
5270 bp = streamer_read_bitpack (ib);
5271 av->by_ref = bp_unpack_value (&bp, 1);
5272 av->next = aggvals;
5273 aggvals = av;
5274 }
5275 ipa_set_node_agg_value_chain (node, aggvals);
5276
5277 count = streamer_read_uhwi (ib);
5278 if (count > 0)
5279 {
5280 ipcp_grow_transformations_if_necessary ();
5281
5282 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5283 vec_safe_grow_cleared (ts->m_vr, count);
5284 for (i = 0; i < count; i++)
5285 {
5286 ipa_vr *parm_vr;
5287 parm_vr = &(*ts->m_vr)[i];
5288 struct bitpack_d bp;
5289 bp = streamer_read_bitpack (ib);
5290 parm_vr->known = bp_unpack_value (&bp, 1);
5291 if (parm_vr->known)
5292 {
5293 parm_vr->type = streamer_read_enum (ib, value_range_type,
5294 VR_LAST);
5295 parm_vr->min = streamer_read_wide_int (ib);
5296 parm_vr->max = streamer_read_wide_int (ib);
5297 }
5298 }
5299 }
5300 count = streamer_read_uhwi (ib);
5301 if (count > 0)
5302 {
5303 ipcp_grow_transformations_if_necessary ();
5304
5305 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5306 vec_safe_grow_cleared (ts->bits, count);
5307
5308 for (i = 0; i < count; i++)
5309 {
5310 ipa_bits& bits_jfunc = (*ts->bits)[i];
5311 struct bitpack_d bp = streamer_read_bitpack (ib);
5312 bits_jfunc.known = bp_unpack_value (&bp, 1);
5313 if (bits_jfunc.known)
5314 {
5315 bits_jfunc.value = streamer_read_widest_int (ib);
5316 bits_jfunc.mask = streamer_read_widest_int (ib);
5317 }
5318 }
5319 }
5320 }
5321
5322 /* Write IPA-CP transformation summaries for all nodes in the current partition. */
5323
5324 void
5325 ipcp_write_transformation_summaries (void)
5326 {
5327 struct cgraph_node *node;
5328 struct output_block *ob;
5329 unsigned int count = 0;
5330 lto_symtab_encoder_iterator lsei;
5331 lto_symtab_encoder_t encoder;
5332
5333 ob = create_output_block (LTO_section_ipcp_transform);
5334 encoder = ob->decl_state->symtab_node_encoder;
5335 ob->symbol = NULL;
5336 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5337 lsei_next_function_in_partition (&lsei))
5338 {
5339 node = lsei_cgraph_node (lsei);
5340 if (node->has_gimple_body_p ())
5341 count++;
5342 }
5343
5344 streamer_write_uhwi (ob, count);
5345
5346 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5347 lsei_next_function_in_partition (&lsei))
5348 {
5349 node = lsei_cgraph_node (lsei);
5350 if (node->has_gimple_body_p ())
5351 write_ipcp_transformation_info (ob, node);
5352 }
5353 streamer_write_char_stream (ob->main_stream, 0);
5354 produce_asm (ob, NULL);
5355 destroy_output_block (ob);
5356 }
5357
5358 /* Read replacements section in file FILE_DATA of length LEN with data
5359 DATA. */
5360
5361 static void
5362 read_replacements_section (struct lto_file_decl_data *file_data,
5363 const char *data,
5364 size_t len)
5365 {
5366 const struct lto_function_header *header =
5367 (const struct lto_function_header *) data;
5368 const int cfg_offset = sizeof (struct lto_function_header);
5369 const int main_offset = cfg_offset + header->cfg_size;
5370 const int string_offset = main_offset + header->main_size;
5371 struct data_in *data_in;
5372 unsigned int i;
5373 unsigned int count;
5374
5375 lto_input_block ib_main ((const char *) data + main_offset,
5376 header->main_size, file_data->mode_table);
5377
5378 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5379 header->string_size, vNULL);
5380 count = streamer_read_uhwi (&ib_main);
5381
5382 for (i = 0; i < count; i++)
5383 {
5384 unsigned int index;
5385 struct cgraph_node *node;
5386 lto_symtab_encoder_t encoder;
5387
5388 index = streamer_read_uhwi (&ib_main);
5389 encoder = file_data->symtab_node_encoder;
5390 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5391 index));
5392 gcc_assert (node->definition);
5393 read_ipcp_transformation_info (&ib_main, node, data_in);
5394 }
5395 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5396 len);
5397 lto_data_in_delete (data_in);
5398 }
5399
5400 /* Read IPA-CP aggregate replacements. */
5401
5402 void
5403 ipcp_read_transformation_summaries (void)
5404 {
5405 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5406 struct lto_file_decl_data *file_data;
5407 unsigned int j = 0;
5408
5409 while ((file_data = file_data_vec[j++]))
5410 {
5411 size_t len;
5412 const char *data = lto_get_section_data (file_data,
5413 LTO_section_ipcp_transform,
5414 NULL, &len);
5415 if (data)
5416 read_replacements_section (file_data, data, len);
5417 }
5418 }
5419
5420 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5421 NODE. */
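/* For example (hypothetical): if NODE's clone skips original argument 1,
the remap table built below is {0 -> 0, 1 -> -1, 2 -> 1}, so a
replacement recorded for original parameter 2 is re-indexed to the
clone's parameter 1. */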
5422
5423 static void
5424 adjust_agg_replacement_values (struct cgraph_node *node,
5425 struct ipa_agg_replacement_value *aggval)
5426 {
5427 struct ipa_agg_replacement_value *v;
5428 int i, c = 0, d = 0, *adj;
5429
5430 if (!node->clone.combined_args_to_skip)
5431 return;
5432
5433 for (v = aggval; v; v = v->next)
5434 {
5435 gcc_assert (v->index >= 0);
5436 if (c < v->index)
5437 c = v->index;
5438 }
5439 c++;
5440
5441 adj = XALLOCAVEC (int, c);
5442 for (i = 0; i < c; i++)
5443 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5444 {
5445 adj[i] = -1;
5446 d++;
5447 }
5448 else
5449 adj[i] = i - d;
5450
5451 for (v = aggval; v; v = v->next)
5452 v->index = adj[v->index];
5453 }
5454
5455 /* Dominator walker driving the ipcp modification phase. */
5456
5457 class ipcp_modif_dom_walker : public dom_walker
5458 {
5459 public:
5460 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5461 vec<ipa_param_descriptor, va_gc> *descs,
5462 struct ipa_agg_replacement_value *av,
5463 bool *sc, bool *cc)
5464 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5465 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5466
5467 virtual edge before_dom_children (basic_block);
5468
5469 private:
5470 struct ipa_func_body_info *m_fbi;
5471 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5472 struct ipa_agg_replacement_value *m_aggval;
5473 bool *m_something_changed, *m_cfg_changed;
5474 };
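/* A usage sketch (mirroring how the walker is invoked from the IPA-CP
transform phase; FBI, DESCRIPTORS and AGGVAL stand for data prepared by
the caller):

bool something_changed = false, cfg_changed = false;
ipcp_modif_dom_walker (&fbi, descriptors, aggval,
&something_changed, &cfg_changed)
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)); */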
5475
5476 edge
5477 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5478 {
5479 gimple_stmt_iterator gsi;
5480 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5481 {
5482 struct ipa_agg_replacement_value *v;
5483 gimple *stmt = gsi_stmt (gsi);
5484 tree rhs, val, t;
5485 HOST_WIDE_INT offset, size;
5486 int index;
5487 bool by_ref, vce;
5488
5489 if (!gimple_assign_load_p (stmt))
5490 continue;
5491 rhs = gimple_assign_rhs1 (stmt);
5492 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5493 continue;
5494
5495 vce = false;
5496 t = rhs;
5497 while (handled_component_p (t))
5498 {
5499 /* A V_C_E can do things like reinterpret an array of integers as one
5500 bigger integer and similar conversions we do not handle below. */
5501 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5502 {
5503 vce = true;
5504 break;
5505 }
5506 t = TREE_OPERAND (t, 0);
5507 }
5508 if (vce)
5509 continue;
5510
5511 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5512 &offset, &size, &by_ref))
5513 continue;
5514 for (v = m_aggval; v; v = v->next)
5515 if (v->index == index
5516 && v->offset == offset)
5517 break;
5518 if (!v
5519 || v->by_ref != by_ref
5520 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5521 continue;
5522
5523 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5524 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5525 {
5526 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5527 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5528 else if (TYPE_SIZE (TREE_TYPE (rhs))
5529 == TYPE_SIZE (TREE_TYPE (v->value)))
5530 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5531 else
5532 {
5533 if (dump_file)
5534 {
5535 fprintf (dump_file, " const ");
5536 print_generic_expr (dump_file, v->value, 0);
5537 fprintf (dump_file, " can't be converted to type of ");
5538 print_generic_expr (dump_file, rhs, 0);
5539 fprintf (dump_file, "\n");
5540 }
5541 continue;
5542 }
5543 }
5544 else
5545 val = v->value;
5546
5547 if (dump_file && (dump_flags & TDF_DETAILS))
5548 {
5549 fprintf (dump_file, "Modifying stmt:\n ");
5550 print_gimple_stmt (dump_file, stmt, 0, 0);
5551 }
5552 gimple_assign_set_rhs_from_tree (&gsi, val);
5553 update_stmt (stmt);
5554
5555 if (dump_file && (dump_flags & TDF_DETAILS))
5556 {
5557 fprintf (dump_file, "into:\n ");
5558 print_gimple_stmt (dump_file, stmt, 0, 0);
5559 fprintf (dump_file, "\n");
5560 }
5561
5562 *m_something_changed = true;
5563 if (maybe_clean_eh_stmt (stmt)
5564 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5565 *m_cfg_changed = true;
5566 }
5567 return NULL;
5568 }
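
/* An illustration of the rewrite performed by the walker above, on
   hypothetical GIMPLE, assuming an aggregate replacement value of 7 was
   recorded for offset 0 of the aggregate passed by reference through the
   first parameter P (the load is matched by ipa_load_from_parm_agg):

     x_1 = p_2(D)->f;

   becomes

     x_1 = 7;

   If the original load could throw, for instance under
   -fnon-call-exceptions, maybe_clean_eh_stmt and
   gimple_purge_dead_eh_edges then remove the EH edge that is no longer
   needed, which is what sets *m_cfg_changed.  */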
5569
5570 /* Update the known-bits information of the formal parameters of NODE as
5571 described in its ipcp_transformation_summary. */
5572
5573 static void
5574 ipcp_update_bits (struct cgraph_node *node)
5575 {
5576 tree parm = DECL_ARGUMENTS (node->decl);
5577 tree next_parm = parm;
5578 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5579
5580 if (!ts || vec_safe_length (ts->bits) == 0)
5581 return;
5582
5583 vec<ipa_bits, va_gc> &bits = *ts->bits;
5584 unsigned count = bits.length ();
5585
5586 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5587 {
5588 if (node->clone.combined_args_to_skip
5589 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5590 continue;
5591
5592 gcc_checking_assert (parm);
5593 next_parm = DECL_CHAIN (parm);
5594
5595 if (!bits[i].known
5596 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5597 || !is_gimple_reg (parm))
5598 continue;
5599
5600 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5601 if (!ddef)
5602 continue;
5603
5604 if (dump_file)
5605 {
5606 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5607 print_hex (bits[i].mask, dump_file);
5608 fprintf (dump_file, "\n");
5609 }
5610
5611 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5612 {
5613 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5614 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5615
5616 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5617 | wide_int::from (bits[i].value, prec, sgn);
5618 set_nonzero_bits (ddef, nonzero_bits);
5619 }
5620 else
5621 {
5622 unsigned tem = bits[i].mask.to_uhwi ();
5623 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5624 unsigned align = tem & -tem;
5625 unsigned misalign = bitpos & (align - 1);
5626
5627 if (align > 1)
5628 {
5629 if (dump_file)
5630 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5631
5632 unsigned old_align, old_misalign;
5633 struct ptr_info_def *pi = get_ptr_info (ddef);
5634 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5635
5636 if (old_known
5637 && old_align > align)
5638 {
5639 if (dump_file)
5640 {
5641 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5642 if ((old_misalign & (align - 1)) != misalign)
5643 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5644 old_misalign, misalign);
5645 }
5646 continue;
5647 }
5648
5649 if (old_known
5650 && ((misalign & (old_align - 1)) != old_misalign)
5651 && dump_file)
5652 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5653 old_misalign, misalign);
5654
5655 set_ptr_info_alignment (pi, align, misalign);
5656 }
5657 }
5658 }
5659 }
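
/* A worked example for the pointer branch above.  In the bits lattice a
   set bit in MASK means that bit is unknown, so with, say,
   mask = 0xfffffff8 and value = 0x4 the three low bits are known to be
   100 in binary.  Then tem & -tem yields align = 8 and
   bitpos & (align - 1) yields misalign = 4: the pointer is known to be
   congruent to 4 modulo 8.  In the integral branch, mask | value
   conservatively marks every bit that may possibly be set, which is the
   form set_nonzero_bits expects.  */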
5660
5661 /* Update the value ranges of the formal parameters of NODE as described
5662 in its ipcp_transformation_summary. */
5663
5664 static void
5665 ipcp_update_vr (struct cgraph_node *node)
5666 {
5667 tree fndecl = node->decl;
5668 tree parm = DECL_ARGUMENTS (fndecl);
5669 tree next_parm = parm;
5670 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5671 if (!ts || vec_safe_length (ts->m_vr) == 0)
5672 return;
5673 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5674 unsigned count = vr.length ();
5675
5676 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5677 {
5678 if (node->clone.combined_args_to_skip
5679 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5680 continue;
5681 gcc_checking_assert (parm);
5682 next_parm = DECL_CHAIN (parm);
5683 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5684
5685 if (!ddef || !is_gimple_reg (parm))
5686 continue;
5687
5688 if (vr[i].known
5689 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5690 {
5691 tree type = TREE_TYPE (ddef);
5692 unsigned prec = TYPE_PRECISION (type);
5693 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5694 {
5695 if (dump_file)
5696 {
5697 fprintf (dump_file, "Setting value range of param %u ", i);
5698 fprintf (dump_file, "%s[",
5699 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5700 print_decs (vr[i].min, dump_file);
5701 fprintf (dump_file, ", ");
5702 print_decs (vr[i].max, dump_file);
5703 fprintf (dump_file, "]\n");
5704 }
5705 set_range_info (ddef, vr[i].type,
5706 wide_int_storage::from (vr[i].min, prec,
5707 TYPE_SIGN (type)),
5708 wide_int_storage::from (vr[i].max, prec,
5709 TYPE_SIGN (type)));
5710 }
5711 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5712 && vr[i].type == VR_ANTI_RANGE
5713 && wi::eq_p (vr[i].min, 0)
5714 && wi::eq_p (vr[i].max, 0))
5715 {
5716 if (dump_file)
5717 fprintf (dump_file, "Setting nonnull for %u\n", i);
5718 set_ptr_nonnull (ddef);
5719 }
5720 }
5721 }
5722 }
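
/* For example, an integral parameter whose summary records VR_RANGE
   [1, 100] gets that range attached to its default-def SSA name via
   set_range_info, while a pointer parameter whose summary records the
   anti-range ~[0, 0], i.e. one that compares unequal to NULL, is marked
   non-null via set_ptr_nonnull.  */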
5723
5724 /* IPA-CP transformation phase: apply the recorded known bits, value ranges and aggregate replacement values to the body of NODE. */
5725
5726 unsigned int
5727 ipcp_transform_function (struct cgraph_node *node)
5728 {
5729 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5730 struct ipa_func_body_info fbi;
5731 struct ipa_agg_replacement_value *aggval;
5732 int param_count;
5733 bool cfg_changed = false, something_changed = false;
5734
5735 gcc_checking_assert (cfun);
5736 gcc_checking_assert (current_function_decl);
5737
5738 if (dump_file)
5739 fprintf (dump_file, "Modification phase of node %s/%i\n",
5740 node->name (), node->order);
5741
5742 ipcp_update_bits (node);
5743 ipcp_update_vr (node);
5744 aggval = ipa_get_agg_replacements_for_node (node);
5745 if (!aggval)
5746 return 0;
5747 param_count = count_formal_params (node->decl);
5748 if (param_count == 0)
5749 return 0;
5750 adjust_agg_replacement_values (node, aggval);
5751 if (dump_file)
5752 ipa_dump_agg_replacement_values (dump_file, aggval);
5753
5754 fbi.node = node;
5755 fbi.info = NULL;
5756 fbi.bb_infos = vNULL;
5757 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5758 fbi.param_count = param_count;
5759 fbi.aa_walked = 0;
5760
5761 vec_safe_grow_cleared (descriptors, param_count);
5762 ipa_populate_param_decls (node, *descriptors);
5763 calculate_dominance_info (CDI_DOMINATORS);
5764 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5765 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5766
5767 int i;
5768 struct ipa_bb_info *bi;
5769 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5770 free_ipa_bb_info (bi);
5771 fbi.bb_infos.release ();
5772 free_dominance_info (CDI_DOMINATORS);
5773 (*ipcp_transformations)[node->uid].agg_values = NULL;
5774 (*ipcp_transformations)[node->uid].bits = NULL;
5775 (*ipcp_transformations)[node->uid].m_vr = NULL;
5776
5777 vec_free (descriptors);
5778
5779 if (!something_changed)
5780 return 0;
5781 else if (cfg_changed)
5782 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5783 else
5784 return TODO_update_ssa_only_virtuals;
5785 }
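
/* The TODO flags returned above tell the pass manager what needs
   repairing: rewriting a load into a constant invalidates the virtual SSA
   web, hence TODO_update_ssa_only_virtuals, and purging dead EH edges
   changes the CFG, hence the additional TODO_cleanup_cfg.  */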