/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, " value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a know-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   the jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
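
/* A constant jump function also covers passing the address of a function.
   For instance, with a hypothetical callee "useful":

     extern void useful (int);

     bar ()
     {
       foo (useful);
     }

   the argument of foo is an ADDR_EXPR of a FUNCTION_DECL, so the code above
   also allocates a reference description whose refcount and next_duplicate
   fields let the reference be tracked across edge duplication and removal.  */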

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
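
/* A simple pass-through describes, for example, an argument that merely
   forwards an incoming parameter:

     bar (int b)
     {
       foo (b);
     }

   Here the argument of foo is the default definition SSA name of B and gets a
   pass-through jump function for formal B with operation NOP_EXPR.  */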

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
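
/* A unary pass-through describes, for example:

     bar (int b)
     {
       foo (-b);
     }

   where the argument of foo is defined by a statement such as

     c_2 = -b_1(D);

   and can be summarized as formal B with operation NEGATE_EXPR.  */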

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
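
/* In GIMPLE, a store that changes the dynamic type typically looks something
   like

     this_1->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a non-aggregate lhs whose COMPONENT_REF field is
   DECL_VIRTUAL_P, which is precisely what the conservative test above does
   not rule out.  */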

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we do not have an
     easy way to tie it to this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* Main worker for load_from_unmodified_param and load_from_param.
   If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_param_1 (struct ipa_func_body_info *fbi,
		   vec<ipa_param_descriptor> descriptors,
		   gimple *stmt)
{
  int index;
  tree op1;

  gcc_checking_assert (is_gimple_assign (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  if (!gimple_assign_single_p (stmt))
    return -1;

  return load_from_param_1 (fbi, descriptors, stmt);
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.  */

static int
load_from_param (struct ipa_func_body_info *fbi,
		 vec<ipa_param_descriptor> descriptors,
		 gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return -1;

  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  if ((get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
      && (get_gimple_rhs_class (rhs_code) != GIMPLE_UNARY_RHS))
    return -1;

  return load_from_param_1 (fbi, descriptors, stmt);
}
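
/* For instance, both variants above accept a plain load from a PARM_DECL A
   that is not a gimple register:

     a.0_1 = a;

   while only load_from_param additionally lets statements with a unary
   right-hand side through the rhs-class check before load_from_param_1
   insists that the operand is the PARM_DECL itself.  */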

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters, used at different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;
  gimple *stmt2 = stmt;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	{
	  index = load_from_param (fbi, info->descriptors,
				   SSA_NAME_DEF_STMT (op1));
	  stmt2 = SSA_NAME_DEF_STMT (op1);
	}
      tc_ssa = op1;
    }
  else
    {
      index = load_from_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      else if (is_gimple_assign (stmt2)
	       && (gimple_expr_code (stmt2) != NOP_EXPR)
	       && (TREE_CODE_CLASS (gimple_expr_code (stmt2)) == tcc_unary))
	{
	  ipa_set_jf_unary_pass_through (jfunc, index,
					 gimple_assign_rhs_code (stmt2));
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
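
/* The shape matched by type_like_member_ptr_p corresponds to how C++ pointers
   to member functions are commonly lowered, i.e. a record resembling

     struct
     {
       void (*__pfn) ();
       long __delta;
     };

   where __pfn holds the function pointer (or a vtable offset for virtual
   functions) and __delta the this-pointer adjustment; the field names here
   are only illustrative.  */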

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before the
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
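
/* For example, if the list already describes bits 0..31 and 64..95 of the
   aggregate, asking for offset 32 and size 32 yields the slot between the two
   entries, asking again for offset 0 and size 32 sets *ALREADY_THERE, and
   asking for offset 16 and size 32 is a partial overlap and yields NULL.  */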

/* Build an aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of
     ipa_known_agg_contents_list structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
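
/* As an illustration of the three stages above, for a call such as

     s.f1 = 1;
     s.f2 = 64;
     bar (&s);

   the second stage collects the two constant stores and the third stage emits
   an aggregate jump function describing the contents of S as the constants 1
   and 64 at the corresponding offsets.  */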

/* Return the Ith param type of the callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
1686
1687 /* Compute jump function for all arguments of callsite CS and insert the
1688 information in the jump_functions array in the ipa_edge_args corresponding
1689 to this callsite. */
1690
1691 static void
1692 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1693 struct cgraph_edge *cs)
1694 {
1695 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1696 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1697 gcall *call = cs->call_stmt;
1698 int n, arg_num = gimple_call_num_args (call);
1699 bool useful_context = false;
1700
1701 if (arg_num == 0 || args->jump_functions)
1702 return;
1703 vec_safe_grow_cleared (args->jump_functions, arg_num);
1704 if (flag_devirtualize)
1705 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1706
1707 if (gimple_call_internal_p (call))
1708 return;
1709 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1710 return;
1711
1712 for (n = 0; n < arg_num; n++)
1713 {
1714 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1715 tree arg = gimple_call_arg (call, n);
1716 tree param_type = ipa_get_callee_param_type (cs, n);
1717 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1718 {
1719 tree instance;
1720 struct ipa_polymorphic_call_context context (cs->caller->decl,
1721 arg, cs->call_stmt,
1722 &instance);
1723 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1724 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1725 if (!context.useless_p ())
1726 useful_context = true;
1727 }
1728
1729 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1730 {
1731 bool addr_nonzero = false;
1732 bool strict_overflow = false;
1733
1734 if (TREE_CODE (arg) == SSA_NAME
1735 && param_type
1736 && get_ptr_nonnull (arg))
1737 addr_nonzero = true;
1738 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1739 addr_nonzero = true;
1740
1741 if (addr_nonzero)
1742 {
1743 jfunc->vr_known = true;
1744 jfunc->m_vr.type = VR_ANTI_RANGE;
1745 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1746 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1747 jfunc->m_vr.equiv = NULL;
1748 }
1749 else
1750 gcc_assert (!jfunc->vr_known);
1751 }
1752 else
1753 {
1754 wide_int min, max;
1755 value_range_type type;
1756 if (TREE_CODE (arg) == SSA_NAME
1757 && param_type
1758 && (type = get_range_info (arg, &min, &max))
1759 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1760 {
1761 value_range vr;
1762
1763 vr.type = type;
1764 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1765 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1766 vr.equiv = NULL;
1767 extract_range_from_unary_expr (&jfunc->m_vr,
1768 NOP_EXPR,
1769 param_type,
1770 &vr, TREE_TYPE (arg));
1771 if (jfunc->m_vr.type == VR_RANGE
1772 || jfunc->m_vr.type == VR_ANTI_RANGE)
1773 jfunc->vr_known = true;
1774 else
1775 jfunc->vr_known = false;
1776 }
1777 else
1778 gcc_assert (!jfunc->vr_known);
1779 }
1780
1781 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1782 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1783 {
1784 jfunc->bits.known = true;
1785
1786 if (TREE_CODE (arg) == SSA_NAME)
1787 {
1788 jfunc->bits.value = 0;
1789 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1790 TYPE_SIGN (TREE_TYPE (arg)));
1791 }
1792 else
1793 {
1794 jfunc->bits.value = wi::to_widest (arg);
1795 jfunc->bits.mask = 0;
1796 }
1797 }
1798 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1799 {
1800 unsigned HOST_WIDE_INT bitpos;
1801 unsigned align;
1802
1803 jfunc->bits.known = true;
1804 get_pointer_alignment_1 (arg, &align, &bitpos);
1805 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1806 .and_not (align / BITS_PER_UNIT - 1);
1807 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1808 }
1809 else
1810 gcc_assert (!jfunc->bits.known);
1811
1812 if (is_gimple_ip_invariant (arg)
1813 || (VAR_P (arg)
1814 && is_global_var (arg)
1815 && TREE_READONLY (arg)))
1816 ipa_set_jf_constant (jfunc, arg, cs);
1817 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1818 && TREE_CODE (arg) == PARM_DECL)
1819 {
1820 int index = ipa_get_param_decl_index (info, arg);
1821
1822 gcc_assert (index >= 0);
1823 /* Aggregate passed by value; check for a pass-through, otherwise we
1824 will attempt to fill in the aggregate contents later in this
1825 loop. */
1826 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1827 {
1828 ipa_set_jf_simple_pass_through (jfunc, index, false);
1829 continue;
1830 }
1831 }
1832 else if (TREE_CODE (arg) == SSA_NAME)
1833 {
1834 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1835 {
1836 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1837 if (index >= 0)
1838 {
1839 bool agg_p;
1840 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1841 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1842 }
1843 }
1844 else
1845 {
1846 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1847 if (is_gimple_assign (stmt))
1848 compute_complex_assign_jump_func (fbi, info, jfunc,
1849 call, stmt, arg, param_type);
1850 else if (gimple_code (stmt) == GIMPLE_PHI)
1851 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1852 call,
1853 as_a <gphi *> (stmt));
1854 }
1855 }
1856
1857 /* If ARG is a pointer, we cannot use its type to determine the type of the
1858 aggregate passed (because type conversions are ignored in gimple). Usually
1859 we can safely get the type from the function declaration, but in the case of
1860 K&R prototypes or variadic functions we can try our luck with the type of the
1861 pointer passed. TODO: Since we look for actual initialization of the memory
1862 object, we might do better to work out the type based on the memory stores we find. */
1863 if (!param_type)
1864 param_type = TREE_TYPE (arg);
1865
1866 if ((jfunc->type != IPA_JF_PASS_THROUGH
1867 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1868 && (jfunc->type != IPA_JF_ANCESTOR
1869 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1870 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1871 || POINTER_TYPE_P (param_type)))
1872 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1873 }
1874 if (!useful_context)
1875 vec_free (args->polymorphic_call_contexts);
1876 }
1877
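/* A purely illustrative sketch (editorial, using hypothetical functions):

     void callee (int *p, int i);
     void caller (int *q, int j) { callee (q, j + 1); }

   For argument 0 the loop above produces a simple pass-through jump
   function referring to formal 0 of caller and, if Q is known to be
   non-NULL, the anti-range ~[0, 0].  For argument 1 it produces an
   arithmetic pass-through (formal 1 plus constant 1) via
   compute_complex_assign_jump_func, with value range and known-bits
   information taken from the SSA temporary holding J + 1 when range
   info is available.  */
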
1878 /* Compute jump functions for all edges - both direct and indirect - outgoing
1879 from BB. */
1880
1881 static void
1882 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1883 {
1884 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1885 int i;
1886 struct cgraph_edge *cs;
1887
1888 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1889 {
1890 struct cgraph_node *callee = cs->callee;
1891
1892 if (callee)
1893 {
1894 callee->ultimate_alias_target ();
1895 /* We do not need to bother analyzing calls to unknown functions
1896 unless they may become known during LTO/WHOPR. */
1897 if (!callee->definition && !flag_lto)
1898 continue;
1899 }
1900 ipa_compute_jump_functions_for_edge (fbi, cs);
1901 }
1902 }
1903
1904 /* If STMT looks like a statement loading a value from a member pointer formal
1905 parameter, return that parameter and store the offset of the field to
1906 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1907 might be clobbered). If USE_DELTA, then we look for a use of the delta
1908 field rather than the pfn. */
1909
1910 static tree
1911 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1912 HOST_WIDE_INT *offset_p)
1913 {
1914 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1915
1916 if (!gimple_assign_single_p (stmt))
1917 return NULL_TREE;
1918
1919 rhs = gimple_assign_rhs1 (stmt);
1920 if (TREE_CODE (rhs) == COMPONENT_REF)
1921 {
1922 ref_field = TREE_OPERAND (rhs, 1);
1923 rhs = TREE_OPERAND (rhs, 0);
1924 }
1925 else
1926 ref_field = NULL_TREE;
1927 if (TREE_CODE (rhs) != MEM_REF)
1928 return NULL_TREE;
1929 rec = TREE_OPERAND (rhs, 0);
1930 if (TREE_CODE (rec) != ADDR_EXPR)
1931 return NULL_TREE;
1932 rec = TREE_OPERAND (rec, 0);
1933 if (TREE_CODE (rec) != PARM_DECL
1934 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1935 return NULL_TREE;
1936 ref_offset = TREE_OPERAND (rhs, 1);
1937
1938 if (use_delta)
1939 fld = delta_field;
1940 else
1941 fld = ptr_field;
1942 if (offset_p)
1943 *offset_p = int_bit_position (fld);
1944
1945 if (ref_field)
1946 {
1947 if (integer_nonzerop (ref_offset))
1948 return NULL_TREE;
1949 return ref_field == fld ? rec : NULL_TREE;
1950 }
1951 else
1952 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1953 : NULL_TREE;
1954 }
1955
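/* Illustrative shapes of loads matched by the function below, mirroring
   the dump excerpts elsewhere in this file (F is a member-pointer
   PARM_DECL):

     f$__pfn_24 = MEM[(struct *)&f].__pfn;   <- COMPONENT_REF over MEM_REF
     f$__pfn_24 = MEM[(struct *)&f + 4B];    <- bare MEM_REF, byte offset

   In the first form the MEM_REF offset must be zero and the referenced
   FIELD_DECL must be the requested field; in the second the offset
   itself must equal the field's byte position.  */
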
1956 /* Returns true iff T is an SSA_NAME defined by a statement. */
1957
1958 static bool
1959 ipa_is_ssa_with_stmt_def (tree t)
1960 {
1961 if (TREE_CODE (t) == SSA_NAME
1962 && !SSA_NAME_IS_DEFAULT_DEF (t))
1963 return true;
1964 else
1965 return false;
1966 }
1967
1968 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1969 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1970 indirect call graph edge. */
1971
1972 static struct cgraph_edge *
1973 ipa_note_param_call (struct cgraph_node *node, int param_index,
1974 gcall *stmt)
1975 {
1976 struct cgraph_edge *cs;
1977
1978 cs = node->get_edge (stmt);
1979 cs->indirect_info->param_index = param_index;
1980 cs->indirect_info->agg_contents = 0;
1981 cs->indirect_info->member_ptr = 0;
1982 cs->indirect_info->guaranteed_unmodified = 0;
1983 return cs;
1984 }
1985
1986 /* Analyze the CALL and examine uses of formal parameters of the caller node
1987 FBI->node (described by FBI->info, which holds
1988 intermediate information about each formal parameter). Currently it checks
1989 whether the call calls a pointer that is a formal parameter and if so, the
1990 parameter is marked with the called flag and an indirect call graph edge
1991 describing the call is created. This is very simple for ordinary pointers
1992 represented in SSA but not-so-nice when it comes to member pointers. The
1993 ugly part of this function does nothing more than trying to match the
1994 pattern of such a call. An example of such a pattern is the gimple dump
1995 below, the call is on the last line:
1996
1997 <bb 2>:
1998 f$__delta_5 = f.__delta;
1999 f$__pfn_24 = f.__pfn;
2000
2001 or
2002 <bb 2>:
2003 f$__delta_5 = MEM[(struct *)&f];
2004 f$__pfn_24 = MEM[(struct *)&f + 4B];
2005
2006 and a few lines below:
2007
2008 <bb 5>
2009 D.2496_3 = (int) f$__pfn_24;
2010 D.2497_4 = D.2496_3 & 1;
2011 if (D.2497_4 != 0)
2012 goto <bb 3>;
2013 else
2014 goto <bb 4>;
2015
2016 <bb 6>:
2017 D.2500_7 = (unsigned int) f$__delta_5;
2018 D.2501_8 = &S + D.2500_7;
2019 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2020 D.2503_10 = *D.2502_9;
2021 D.2504_12 = f$__pfn_24 + -1;
2022 D.2505_13 = (unsigned int) D.2504_12;
2023 D.2506_14 = D.2503_10 + D.2505_13;
2024 D.2507_15 = *D.2506_14;
2025 iftmp.11_16 = (String:: *) D.2507_15;
2026
2027 <bb 7>:
2028 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2029 D.2500_19 = (unsigned int) f$__delta_5;
2030 D.2508_20 = &S + D.2500_19;
2031 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2032
2033 Such patterns are the result of simple calls through a member pointer:
2034
2035 int doprinting (int (MyString::* f)(int) const)
2036 {
2037 MyString S ("somestring");
2038
2039 return (S.*f)(4);
2040 }
2041
2042 Moreover, the function also looks for called pointers loaded from aggregates
2043 passed by value or reference. */
2044
2045 static void
2046 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2047 tree target)
2048 {
2049 struct ipa_node_params *info = fbi->info;
2050 HOST_WIDE_INT offset;
2051 bool by_ref;
2052
2053 if (SSA_NAME_IS_DEFAULT_DEF (target))
2054 {
2055 tree var = SSA_NAME_VAR (target);
2056 int index = ipa_get_param_decl_index (info, var);
2057 if (index >= 0)
2058 ipa_note_param_call (fbi->node, index, call);
2059 return;
2060 }
2061
2062 int index;
2063 gimple *def = SSA_NAME_DEF_STMT (target);
2064 bool guaranteed_unmodified;
2065 if (gimple_assign_single_p (def)
2066 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2067 gimple_assign_rhs1 (def), &index, &offset,
2068 NULL, &by_ref, &guaranteed_unmodified))
2069 {
2070 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2071 cs->indirect_info->offset = offset;
2072 cs->indirect_info->agg_contents = 1;
2073 cs->indirect_info->by_ref = by_ref;
2074 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2075 return;
2076 }
2077
2078 /* Now we need to try to match the complex pattern of calling a member
2079 pointer. */
2080 if (gimple_code (def) != GIMPLE_PHI
2081 || gimple_phi_num_args (def) != 2
2082 || !POINTER_TYPE_P (TREE_TYPE (target))
2083 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2084 return;
2085
2086 /* First, we need to check whether one of these is a load from a member
2087 pointer that is a parameter to this function. */
2088 tree n1 = PHI_ARG_DEF (def, 0);
2089 tree n2 = PHI_ARG_DEF (def, 1);
2090 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2091 return;
2092 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2093 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2094
2095 tree rec;
2096 basic_block bb, virt_bb;
2097 basic_block join = gimple_bb (def);
2098 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2099 {
2100 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2101 return;
2102
2103 bb = EDGE_PRED (join, 0)->src;
2104 virt_bb = gimple_bb (d2);
2105 }
2106 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2107 {
2108 bb = EDGE_PRED (join, 1)->src;
2109 virt_bb = gimple_bb (d1);
2110 }
2111 else
2112 return;
2113
2114 /* Second, we need to check that the basic blocks are laid out in the way
2115 corresponding to the pattern. */
2116
2117 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2118 || single_pred (virt_bb) != bb
2119 || single_succ (virt_bb) != join)
2120 return;
2121
2122 /* Third, let's see that the branching is done depending on the least
2123 significant bit of the pfn. */
2124
2125 gimple *branch = last_stmt (bb);
2126 if (!branch || gimple_code (branch) != GIMPLE_COND)
2127 return;
2128
2129 if ((gimple_cond_code (branch) != NE_EXPR
2130 && gimple_cond_code (branch) != EQ_EXPR)
2131 || !integer_zerop (gimple_cond_rhs (branch)))
2132 return;
2133
2134 tree cond = gimple_cond_lhs (branch);
2135 if (!ipa_is_ssa_with_stmt_def (cond))
2136 return;
2137
2138 def = SSA_NAME_DEF_STMT (cond);
2139 if (!is_gimple_assign (def)
2140 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2141 || !integer_onep (gimple_assign_rhs2 (def)))
2142 return;
2143
2144 cond = gimple_assign_rhs1 (def);
2145 if (!ipa_is_ssa_with_stmt_def (cond))
2146 return;
2147
2148 def = SSA_NAME_DEF_STMT (cond);
2149
2150 if (is_gimple_assign (def)
2151 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2152 {
2153 cond = gimple_assign_rhs1 (def);
2154 if (!ipa_is_ssa_with_stmt_def (cond))
2155 return;
2156 def = SSA_NAME_DEF_STMT (cond);
2157 }
2158
2159 tree rec2;
2160 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2161 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2162 == ptrmemfunc_vbit_in_delta),
2163 NULL);
2164 if (rec != rec2)
2165 return;
2166
2167 index = ipa_get_param_decl_index (info, rec);
2168 if (index >= 0
2169 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2170 {
2171 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2172 cs->indirect_info->offset = offset;
2173 cs->indirect_info->agg_contents = 1;
2174 cs->indirect_info->member_ptr = 1;
2175 cs->indirect_info->guaranteed_unmodified = 1;
2176 }
2177
2178 return;
2179 }
2180
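/* In the simple SSA case the analysis above reduces to, e.g. (an
   editorial illustration):

     void caller (void (*cb) (int)) { cb (4); }

   Here TARGET is the default definition of CB, so the call is noted as
   an indirect call through formal parameter 0, with no aggregate or
   member-pointer processing needed.  */
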
2181 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2182 object referenced in the expression is a formal parameter of the caller
2183 FBI->node (described by FBI->info), create a call note for the
2184 statement. */
2185
2186 static void
2187 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2188 gcall *call, tree target)
2189 {
2190 tree obj = OBJ_TYPE_REF_OBJECT (target);
2191 int index;
2192 HOST_WIDE_INT anc_offset;
2193
2194 if (!flag_devirtualize)
2195 return;
2196
2197 if (TREE_CODE (obj) != SSA_NAME)
2198 return;
2199
2200 struct ipa_node_params *info = fbi->info;
2201 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2202 {
2203 struct ipa_jump_func jfunc;
2204 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2205 return;
2206
2207 anc_offset = 0;
2208 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2209 gcc_assert (index >= 0);
2210 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2211 call, &jfunc))
2212 return;
2213 }
2214 else
2215 {
2216 struct ipa_jump_func jfunc;
2217 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2218 tree expr;
2219
2220 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2221 if (!expr)
2222 return;
2223 index = ipa_get_param_decl_index (info,
2224 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2225 gcc_assert (index >= 0);
2226 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2227 call, &jfunc, anc_offset))
2228 return;
2229 }
2230
2231 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2232 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2233 ii->offset = anc_offset;
2234 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2235 ii->otr_type = obj_type_ref_class (target);
2236 ii->polymorphic = 1;
2237 }
2238
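/* An editorial illustration with hypothetical types:

     struct A { virtual void foo (); };
     void caller (struct A *a) { a->foo (); }

   TARGET is an OBJ_TYPE_REF whose object is the default definition of A,
   so a call note with ANC_OFFSET 0, the parameter index of A and the
   vtable token of foo is created, provided no type change is detected.  */
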
2239 /* Analyze call statement CALL to determine whether and how it utilizes formal
2240 parameters of the caller, described by FBI->info, which also holds
2241 intermediate information about each formal parameter. */
2242
2243 static void
2244 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2245 {
2246 tree target = gimple_call_fn (call);
2247
2248 if (!target
2249 || (TREE_CODE (target) != SSA_NAME
2250 && !virtual_method_call_p (target)))
2251 return;
2252
2253 struct cgraph_edge *cs = fbi->node->get_edge (call);
2254 /* If we previously turned the call into a direct call, there is
2255 no need to analyze. */
2256 if (cs && !cs->indirect_unknown_callee)
2257 return;
2258
2259 if (cs->indirect_info->polymorphic && flag_devirtualize)
2260 {
2261 tree instance;
2262 tree target = gimple_call_fn (call);
2263 ipa_polymorphic_call_context context (current_function_decl,
2264 target, call, &instance);
2265
2266 gcc_checking_assert (cs->indirect_info->otr_type
2267 == obj_type_ref_class (target));
2268 gcc_checking_assert (cs->indirect_info->otr_token
2269 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2270
2271 cs->indirect_info->vptr_changed
2272 = !context.get_dynamic_type (instance,
2273 OBJ_TYPE_REF_OBJECT (target),
2274 obj_type_ref_class (target), call);
2275 cs->indirect_info->context = context;
2276 }
2277
2278 if (TREE_CODE (target) == SSA_NAME)
2279 ipa_analyze_indirect_call_uses (fbi, call, target);
2280 else if (virtual_method_call_p (target))
2281 ipa_analyze_virtual_call_uses (fbi, call, target);
2282 }
2283
2284
2285 /* Analyze the call statement STMT with respect to formal parameters (described
2286 in FBI->info) of the caller given by FBI->NODE. Currently it only checks
2287 whether formal parameters are called. */
2288
2289 static void
2290 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2291 {
2292 if (is_gimple_call (stmt))
2293 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2294 }
2295
2296 /* Callback of walk_stmt_load_store_addr_ops, used for the load, store and
2297 address visits alike. If OP is a parameter declaration, mark it as used
2298 in the info structure passed in DATA. */
2299
2300 static bool
2301 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2302 {
2303 struct ipa_node_params *info = (struct ipa_node_params *) data;
2304
2305 op = get_base_address (op);
2306 if (op
2307 && TREE_CODE (op) == PARM_DECL)
2308 {
2309 int index = ipa_get_param_decl_index (info, op);
2310 gcc_assert (index >= 0);
2311 ipa_set_param_used (info, index, true);
2312 }
2313
2314 return false;
2315 }
2316
2317 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2318 the findings in various structures of the associated ipa_node_params
2319 structure, such as parameter flags, notes etc. FBI holds various data about
2320 the function being analyzed. */
2321
2322 static void
2323 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2324 {
2325 gimple_stmt_iterator gsi;
2326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2327 {
2328 gimple *stmt = gsi_stmt (gsi);
2329
2330 if (is_gimple_debug (stmt))
2331 continue;
2332
2333 ipa_analyze_stmt_uses (fbi, stmt);
2334 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2335 visit_ref_for_mod_analysis,
2336 visit_ref_for_mod_analysis,
2337 visit_ref_for_mod_analysis);
2338 }
2339 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2340 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2341 visit_ref_for_mod_analysis,
2342 visit_ref_for_mod_analysis,
2343 visit_ref_for_mod_analysis);
2344 }
2345
2346 /* Calculate controlled uses of parameters of NODE. */
2347
2348 static void
2349 ipa_analyze_controlled_uses (struct cgraph_node *node)
2350 {
2351 struct ipa_node_params *info = IPA_NODE_REF (node);
2352
2353 for (int i = 0; i < ipa_get_param_count (info); i++)
2354 {
2355 tree parm = ipa_get_param (info, i);
2356 int controlled_uses = 0;
2357
2358 /* For SSA regs see if parameter is used. For non-SSA we compute
2359 the flag during modification analysis. */
2360 if (is_gimple_reg (parm))
2361 {
2362 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2363 parm);
2364 if (ddef && !has_zero_uses (ddef))
2365 {
2366 imm_use_iterator imm_iter;
2367 use_operand_p use_p;
2368
2369 ipa_set_param_used (info, i, true);
2370 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2371 if (!is_gimple_call (USE_STMT (use_p)))
2372 {
2373 if (!is_gimple_debug (USE_STMT (use_p)))
2374 {
2375 controlled_uses = IPA_UNDESCRIBED_USE;
2376 break;
2377 }
2378 }
2379 else
2380 controlled_uses++;
2381 }
2382 else
2383 controlled_uses = 0;
2384 }
2385 else
2386 controlled_uses = IPA_UNDESCRIBED_USE;
2387 ipa_set_controlled_uses (info, i, controlled_uses);
2388 }
2389 }
2390
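/* An editorial illustration: in

     void wrap (void (*cb) (void)) { helper (cb); }

   the only non-debug use of CB's default definition is as an argument of
   a call statement, so its controlled-uses count is 1.  Any other kind
   of use, e.g. storing CB to memory, makes the count
   IPA_UNDESCRIBED_USE.  */
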
2391 /* Free the data in BI. */
2392
2393 static void
2394 free_ipa_bb_info (struct ipa_bb_info *bi)
2395 {
2396 bi->cg_edges.release ();
2397 bi->param_aa_statuses.release ();
2398 }
2399
2400 /* Dominator walker driving the analysis. */
2401
2402 class analysis_dom_walker : public dom_walker
2403 {
2404 public:
2405 analysis_dom_walker (struct ipa_func_body_info *fbi)
2406 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2407
2408 virtual edge before_dom_children (basic_block);
2409
2410 private:
2411 struct ipa_func_body_info *m_fbi;
2412 };
2413
2414 edge
2415 analysis_dom_walker::before_dom_children (basic_block bb)
2416 {
2417 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2418 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2419 return NULL;
2420 }
2421
2422 /* Release body info FBI. */
2423
2424 void
2425 ipa_release_body_info (struct ipa_func_body_info *fbi)
2426 {
2427 int i;
2428 struct ipa_bb_info *bi;
2429
2430 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2431 free_ipa_bb_info (bi);
2432 fbi->bb_infos.release ();
2433 }
2434
2435 /* Initialize the array describing properties of formal parameters
2436 of NODE, analyze their uses and compute jump functions associated
2437 with actual arguments of calls from within NODE. */
2438
2439 void
2440 ipa_analyze_node (struct cgraph_node *node)
2441 {
2442 struct ipa_func_body_info fbi;
2443 struct ipa_node_params *info;
2444
2445 ipa_check_create_node_params ();
2446 ipa_check_create_edge_args ();
2447 info = IPA_NODE_REF (node);
2448
2449 if (info->analysis_done)
2450 return;
2451 info->analysis_done = 1;
2452
2453 if (ipa_func_spec_opts_forbid_analysis_p (node))
2454 {
2455 for (int i = 0; i < ipa_get_param_count (info); i++)
2456 {
2457 ipa_set_param_used (info, i, true);
2458 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2459 }
2460 return;
2461 }
2462
2463 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2464 push_cfun (func);
2465 calculate_dominance_info (CDI_DOMINATORS);
2466 ipa_initialize_node_params (node);
2467 ipa_analyze_controlled_uses (node);
2468
2469 fbi.node = node;
2470 fbi.info = IPA_NODE_REF (node);
2471 fbi.bb_infos = vNULL;
2472 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2473 fbi.param_count = ipa_get_param_count (info);
2474 fbi.aa_walked = 0;
2475
2476 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2477 {
2478 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2479 bi->cg_edges.safe_push (cs);
2480 }
2481
2482 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2483 {
2484 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2485 bi->cg_edges.safe_push (cs);
2486 }
2487
2488 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2489
2490 ipa_release_body_info (&fbi);
2491 free_dominance_info (CDI_DOMINATORS);
2492 pop_cfun ();
2493 }
2494
2495 /* Update the jump functions associated with call graph edge E when the call
2496 graph edge CS is being inlined, assuming that E->caller is already (possibly
2497 indirectly) inlined into CS->callee and that E has not been inlined. */
2498
2499 static void
2500 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2501 struct cgraph_edge *e)
2502 {
2503 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2504 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2505 int count = ipa_get_cs_argument_count (args);
2506 int i;
2507
2508 for (i = 0; i < count; i++)
2509 {
2510 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2511 struct ipa_polymorphic_call_context *dst_ctx
2512 = ipa_get_ith_polymorhic_call_context (args, i);
2513
2514 if (dst->type == IPA_JF_ANCESTOR)
2515 {
2516 struct ipa_jump_func *src;
2517 int dst_fid = dst->value.ancestor.formal_id;
2518 struct ipa_polymorphic_call_context *src_ctx
2519 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2520
2521 /* A variable number of arguments can cause havoc if we try to access
2522 an argument that does not exist on the inlined edge. So make sure we
2523 don't. */
2524 if (dst_fid >= ipa_get_cs_argument_count (top))
2525 {
2526 ipa_set_jf_unknown (dst);
2527 continue;
2528 }
2529
2530 src = ipa_get_ith_jump_func (top, dst_fid);
2531
2532 if (src_ctx && !src_ctx->useless_p ())
2533 {
2534 struct ipa_polymorphic_call_context ctx = *src_ctx;
2535
2536 /* TODO: Make type preserved safe WRT contexts. */
2537 if (!ipa_get_jf_ancestor_type_preserved (dst))
2538 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2539 ctx.offset_by (dst->value.ancestor.offset);
2540 if (!ctx.useless_p ())
2541 {
2542 if (!dst_ctx)
2543 {
2544 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2545 count);
2546 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2547 }
2548
2549 dst_ctx->combine_with (ctx);
2550 }
2551 }
2552
2553 if (src->agg.items
2554 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2555 {
2556 struct ipa_agg_jf_item *item;
2557 int j;
2558
2559 /* Currently we do not produce clobber aggregate jump functions;
2560 replace this with merging when we do. */
2561 gcc_assert (!dst->agg.items);
2562
2563 dst->agg.items = vec_safe_copy (src->agg.items);
2564 dst->agg.by_ref = src->agg.by_ref;
2565 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2566 item->offset -= dst->value.ancestor.offset;
2567 }
2568
2569 if (src->type == IPA_JF_PASS_THROUGH
2570 && src->value.pass_through.operation == NOP_EXPR)
2571 {
2572 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2573 dst->value.ancestor.agg_preserved &=
2574 src->value.pass_through.agg_preserved;
2575 }
2576 else if (src->type == IPA_JF_ANCESTOR)
2577 {
2578 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2579 dst->value.ancestor.offset += src->value.ancestor.offset;
2580 dst->value.ancestor.agg_preserved &=
2581 src->value.ancestor.agg_preserved;
2582 }
2583 else
2584 ipa_set_jf_unknown (dst);
2585 }
2586 else if (dst->type == IPA_JF_PASS_THROUGH)
2587 {
2588 struct ipa_jump_func *src;
2589 /* We must check the range because of calls with a variable number of
2590 arguments, and we cannot combine jump functions with operations. */
2591 if (dst->value.pass_through.operation == NOP_EXPR
2592 && (dst->value.pass_through.formal_id
2593 < ipa_get_cs_argument_count (top)))
2594 {
2595 int dst_fid = dst->value.pass_through.formal_id;
2596 src = ipa_get_ith_jump_func (top, dst_fid);
2597 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2598 struct ipa_polymorphic_call_context *src_ctx
2599 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2600
2601 if (src_ctx && !src_ctx->useless_p ())
2602 {
2603 struct ipa_polymorphic_call_context ctx = *src_ctx;
2604
2605 /* TODO: Make type preserved safe WRT contexts. */
2606 if (!ipa_get_jf_pass_through_type_preserved (dst))
2607 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2608 if (!ctx.useless_p ())
2609 {
2610 if (!dst_ctx)
2611 {
2612 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2613 count);
2614 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2615 }
2616 dst_ctx->combine_with (ctx);
2617 }
2618 }
2619 switch (src->type)
2620 {
2621 case IPA_JF_UNKNOWN:
2622 ipa_set_jf_unknown (dst);
2623 break;
2624 case IPA_JF_CONST:
2625 ipa_set_jf_cst_copy (dst, src);
2626 break;
2627
2628 case IPA_JF_PASS_THROUGH:
2629 {
2630 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2631 enum tree_code operation;
2632 operation = ipa_get_jf_pass_through_operation (src);
2633
2634 if (operation == NOP_EXPR)
2635 {
2636 bool agg_p;
2637 agg_p = dst_agg_p
2638 && ipa_get_jf_pass_through_agg_preserved (src);
2639 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2640 }
2641 else
2642 {
2643 tree operand = ipa_get_jf_pass_through_operand (src);
2644 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2645 operation);
2646 }
2647 break;
2648 }
2649 case IPA_JF_ANCESTOR:
2650 {
2651 bool agg_p;
2652 agg_p = dst_agg_p
2653 && ipa_get_jf_ancestor_agg_preserved (src);
2654 ipa_set_ancestor_jf (dst,
2655 ipa_get_jf_ancestor_offset (src),
2656 ipa_get_jf_ancestor_formal_id (src),
2657 agg_p);
2658 break;
2659 }
2660 default:
2661 gcc_unreachable ();
2662 }
2663
2664 if (src->agg.items
2665 && (dst_agg_p || !src->agg.by_ref))
2666 {
2667 /* Currently we do not produce clobber aggregate jump
2668 functions; replace this with merging when we do. */
2669 gcc_assert (!dst->agg.items);
2670
2671 dst->agg.by_ref = src->agg.by_ref;
2672 dst->agg.items = vec_safe_copy (src->agg.items);
2673 }
2674 }
2675 else
2676 ipa_set_jf_unknown (dst);
2677 }
2678 }
2679 }
2680
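/* An editorial illustration of the remapping above: suppose B has been
   inlined into A and the not-inlined edge E from B to C passed B's
   parameter 0 through unchanged.  If A's call to B (edge CS) passed A's
   parameter 2 in that position as a simple pass-through, the combined
   jump function on E becomes a pass-through of A's parameter 2; known
   aggregate contents are copied from the outer jump function as well,
   subject to the agg_preserved flags.  */
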
2681 /* If TARGET is an addr_expr of a function declaration, make it the
2682 (speculative, if SPECULATIVE is true) destination of an indirect edge IE
2683 and return the edge. Otherwise, return NULL. */
2684
2685 struct cgraph_edge *
2686 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2687 bool speculative)
2688 {
2689 struct cgraph_node *callee;
2690 struct inline_edge_summary *es = inline_edge_summary (ie);
2691 bool unreachable = false;
2692
2693 if (TREE_CODE (target) == ADDR_EXPR)
2694 target = TREE_OPERAND (target, 0);
2695 if (TREE_CODE (target) != FUNCTION_DECL)
2696 {
2697 target = canonicalize_constructor_val (target, NULL);
2698 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2699 {
2700 /* Member pointer call that goes through a VMT lookup. */
2701 if (ie->indirect_info->member_ptr
2702 /* Or if target is not an invariant expression and we do not
2703 know if it will evaluate to a function at runtime.
2704 This can happen when folding through &VAR, where &VAR
2705 is IP invariant, but VAR itself is not.
2706 
2707 TODO: Revisit this when GCC 5 is branched. It seems that
2708 the member_ptr check is not needed and that we may try to fold
2709 the expression and see if VAR is readonly. */
2710 || !is_gimple_ip_invariant (target))
2711 {
2712 if (dump_enabled_p ())
2713 {
2714 location_t loc = gimple_location_safe (ie->call_stmt);
2715 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2716 "discovered direct call non-invariant "
2717 "%s/%i\n",
2718 ie->caller->name (), ie->caller->order);
2719 }
2720 return NULL;
2721 }
2722
2723
2724 if (dump_enabled_p ())
2725 {
2726 location_t loc = gimple_location_safe (ie->call_stmt);
2727 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2728 "discovered direct call to non-function in %s/%i, "
2729 "making it __builtin_unreachable\n",
2730 ie->caller->name (), ie->caller->order);
2731 }
2732
2733 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2734 callee = cgraph_node::get_create (target);
2735 unreachable = true;
2736 }
2737 else
2738 callee = cgraph_node::get (target);
2739 }
2740 else
2741 callee = cgraph_node::get (target);
2742
2743 /* Because may-edges are not explicitly represented and the vtable may be
2744 external, we may create the first reference to the object in the unit. */
2745 if (!callee || callee->global.inlined_to)
2746 {
2747
2748 /* We had better ensure we can refer to it.
2749 In the case of static functions we are out of luck, since we have already
2750 removed their bodies. In the case of public functions we may or may
2751 not introduce the reference. */
2752 if (!canonicalize_constructor_val (target, NULL)
2753 || !TREE_PUBLIC (target))
2754 {
2755 if (dump_file)
2756 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2757 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2758 xstrdup_for_dump (ie->caller->name ()),
2759 ie->caller->order,
2760 xstrdup_for_dump (ie->callee->name ()),
2761 ie->callee->order);
2762 return NULL;
2763 }
2764 callee = cgraph_node::get_create (target);
2765 }
2766
2767 /* If the edge is already speculative, verify that the new target matches. */
2768 if (speculative && ie->speculative)
2769 {
2770 struct cgraph_edge *e2;
2771 struct ipa_ref *ref;
2772 ie->speculative_call_info (e2, ie, ref);
2773 if (e2->callee->ultimate_alias_target ()
2774 != callee->ultimate_alias_target ())
2775 {
2776 if (dump_file)
2777 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2778 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2779 xstrdup_for_dump (ie->caller->name ()),
2780 ie->caller->order,
2781 xstrdup_for_dump (callee->name ()),
2782 callee->order,
2783 xstrdup_for_dump (e2->callee->name ()),
2784 e2->callee->order);
2785 }
2786 else
2787 {
2788 if (dump_file)
2789 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2790 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2791 xstrdup_for_dump (ie->caller->name ()),
2792 ie->caller->order,
2793 xstrdup_for_dump (callee->name ()),
2794 callee->order);
2795 }
2796 return NULL;
2797 }
2798
2799 if (!dbg_cnt (devirt))
2800 return NULL;
2801
2802 ipa_check_create_node_params ();
2803
2804 /* We cannot make edges to inline clones. It is a bug that someone removed
2805 the cgraph node too early. */
2806 gcc_assert (!callee->global.inlined_to);
2807
2808 if (dump_file && !unreachable)
2809 {
2810 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2811 "(%s/%i -> %s/%i), for stmt ",
2812 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2813 speculative ? "speculative" : "known",
2814 xstrdup_for_dump (ie->caller->name ()),
2815 ie->caller->order,
2816 xstrdup_for_dump (callee->name ()),
2817 callee->order);
2818 if (ie->call_stmt)
2819 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2820 else
2821 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2822 }
2823 if (dump_enabled_p ())
2824 {
2825 location_t loc = gimple_location_safe (ie->call_stmt);
2826
2827 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2828 "converting indirect call in %s to direct call to %s\n",
2829 ie->caller->name (), callee->name ());
2830 }
2831 if (!speculative)
2832 {
2833 struct cgraph_edge *orig = ie;
2834 ie = ie->make_direct (callee);
2835 /* If we resolved a speculative edge, the cost is already up to date
2836 for the direct call (adjusted by inline_edge_duplication_hook). */
2837 if (ie == orig)
2838 {
2839 es = inline_edge_summary (ie);
2840 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2841 - eni_size_weights.call_cost);
2842 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2843 - eni_time_weights.call_cost);
2844 }
2845 }
2846 else
2847 {
2848 if (!callee->can_be_discarded_p ())
2849 {
2850 cgraph_node *alias;
2851 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2852 if (alias)
2853 callee = alias;
2854 }
2855 /* make_speculative will update ie's cost to direct call cost. */
2856 ie = ie->make_speculative
2857 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2858 }
2859
2860 return ie;
2861 }
2862
2863 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2864 CONSTRUCTOR and return it. Return NULL if the search fails for some
2865 reason. */
2866
2867 static tree
2868 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2869 {
2870 tree type = TREE_TYPE (constructor);
2871 if (TREE_CODE (type) != ARRAY_TYPE
2872 && TREE_CODE (type) != RECORD_TYPE)
2873 return NULL;
2874
2875 unsigned ix;
2876 tree index, val;
2877 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2878 {
2879 HOST_WIDE_INT elt_offset;
2880 if (TREE_CODE (type) == ARRAY_TYPE)
2881 {
2882 offset_int off;
2883 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2884 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2885
2886 if (index)
2887 {
2888 off = wi::to_offset (index);
2889 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2890 {
2891 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2892 gcc_assert (TREE_CODE (low_bound) == INTEGER_CST);
2893 off = wi::sext (off - wi::to_offset (low_bound),
2894 TYPE_PRECISION (TREE_TYPE (index)));
2895 }
2896 off *= wi::to_offset (unit_size);
2897 }
2898 else
2899 off = wi::to_offset (unit_size) * ix;
2900
2901 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2902 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2903 continue;
2904 elt_offset = off.to_shwi ();
2905 }
2906 else if (TREE_CODE (type) == RECORD_TYPE)
2907 {
2908 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2909 if (DECL_BIT_FIELD (index))
2910 continue;
2911 elt_offset = int_bit_position (index);
2912 }
2913 else
2914 gcc_unreachable ();
2915
2916 if (elt_offset > req_offset)
2917 return NULL;
2918
2919 if (TREE_CODE (val) == CONSTRUCTOR)
2920 return find_constructor_constant_at_offset (val,
2921 req_offset - elt_offset);
2922
2923 if (elt_offset == req_offset
2924 && is_gimple_reg_type (TREE_TYPE (val))
2925 && is_gimple_ip_invariant (val))
2926 return val;
2927 }
2928 return NULL;
2929 }
2930
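/* An editorial illustration with a hypothetical initializer (assuming
   32-bit int):

     static const int arr[3] = { 10, 20, 30 };

   A query with REQ_OFFSET of 64 bits walks the elements, computes an
   ELT_OFFSET of 64 for the third element and returns the constant 30.
   Nested CONSTRUCTORs are handled by the recursive call with the
   remaining offset.  */
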
2931 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2932 invariant from a static constructor and if so, return it. Otherwise return
2933 NULL. */
2934
2935 static tree
2936 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2937 {
2938 if (by_ref)
2939 {
2940 if (TREE_CODE (scalar) != ADDR_EXPR)
2941 return NULL;
2942 scalar = TREE_OPERAND (scalar, 0);
2943 }
2944
2945 if (!VAR_P (scalar)
2946 || !is_global_var (scalar)
2947 || !TREE_READONLY (scalar)
2948 || !DECL_INITIAL (scalar)
2949 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2950 return NULL;
2951
2952 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2953 }
2954
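/* An editorial illustration: given a hypothetical read-only global

     static const struct s conf = { 0 };

   passing &CONF to a callee makes SCALAR the ADDR_EXPR and requires
   BY_REF to be true, after which the initializer of CONF is searched at
   OFFSET; passing CONF by value would instead require BY_REF to be false
   and SCALAR to be the VAR_DECL itself.  */
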
2955 /* Retrieve value from aggregate jump function AGG or static initializer of
2956 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2957 none. BY_REF specifies whether the value has to be passed by reference or
2958 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2959 to is set to true if the value comes from an initializer of a constant. */
2960
2961 tree
2962 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2963 HOST_WIDE_INT offset, bool by_ref,
2964 bool *from_global_constant)
2965 {
2966 struct ipa_agg_jf_item *item;
2967 int i;
2968
2969 if (scalar)
2970 {
2971 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2972 if (res)
2973 {
2974 if (from_global_constant)
2975 *from_global_constant = true;
2976 return res;
2977 }
2978 }
2979
2980 if (!agg
2981 || by_ref != agg->by_ref)
2982 return NULL;
2983
2984 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2985 if (item->offset == offset)
2986 {
2987 /* Currently we do not have clobber values; return NULL for them once
2988 we do. */
2989 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2990 if (from_global_constant)
2991 *from_global_constant = false;
2992 return item->value;
2993 }
2994 return NULL;
2995 }
2996
2997 /* Remove a reference to SYMBOL from the list of references of a node given by
2998 reference description RDESC. Return true if the reference has been
2999 successfully found and removed. */
3000
3001 static bool
3002 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3003 {
3004 struct ipa_ref *to_del;
3005 struct cgraph_edge *origin;
3006
3007 origin = rdesc->cs;
3008 if (!origin)
3009 return false;
3010 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3011 origin->lto_stmt_uid);
3012 if (!to_del)
3013 return false;
3014
3015 to_del->remove_reference ();
3016 if (dump_file)
3017 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3018 xstrdup_for_dump (origin->caller->name ()),
3019 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3020 return true;
3021 }
3022
3023 /* If JFUNC has a reference description with refcount different from
3024 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3025 NULL. JFUNC must be a constant jump function. */
3026
3027 static struct ipa_cst_ref_desc *
3028 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3029 {
3030 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3031 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3032 return rdesc;
3033 else
3034 return NULL;
3035 }
3036
3037 /* If the value of constant jump function JFUNC is an address of a function
3038 declaration, return the associated call graph node. Otherwise return
3039 NULL. */
3040
3041 static cgraph_node *
3042 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3043 {
3044 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3045 tree cst = ipa_get_jf_constant (jfunc);
3046 if (TREE_CODE (cst) != ADDR_EXPR
3047 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3048 return NULL;
3049
3050 return cgraph_node::get (TREE_OPERAND (cst, 0));
3051 }
3052
3053
3054 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3055 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3056 the edge specified in the rdesc. Return false if either the symbol or the
3057 reference could not be found, otherwise return true. */
3058
3059 static bool
3060 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3061 {
3062 struct ipa_cst_ref_desc *rdesc;
3063 if (jfunc->type == IPA_JF_CONST
3064 && (rdesc = jfunc_rdesc_usable (jfunc))
3065 && --rdesc->refcount == 0)
3066 {
3067 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3068 if (!symbol)
3069 return false;
3070
3071 return remove_described_reference (symbol, rdesc);
3072 }
3073 return true;
3074 }
3075
3076 /* Try to find a destination for indirect edge IE that corresponds to a simple
3077 call or a call of a member function pointer and where the destination is a
3078 pointer formal parameter described by jump function JFUNC. If it can be
3079 determined, return the newly direct edge, otherwise return NULL.
3080 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3081
3082 static struct cgraph_edge *
3083 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3084 struct ipa_jump_func *jfunc,
3085 struct ipa_node_params *new_root_info)
3086 {
3087 struct cgraph_edge *cs;
3088 tree target;
3089 bool agg_contents = ie->indirect_info->agg_contents;
3090 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3091 if (agg_contents)
3092 {
3093 bool from_global_constant;
3094 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3095 ie->indirect_info->offset,
3096 ie->indirect_info->by_ref,
3097 &from_global_constant);
3098 if (target
3099 && !from_global_constant
3100 && !ie->indirect_info->guaranteed_unmodified)
3101 return NULL;
3102 }
3103 else
3104 target = scalar;
3105 if (!target)
3106 return NULL;
3107 cs = ipa_make_edge_direct_to_target (ie, target);
3108
3109 if (cs && !agg_contents)
3110 {
3111 bool ok;
3112 gcc_checking_assert (cs->callee
3113 && (cs != ie
3114 || jfunc->type != IPA_JF_CONST
3115 || !cgraph_node_for_jfunc (jfunc)
3116 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3117 ok = try_decrement_rdesc_refcount (jfunc);
3118 gcc_checking_assert (ok);
3119 }
3120
3121 return cs;
3122 }
3123
3124 /* Return the target to be used in cases of impossible devirtualization. IE
3125 and target (the latter can be NULL) are dumped when dumping is enabled. */
3126
3127 tree
3128 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3129 {
3130 if (dump_file)
3131 {
3132 if (target)
3133 fprintf (dump_file,
3134 "Type inconsistent devirtualization: %s/%i->%s\n",
3135 ie->caller->name (), ie->caller->order,
3136 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3137 else
3138 fprintf (dump_file,
3139 "No devirtualization target in %s/%i\n",
3140 ie->caller->name (), ie->caller->order);
3141 }
3142 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3143 cgraph_node::get_create (new_target);
3144 return new_target;
3145 }
3146
3147 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3148 call based on a formal parameter which is described by jump function JFUNC
3149 and if it can be determined, make it direct and return the direct edge.
3150 Otherwise, return NULL. CTX describes the polymorphic context that the
3151 parameter on which the call is based brings along with it. */
3152
3153 static struct cgraph_edge *
3154 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3155 struct ipa_jump_func *jfunc,
3156 struct ipa_polymorphic_call_context ctx)
3157 {
3158 tree target = NULL;
3159 bool speculative = false;
3160
3161 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3162 return NULL;
3163
3164 gcc_assert (!ie->indirect_info->by_ref);
3165
3166 /* Try to do lookup via known virtual table pointer value. */
3167 if (!ie->indirect_info->vptr_changed
3168 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3169 {
3170 tree vtable;
3171 unsigned HOST_WIDE_INT offset;
3172 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3173 : NULL;
3174 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3175 ie->indirect_info->offset,
3176 true);
3177 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3178 {
3179 bool can_refer;
3180 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3181 vtable, offset, &can_refer);
3182 if (can_refer)
3183 {
3184 if (!t
3185 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3186 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3187 || !possible_polymorphic_call_target_p
3188 (ie, cgraph_node::get (t)))
3189 {
3190 /* Do not speculate builtin_unreachable, it is stupid! */
3191 if (!ie->indirect_info->vptr_changed)
3192 target = ipa_impossible_devirt_target (ie, target);
3193 else
3194 target = NULL;
3195 }
3196 else
3197 {
3198 target = t;
3199 speculative = ie->indirect_info->vptr_changed;
3200 }
3201 }
3202 }
3203 }
3204
3205 ipa_polymorphic_call_context ie_context (ie);
3206 vec <cgraph_node *>targets;
3207 bool final;
3208
3209 ctx.offset_by (ie->indirect_info->offset);
3210 if (ie->indirect_info->vptr_changed)
3211 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3212 ie->indirect_info->otr_type);
3213 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3214 targets = possible_polymorphic_call_targets
3215 (ie->indirect_info->otr_type,
3216 ie->indirect_info->otr_token,
3217 ctx, &final);
3218 if (final && targets.length () <= 1)
3219 {
3220 speculative = false;
3221 if (targets.length () == 1)
3222 target = targets[0]->decl;
3223 else
3224 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3225 }
3226 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3227 && !ie->speculative && ie->maybe_hot_p ())
3228 {
3229 cgraph_node *n;
3230 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3231 ie->indirect_info->otr_token,
3232 ie->indirect_info->context);
3233 if (n)
3234 {
3235 target = n->decl;
3236 speculative = true;
3237 }
3238 }
3239
3240 if (target)
3241 {
3242 if (!possible_polymorphic_call_target_p
3243 (ie, cgraph_node::get_create (target)))
3244 {
3245 if (speculative)
3246 return NULL;
3247 target = ipa_impossible_devirt_target (ie, target);
3248 }
3249 return ipa_make_edge_direct_to_target (ie, target, speculative);
3250 }
3251 else
3252 return NULL;
3253 }
3254
3255 /* Update the param called notes associated with NODE when CS is being inlined,
3256 assuming NODE is (potentially indirectly) inlined into CS->callee.
3257 Moreover, if the callee is discovered to be constant, create a new cgraph
3258 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3259 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3260
3261 static bool
3262 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3263 struct cgraph_node *node,
3264 vec<cgraph_edge *> *new_edges)
3265 {
3266 struct ipa_edge_args *top;
3267 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3268 struct ipa_node_params *new_root_info;
3269 bool res = false;
3270
3271 ipa_check_create_edge_args ();
3272 top = IPA_EDGE_REF (cs);
3273 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3274 ? cs->caller->global.inlined_to
3275 : cs->caller);
3276
3277 for (ie = node->indirect_calls; ie; ie = next_ie)
3278 {
3279 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3280 struct ipa_jump_func *jfunc;
3281 int param_index;
3282 cgraph_node *spec_target = NULL;
3283
3284 next_ie = ie->next_callee;
3285
3286 if (ici->param_index == -1)
3287 continue;
3288
3289 /* We must check the range because of calls with a variable number of arguments: */
3290 if (ici->param_index >= ipa_get_cs_argument_count (top))
3291 {
3292 ici->param_index = -1;
3293 continue;
3294 }
3295
3296 param_index = ici->param_index;
3297 jfunc = ipa_get_ith_jump_func (top, param_index);
3298
3299 if (ie->speculative)
3300 {
3301 struct cgraph_edge *de;
3302 struct ipa_ref *ref;
3303 ie->speculative_call_info (de, ie, ref);
3304 spec_target = de->callee;
3305 }
3306
3307 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3308 new_direct_edge = NULL;
3309 else if (ici->polymorphic)
3310 {
3311 ipa_polymorphic_call_context ctx;
3312 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3313 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3314 }
3315 else
3316 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3317 new_root_info);
3318 /* If speculation was removed, then we need to do nothing. */
3319 if (new_direct_edge && new_direct_edge != ie
3320 && new_direct_edge->callee == spec_target)
3321 {
3322 new_direct_edge->indirect_inlining_edge = 1;
3323 top = IPA_EDGE_REF (cs);
3324 res = true;
3325 if (!new_direct_edge->speculative)
3326 continue;
3327 }
3328 else if (new_direct_edge)
3329 {
3330 new_direct_edge->indirect_inlining_edge = 1;
3331 if (new_direct_edge->call_stmt)
3332 new_direct_edge->call_stmt_cannot_inline_p
3333 = !gimple_check_call_matching_types (
3334 new_direct_edge->call_stmt,
3335 new_direct_edge->callee->decl, false);
3336 if (new_edges)
3337 {
3338 new_edges->safe_push (new_direct_edge);
3339 res = true;
3340 }
3341 top = IPA_EDGE_REF (cs);
3342 /* If a speculative edge was introduced, we still need to update
3343 the call info of the indirect edge. */
3344 if (!new_direct_edge->speculative)
3345 continue;
3346 }
3347 if (jfunc->type == IPA_JF_PASS_THROUGH
3348 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3349 {
3350 if (ici->agg_contents
3351 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3352 && !ici->polymorphic)
3353 ici->param_index = -1;
3354 else
3355 {
3356 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3357 if (ici->polymorphic
3358 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3359 ici->vptr_changed = true;
3360 }
3361 }
3362 else if (jfunc->type == IPA_JF_ANCESTOR)
3363 {
3364 if (ici->agg_contents
3365 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3366 && !ici->polymorphic)
3367 ici->param_index = -1;
3368 else
3369 {
3370 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3371 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3372 if (ici->polymorphic
3373 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3374 ici->vptr_changed = true;
3375 }
3376 }
3377 else
3378 /* Either we can find a destination for this edge now or never. */
3379 ici->param_index = -1;
3380 }
3381
3382 return res;
3383 }
3384
3385 /* Recursively traverse subtree of NODE (including node) made of inlined
3386 cgraph_edges when CS has been inlined and invoke
3387 update_indirect_edges_after_inlining on all nodes and
3388 update_jump_functions_after_inlining on all non-inlined edges that lead out
3389 of this subtree. Newly discovered indirect edges will be added to
3390 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3391 created. */
3392
3393 static bool
3394 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3395 struct cgraph_node *node,
3396 vec<cgraph_edge *> *new_edges)
3397 {
3398 struct cgraph_edge *e;
3399 bool res;
3400
3401 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3402
3403 for (e = node->callees; e; e = e->next_callee)
3404 if (!e->inline_failed)
3405 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3406 else
3407 update_jump_functions_after_inlining (cs, e);
3408 for (e = node->indirect_calls; e; e = e->next_callee)
3409 update_jump_functions_after_inlining (cs, e);
3410
3411 return res;
3412 }
3413
3414 /* Combine two controlled uses counts as done during inlining. */
3415
3416 static int
3417 combine_controlled_uses_counters (int c, int d)
3418 {
3419 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3420 return IPA_UNDESCRIBED_USE;
3421 else
3422 return c + d - 1;
3423 }
3424
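/* For example: if the caller itself has C == 2 described uses of a value
   that it also passes to the inlined callee, and the callee's parameter
   has D == 3 controlled uses, the use constituted by the now-inlined
   call disappears, giving 2 + 3 - 1 == 4 remaining controlled uses.  */
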
3425 /* Propagate the number of controlled users from CS->callee to the new root
3426 of the tree of inlined nodes. */
3427
3428 static void
3429 propagate_controlled_uses (struct cgraph_edge *cs)
3430 {
3431 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3432 struct cgraph_node *new_root = cs->caller->global.inlined_to
3433 ? cs->caller->global.inlined_to : cs->caller;
3434 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3435 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3436 int count, i;
3437
3438 count = MIN (ipa_get_cs_argument_count (args),
3439 ipa_get_param_count (old_root_info));
3440 for (i = 0; i < count; i++)
3441 {
3442 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3443 struct ipa_cst_ref_desc *rdesc;
3444
3445 if (jf->type == IPA_JF_PASS_THROUGH)
3446 {
3447 int src_idx, c, d;
3448 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3449 c = ipa_get_controlled_uses (new_root_info, src_idx);
3450 d = ipa_get_controlled_uses (old_root_info, i);
3451
3452 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3453 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3454 c = combine_controlled_uses_counters (c, d);
3455 ipa_set_controlled_uses (new_root_info, src_idx, c);
3456 if (c == 0 && new_root_info->ipcp_orig_node)
3457 {
3458 struct cgraph_node *n;
3459 struct ipa_ref *ref;
3460 tree t = new_root_info->known_csts[src_idx];
3461
3462 if (t && TREE_CODE (t) == ADDR_EXPR
3463 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3464 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3465 && (ref = new_root->find_reference (n, NULL, 0)))
3466 {
3467 if (dump_file)
3468 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3469 "reference from %s/%i to %s/%i.\n",
3470 xstrdup_for_dump (new_root->name ()),
3471 new_root->order,
3472 xstrdup_for_dump (n->name ()), n->order);
3473 ref->remove_reference ();
3474 }
3475 }
3476 }
3477 else if (jf->type == IPA_JF_CONST
3478 && (rdesc = jfunc_rdesc_usable (jf)))
3479 {
3480 int d = ipa_get_controlled_uses (old_root_info, i);
3481 int c = rdesc->refcount;
3482 rdesc->refcount = combine_controlled_uses_counters (c, d);
3483 if (rdesc->refcount == 0)
3484 {
3485 tree cst = ipa_get_jf_constant (jf);
3486 struct cgraph_node *n;
3487 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3488 && TREE_CODE (TREE_OPERAND (cst, 0))
3489 == FUNCTION_DECL);
3490 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3491 if (n)
3492 {
3493 struct cgraph_node *clone;
3494 bool ok;
3495 ok = remove_described_reference (n, rdesc);
3496 gcc_checking_assert (ok);
3497
3498 clone = cs->caller;
3499 while (clone->global.inlined_to
3500 && clone != rdesc->cs->caller
3501 && IPA_NODE_REF (clone)->ipcp_orig_node)
3502 {
3503 struct ipa_ref *ref;
3504 ref = clone->find_reference (n, NULL, 0);
3505 if (ref)
3506 {
3507 if (dump_file)
3508 fprintf (dump_file, "ipa-prop: Removing "
3509 "cloning-created reference "
3510 "from %s/%i to %s/%i.\n",
3511 xstrdup_for_dump (clone->name ()),
3512 clone->order,
3513 xstrdup_for_dump (n->name ()),
3514 n->order);
3515 ref->remove_reference ();
3516 }
3517 clone = clone->callers->caller;
3518 }
3519 }
3520 }
3521 }
3522 }
3523
3524 for (i = ipa_get_param_count (old_root_info);
3525 i < ipa_get_cs_argument_count (args);
3526 i++)
3527 {
3528 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3529
3530 if (jf->type == IPA_JF_CONST)
3531 {
3532 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3533 if (rdesc)
3534 rdesc->refcount = IPA_UNDESCRIBED_USE;
3535 }
3536 else if (jf->type == IPA_JF_PASS_THROUGH)
3537 ipa_set_controlled_uses (new_root_info,
3538 jf->value.pass_through.formal_id,
3539 IPA_UNDESCRIBED_USE);
3540 }
3541 }
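/* An editorial illustration: if the inlined edge carried a constant jump
   function &FOO (a hypothetical function) whose rdesc refcount drops to
   zero above, the IPA reference from the caller to FOO that described
   that use is removed, and the walk over intermediate inline clones
   removes the cloning-created copies of that reference as well.  */
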
3542
3543 /* Update jump functions and call note functions on inlining the call site CS.
3544 CS is expected to lead to a node already cloned by
3545 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3546 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3547 created. */
3548
3549 bool
3550 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3551 vec<cgraph_edge *> *new_edges)
3552 {
3553 bool changed;
3554 /* Do nothing if the preparation phase has not been carried out yet
3555 (i.e. during early inlining). */
3556 if (!ipa_node_params_sum)
3557 return false;
3558 gcc_assert (ipa_edge_args_vector);
3559
3560 propagate_controlled_uses (cs);
3561 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3562
3563 return changed;
3564 }
3565
3566 /* Frees all dynamically allocated structures that the argument info points
3567 to. */
3568
3569 void
3570 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3571 {
3572 vec_free (args->jump_functions);
3573 memset (args, 0, sizeof (*args));
3574 }
3575
3576 /* Free all ipa_edge_args structures. */
3577
3578 void
3579 ipa_free_all_edge_args (void)
3580 {
3581 int i;
3582 struct ipa_edge_args *args;
3583
3584 if (!ipa_edge_args_vector)
3585 return;
3586
3587 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3588 ipa_free_edge_args_substructures (args);
3589
3590 vec_free (ipa_edge_args_vector);
3591 }
3592
3593 /* Frees all dynamically allocated structures that the param info points
3594 to. */
3595
3596 ipa_node_params::~ipa_node_params ()
3597 {
3598 descriptors.release ();
3599 free (lattices);
3600 /* Lattice values and their sources are deallocated with their allocation
3601 pool. */
3602 known_csts.release ();
3603 known_contexts.release ();
3604
3605 lattices = NULL;
3606 ipcp_orig_node = NULL;
3607 analysis_done = 0;
3608 node_enqueued = 0;
3609 do_clone_for_all_contexts = 0;
3610 is_all_contexts_clone = 0;
3611 node_dead = 0;
3612 }
3613
3614 /* Free all ipa_node_params structures. */
3615
3616 void
3617 ipa_free_all_node_params (void)
3618 {
3619 delete ipa_node_params_sum;
3620 ipa_node_params_sum = NULL;
3621 }
3622
3623 /* Grow ipcp_transformations if necessary. */
3624
3625 void
3626 ipcp_grow_transformations_if_necessary (void)
3627 {
3628 if (vec_safe_length (ipcp_transformations)
3629 <= (unsigned) symtab->cgraph_max_uid)
3630 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3631 }
3632
3633 /* Set the aggregate replacements of NODE to be AGGVALS. */
3634
3635 void
3636 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3637 struct ipa_agg_replacement_value *aggvals)
3638 {
3639 ipcp_grow_transformations_if_necessary ();
3640 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3641 }
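/* For illustration, a minimal sketch (hypothetical values) of building a
   one-element chain stating that the aggregate part at offset 0, passed by
   reference in parameter 0, is known to be the constant 7:

     ipa_agg_replacement_value *v = ggc_alloc<ipa_agg_replacement_value> ();
     v->offset = 0;
     v->index = 0;
     v->value = build_int_cst (integer_type_node, 7);
     v->by_ref = true;
     v->next = NULL;
     ipa_set_node_agg_value_chain (node, v);

   The fields mirror what read_ipcp_transformation_info streams in below.  */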
3642
3643 /* Hook that is called by cgraph.c when an edge is removed. */
3644
3645 static void
3646 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3647 {
3648 struct ipa_edge_args *args;
3649
3650 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3651 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3652 return;
3653
3654 args = IPA_EDGE_REF (cs);
3655 if (args->jump_functions)
3656 {
3657 struct ipa_jump_func *jf;
3658 int i;
3659 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3660 {
3661 struct ipa_cst_ref_desc *rdesc;
3662 try_decrement_rdesc_refcount (jf);
3663 if (jf->type == IPA_JF_CONST
3664 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3665 && rdesc->cs == cs)
3666 rdesc->cs = NULL;
3667 }
3668 }
3669
3670 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3671 }
3672
3673 /* Hook that is called by cgraph.c when an edge is duplicated. */
3674
3675 static void
3676 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3677 void *)
3678 {
3679 struct ipa_edge_args *old_args, *new_args;
3680 unsigned int i;
3681
3682 ipa_check_create_edge_args ();
3683
3684 old_args = IPA_EDGE_REF (src);
3685 new_args = IPA_EDGE_REF (dst);
3686
3687 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3688 if (old_args->polymorphic_call_contexts)
3689 new_args->polymorphic_call_contexts
3690 = vec_safe_copy (old_args->polymorphic_call_contexts);
3691
3692 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3693 {
3694 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3695 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3696
3697 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3698
3699 if (src_jf->type == IPA_JF_CONST)
3700 {
3701 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3702
3703 if (!src_rdesc)
3704 dst_jf->value.constant.rdesc = NULL;
3705 else if (src->caller == dst->caller)
3706 {
3707 struct ipa_ref *ref;
3708 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3709 gcc_checking_assert (n);
3710 ref = src->caller->find_reference (n, src->call_stmt,
3711 src->lto_stmt_uid);
3712 gcc_checking_assert (ref);
3713 dst->caller->clone_reference (ref, ref->stmt);
3714
3715 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3716 dst_rdesc->cs = dst;
3717 dst_rdesc->refcount = src_rdesc->refcount;
3718 dst_rdesc->next_duplicate = NULL;
3719 dst_jf->value.constant.rdesc = dst_rdesc;
3720 }
3721 else if (src_rdesc->cs == src)
3722 {
3723 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3724 dst_rdesc->cs = dst;
3725 dst_rdesc->refcount = src_rdesc->refcount;
3726 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3727 src_rdesc->next_duplicate = dst_rdesc;
3728 dst_jf->value.constant.rdesc = dst_rdesc;
3729 }
3730 else
3731 {
3732 struct ipa_cst_ref_desc *dst_rdesc;
3733 /* This can happen during inlining, when a JFUNC can refer to a
3734 reference taken in a function up in the tree of inline clones.
3735 We need to find the duplicate that refers to our tree of
3736 inline clones. */
3737
3738 gcc_assert (dst->caller->global.inlined_to);
3739 for (dst_rdesc = src_rdesc->next_duplicate;
3740 dst_rdesc;
3741 dst_rdesc = dst_rdesc->next_duplicate)
3742 {
3743 struct cgraph_node *top;
3744 top = dst_rdesc->cs->caller->global.inlined_to
3745 ? dst_rdesc->cs->caller->global.inlined_to
3746 : dst_rdesc->cs->caller;
3747 if (dst->caller->global.inlined_to == top)
3748 break;
3749 }
3750 gcc_assert (dst_rdesc);
3751 dst_jf->value.constant.rdesc = dst_rdesc;
3752 }
3753 }
3754 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3755 && src->caller == dst->caller)
3756 {
3757 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3758 ? dst->caller->global.inlined_to : dst->caller;
3759 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3760 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3761
3762 int c = ipa_get_controlled_uses (root_info, idx);
3763 if (c != IPA_UNDESCRIBED_USE)
3764 {
3765 c++;
3766 ipa_set_controlled_uses (root_info, idx, c);
3767 }
3768 }
3769 }
3770 }
3771
3772 /* Analyze a newly added function in the call graph. */
3773
3774 static void
3775 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3776 {
3777 if (node->has_gimple_body_p ())
3778 ipa_analyze_node (node);
3779 }
3780
3781 /* Hook that is called by the summary infrastructure when a node is duplicated. */
3782
3783 void
3784 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3785 ipa_node_params *old_info,
3786 ipa_node_params *new_info)
3787 {
3788 ipa_agg_replacement_value *old_av, *new_av;
3789
3790 new_info->descriptors = old_info->descriptors.copy ();
3791 new_info->lattices = NULL;
3792 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3793
3794 new_info->analysis_done = old_info->analysis_done;
3795 new_info->node_enqueued = old_info->node_enqueued;
3796 new_info->versionable = old_info->versionable;
3797
3798 old_av = ipa_get_agg_replacements_for_node (src);
3799 if (old_av)
3800 {
3801 new_av = NULL;
3802 while (old_av)
3803 {
3804 struct ipa_agg_replacement_value *v;
3805
3806 v = ggc_alloc<ipa_agg_replacement_value> ();
3807 memcpy (v, old_av, sizeof (*v));
3808 v->next = new_av;
3809 new_av = v;
3810 old_av = old_av->next;
3811 }
3812 ipa_set_node_agg_value_chain (dst, new_av);
3813 }
3814
3815 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3816
3817 if (src_trans)
3818 {
3819 ipcp_grow_transformations_if_necessary ();
3820 src_trans = ipcp_get_transformation_summary (src);
3821 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3822 vec<ipa_vr, va_gc> *&dst_vr
3823 = ipcp_get_transformation_summary (dst)->m_vr;
3824 if (vec_safe_length (src_trans->m_vr) > 0)
3825 {
3826 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3827 for (unsigned i = 0; i < src_vr->length (); ++i)
3828 dst_vr->quick_push ((*src_vr)[i]);
3829 }
3830 }
3831
3832 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3833 {
3834 ipcp_grow_transformations_if_necessary ();
3835 src_trans = ipcp_get_transformation_summary (src);
3836 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3837 vec<ipa_bits, va_gc> *&dst_bits
3838 = ipcp_get_transformation_summary (dst)->bits;
3839 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3840 for (unsigned i = 0; i < src_bits->length (); ++i)
3841 dst_bits->quick_push ((*src_bits)[i]);
3842 }
3843 }
3844
3845 /* Register our cgraph hooks if they are not already there. */
3846
3847 void
3848 ipa_register_cgraph_hooks (void)
3849 {
3850 ipa_check_create_node_params ();
3851
3852 if (!edge_removal_hook_holder)
3853 edge_removal_hook_holder =
3854 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3855 if (!edge_duplication_hook_holder)
3856 edge_duplication_hook_holder =
3857 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3858 function_insertion_hook_holder =
3859 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3860 }
3861
3862 /* Unregister our cgraph hooks. */
3863
3864 static void
3865 ipa_unregister_cgraph_hooks (void)
3866 {
3867 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3868 edge_removal_hook_holder = NULL;
3869 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3870 edge_duplication_hook_holder = NULL;
3871 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3872 function_insertion_hook_holder = NULL;
3873 }
3874
3875 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3876 longer needed after ipa-cp. */
3877
3878 void
3879 ipa_free_all_structures_after_ipa_cp (void)
3880 {
3881 if (!optimize && !in_lto_p)
3882 {
3883 ipa_free_all_edge_args ();
3884 ipa_free_all_node_params ();
3885 ipcp_sources_pool.release ();
3886 ipcp_cst_values_pool.release ();
3887 ipcp_poly_ctx_values_pool.release ();
3888 ipcp_agg_lattice_pool.release ();
3889 ipa_unregister_cgraph_hooks ();
3890 ipa_refdesc_pool.release ();
3891 }
3892 }
3893
3894 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3895 longer needed after indirect inlining. */
3896
3897 void
3898 ipa_free_all_structures_after_iinln (void)
3899 {
3900 ipa_free_all_edge_args ();
3901 ipa_free_all_node_params ();
3902 ipa_unregister_cgraph_hooks ();
3903 ipcp_sources_pool.release ();
3904 ipcp_cst_values_pool.release ();
3905 ipcp_poly_ctx_values_pool.release ();
3906 ipcp_agg_lattice_pool.release ();
3907 ipa_refdesc_pool.release ();
3908 }
3909
3910 /* Print the ipa_tree_map data structures (parameter descriptors) of
3911 function NODE to F. */
3912
3913 void
3914 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3915 {
3916 int i, count;
3917 struct ipa_node_params *info;
3918
3919 if (!node->definition)
3920 return;
3921 info = IPA_NODE_REF (node);
3922 fprintf (f, " function %s/%i parameter descriptors:\n",
3923 node->name (), node->order);
3924 count = ipa_get_param_count (info);
3925 for (i = 0; i < count; i++)
3926 {
3927 int c;
3928
3929 fprintf (f, " ");
3930 ipa_dump_param (f, info, i);
3931 if (ipa_is_param_used (info, i))
3932 fprintf (f, " used");
3933 c = ipa_get_controlled_uses (info, i);
3934 if (c == IPA_UNDESCRIBED_USE)
3935 fprintf (f, " undescribed_use");
3936 else
3937 fprintf (f, " controlled_uses=%i", c);
3938 fprintf (f, "\n");
3939 }
3940 }
3941
3942 /* Print ipa_tree_map data structures of all functions in the
3943 callgraph to F. */
3944
3945 void
3946 ipa_print_all_params (FILE * f)
3947 {
3948 struct cgraph_node *node;
3949
3950 fprintf (f, "\nFunction parameters:\n");
3951 FOR_EACH_FUNCTION (node)
3952 ipa_print_node_params (f, node);
3953 }
3954
3955 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3956
3957 vec<tree>
3958 ipa_get_vector_of_formal_parms (tree fndecl)
3959 {
3960 vec<tree> args;
3961 int count;
3962 tree parm;
3963
3964 gcc_assert (!flag_wpa);
3965 count = count_formal_params (fndecl);
3966 args.create (count);
3967 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3968 args.quick_push (parm);
3969
3970 return args;
3971 }
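/* Usage sketch (illustrative): the returned vector is heap-allocated, so
   the caller is responsible for releasing it:

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     for (unsigned j = 0; j < parms.length (); j++)
       gcc_checking_assert (TREE_CODE (parms[j]) == PARM_DECL);
     parms.release ();  */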
3972
3973 /* Return a heap-allocated vector containing the types of the formal
3974 parameters of function type FNTYPE. */
3975
3976 vec<tree>
3977 ipa_get_vector_of_formal_parm_types (tree fntype)
3978 {
3979 vec<tree> types;
3980 int count = 0;
3981 tree t;
3982
3983 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3984 count++;
3985
3986 types.create (count);
3987 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3988 types.quick_push (TREE_VALUE (t));
3989
3990 return types;
3991 }
3992
3993 /* Modify the function declaration FNDECL and its type according to the plan in
3994 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3995 to reflect the actual parameters being modified, which are determined by the
3996 base_index field. */
3997
3998 void
3999 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4000 {
4001 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4002 tree orig_type = TREE_TYPE (fndecl);
4003 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4004
4005 /* The following test is an ugly hack; some functions simply don't have any
4006 arguments in their type. This is probably a bug but well... */
4007 bool care_for_types = (old_arg_types != NULL_TREE);
4008 bool last_parm_void;
4009 vec<tree> otypes;
4010 if (care_for_types)
4011 {
4012 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4013 == void_type_node);
4014 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4015 if (last_parm_void)
4016 gcc_assert (oparms.length () + 1 == otypes.length ());
4017 else
4018 gcc_assert (oparms.length () == otypes.length ());
4019 }
4020 else
4021 {
4022 last_parm_void = false;
4023 otypes.create (0);
4024 }
4025
4026 int len = adjustments.length ();
4027 tree *link = &DECL_ARGUMENTS (fndecl);
4028 tree new_arg_types = NULL;
4029 for (int i = 0; i < len; i++)
4030 {
4031 struct ipa_parm_adjustment *adj;
4032 gcc_assert (link);
4033
4034 adj = &adjustments[i];
4035 tree parm;
4036 if (adj->op == IPA_PARM_OP_NEW)
4037 parm = NULL;
4038 else
4039 parm = oparms[adj->base_index];
4040 adj->base = parm;
4041
4042 if (adj->op == IPA_PARM_OP_COPY)
4043 {
4044 if (care_for_types)
4045 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4046 new_arg_types);
4047 *link = parm;
4048 link = &DECL_CHAIN (parm);
4049 }
4050 else if (adj->op != IPA_PARM_OP_REMOVE)
4051 {
4052 tree new_parm;
4053 tree ptype;
4054
4055 if (adj->by_ref)
4056 ptype = build_pointer_type (adj->type);
4057 else
4058 {
4059 ptype = adj->type;
4060 if (is_gimple_reg_type (ptype))
4061 {
4062 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4063 if (TYPE_ALIGN (ptype) != malign)
4064 ptype = build_aligned_type (ptype, malign);
4065 }
4066 }
4067
4068 if (care_for_types)
4069 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4070
4071 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4072 ptype);
4073 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4074 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4075 DECL_ARTIFICIAL (new_parm) = 1;
4076 DECL_ARG_TYPE (new_parm) = ptype;
4077 DECL_CONTEXT (new_parm) = fndecl;
4078 TREE_USED (new_parm) = 1;
4079 DECL_IGNORED_P (new_parm) = 1;
4080 layout_decl (new_parm, 0);
4081
4082 if (adj->op == IPA_PARM_OP_NEW)
4083 adj->base = NULL;
4084 else
4085 adj->base = parm;
4086 adj->new_decl = new_parm;
4087
4088 *link = new_parm;
4089 link = &DECL_CHAIN (new_parm);
4090 }
4091 }
4092
4093 *link = NULL_TREE;
4094
4095 tree new_reversed = NULL;
4096 if (care_for_types)
4097 {
4098 new_reversed = nreverse (new_arg_types);
4099 if (last_parm_void)
4100 {
4101 if (new_reversed)
4102 TREE_CHAIN (new_arg_types) = void_list_node;
4103 else
4104 new_reversed = void_list_node;
4105 }
4106 }
4107
4108 /* Use copy_node to preserve as much as possible from the original type
4109 (debug info, attribute lists etc.).
4110 The exception is that METHOD_TYPEs must have a THIS argument;
4111 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4112 instead. */
4113 tree new_type = NULL;
4114 if (TREE_CODE (orig_type) != METHOD_TYPE
4115 || (adjustments[0].op == IPA_PARM_OP_COPY
4116 && adjustments[0].base_index == 0))
4117 {
4118 new_type = build_distinct_type_copy (orig_type);
4119 TYPE_ARG_TYPES (new_type) = new_reversed;
4120 }
4121 else
4122 {
4123 new_type
4124 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4125 new_reversed));
4126 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4127 DECL_VINDEX (fndecl) = NULL_TREE;
4128 }
4129
4130 /* When signature changes, we need to clear builtin info. */
4131 if (DECL_BUILT_IN (fndecl))
4132 {
4133 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4134 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4135 }
4136
4137 TREE_TYPE (fndecl) = new_type;
4138 DECL_VIRTUAL_P (fndecl) = 0;
4139 DECL_LANG_SPECIFIC (fndecl) = NULL;
4140 otypes.release ();
4141 oparms.release ();
4142 }
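/* A worked example (hypothetical): to rewrite

     int f (int a, struct S *p)

   so that the second parameter is replaced by the scalar member of *p at
   bit offset 32, ADJUSTMENTS would hold an IPA_PARM_OP_COPY entry with
   base_index 0 followed by an entry with base_index 1, type int, offset 32
   and by_ref false.  After this function runs, DECL_ARGUMENTS and
   TYPE_ARG_TYPES describe 'int f (int a, int <synthetic>)', the synthetic
   parameter being named from adj->arg_prefix or the "SYNTH" fallback.  */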
4143
4144 /* Modify the actual arguments of the function call STMT as indicated in
4145 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
4146 Otherwise it must contain the corresponding call graph edge. */
4147
4148 void
4149 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4150 ipa_parm_adjustment_vec adjustments)
4151 {
4152 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4153 vec<tree> vargs;
4154 vec<tree, va_gc> **debug_args = NULL;
4155 gcall *new_stmt;
4156 gimple_stmt_iterator gsi, prev_gsi;
4157 tree callee_decl;
4158 int i, len;
4159
4160 len = adjustments.length ();
4161 vargs.create (len);
4162 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4163 current_node->remove_stmt_references (stmt);
4164
4165 gsi = gsi_for_stmt (stmt);
4166 prev_gsi = gsi;
4167 gsi_prev (&prev_gsi);
4168 for (i = 0; i < len; i++)
4169 {
4170 struct ipa_parm_adjustment *adj;
4171
4172 adj = &adjustments[i];
4173
4174 if (adj->op == IPA_PARM_OP_COPY)
4175 {
4176 tree arg = gimple_call_arg (stmt, adj->base_index);
4177
4178 vargs.quick_push (arg);
4179 }
4180 else if (adj->op != IPA_PARM_OP_REMOVE)
4181 {
4182 tree expr, base, off;
4183 location_t loc;
4184 unsigned int deref_align = 0;
4185 bool deref_base = false;
4186
4187 /* When we create a new parameter out of the value of the old one, we can
4188 do the following kinds of transformations:
4189
4190 - A scalar passed by reference is converted to a scalar passed by
4191 value. (adj->by_ref is false and the type of the original
4192 actual argument is a pointer to a scalar).
4193
4194 - A part of an aggregate is passed instead of the whole aggregate.
4195 The part can be passed either by value or by reference, this is
4196 determined by value of adj->by_ref. Moreover, the code below
4197 handles both situations when the original aggregate is passed by
4198 value (its type is not a pointer) and when it is passed by
4199 reference (it is a pointer to an aggregate).
4200
4201 When the new argument is passed by reference (adj->by_ref is true)
4202 it must be a part of an aggregate and therefore we form it by
4203 simply taking the address of a reference inside the original
4204 aggregate. */
4205
4206 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4207 base = gimple_call_arg (stmt, adj->base_index);
4208 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4209 : EXPR_LOCATION (base);
4210
4211 if (TREE_CODE (base) != ADDR_EXPR
4212 && POINTER_TYPE_P (TREE_TYPE (base)))
4213 off = build_int_cst (adj->alias_ptr_type,
4214 adj->offset / BITS_PER_UNIT);
4215 else
4216 {
4217 HOST_WIDE_INT base_offset;
4218 tree prev_base;
4219 bool addrof;
4220
4221 if (TREE_CODE (base) == ADDR_EXPR)
4222 {
4223 base = TREE_OPERAND (base, 0);
4224 addrof = true;
4225 }
4226 else
4227 addrof = false;
4228 prev_base = base;
4229 base = get_addr_base_and_unit_offset (base, &base_offset);
4230 /* Aggregate arguments can have non-invariant addresses. */
4231 if (!base)
4232 {
4233 base = build_fold_addr_expr (prev_base);
4234 off = build_int_cst (adj->alias_ptr_type,
4235 adj->offset / BITS_PER_UNIT);
4236 }
4237 else if (TREE_CODE (base) == MEM_REF)
4238 {
4239 if (!addrof)
4240 {
4241 deref_base = true;
4242 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4243 }
4244 off = build_int_cst (adj->alias_ptr_type,
4245 base_offset
4246 + adj->offset / BITS_PER_UNIT);
4247 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4248 off);
4249 base = TREE_OPERAND (base, 0);
4250 }
4251 else
4252 {
4253 off = build_int_cst (adj->alias_ptr_type,
4254 base_offset
4255 + adj->offset / BITS_PER_UNIT);
4256 base = build_fold_addr_expr (base);
4257 }
4258 }
4259
4260 if (!adj->by_ref)
4261 {
4262 tree type = adj->type;
4263 unsigned int align;
4264 unsigned HOST_WIDE_INT misalign;
4265
4266 if (deref_base)
4267 {
4268 align = deref_align;
4269 misalign = 0;
4270 }
4271 else
4272 {
4273 get_pointer_alignment_1 (base, &align, &misalign);
4274 if (TYPE_ALIGN (type) > align)
4275 align = TYPE_ALIGN (type);
4276 }
4277 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4278 * BITS_PER_UNIT);
4279 misalign = misalign & (align - 1);
4280 if (misalign != 0)
4281 align = least_bit_hwi (misalign);
4282 if (align < TYPE_ALIGN (type))
4283 type = build_aligned_type (type, align);
4284 base = force_gimple_operand_gsi (&gsi, base,
4285 true, NULL, true, GSI_SAME_STMT);
4286 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4287 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4288 /* If expr is not a valid gimple call argument, emit
4289 a load into a temporary. */
4290 if (is_gimple_reg_type (TREE_TYPE (expr)))
4291 {
4292 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4293 if (gimple_in_ssa_p (cfun))
4294 {
4295 gimple_set_vuse (tem, gimple_vuse (stmt));
4296 expr = make_ssa_name (TREE_TYPE (expr), tem);
4297 }
4298 else
4299 expr = create_tmp_reg (TREE_TYPE (expr));
4300 gimple_assign_set_lhs (tem, expr);
4301 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4302 }
4303 }
4304 else
4305 {
4306 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4307 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4308 expr = build_fold_addr_expr (expr);
4309 expr = force_gimple_operand_gsi (&gsi, expr,
4310 true, NULL, true, GSI_SAME_STMT);
4311 }
4312 vargs.quick_push (expr);
4313 }
4314 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4315 {
4316 unsigned int ix;
4317 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4318 gimple *def_temp;
4319
4320 arg = gimple_call_arg (stmt, adj->base_index);
4321 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4322 {
4323 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4324 continue;
4325 arg = fold_convert_loc (gimple_location (stmt),
4326 TREE_TYPE (origin), arg);
4327 }
4328 if (debug_args == NULL)
4329 debug_args = decl_debug_args_insert (callee_decl);
4330 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4331 if (ddecl == origin)
4332 {
4333 ddecl = (**debug_args)[ix + 1];
4334 break;
4335 }
4336 if (ddecl == NULL)
4337 {
4338 ddecl = make_node (DEBUG_EXPR_DECL);
4339 DECL_ARTIFICIAL (ddecl) = 1;
4340 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4341 DECL_MODE (ddecl) = DECL_MODE (origin);
4342
4343 vec_safe_push (*debug_args, origin);
4344 vec_safe_push (*debug_args, ddecl);
4345 }
4346 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4347 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4348 }
4349 }
4350
4351 if (dump_file && (dump_flags & TDF_DETAILS))
4352 {
4353 fprintf (dump_file, "replacing stmt:");
4354 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4355 }
4356
4357 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4358 vargs.release ();
4359 if (gimple_call_lhs (stmt))
4360 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4361
4362 gimple_set_block (new_stmt, gimple_block (stmt));
4363 if (gimple_has_location (stmt))
4364 gimple_set_location (new_stmt, gimple_location (stmt));
4365 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4366 gimple_call_copy_flags (new_stmt, stmt);
4367 if (gimple_in_ssa_p (cfun))
4368 {
4369 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4370 if (gimple_vdef (stmt))
4371 {
4372 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4373 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4374 }
4375 }
4376
4377 if (dump_file && (dump_flags & TDF_DETAILS))
4378 {
4379 fprintf (dump_file, "with stmt:");
4380 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4381 fprintf (dump_file, "\n");
4382 }
4383 gsi_replace (&gsi, new_stmt, true);
4384 if (cs)
4385 cs->set_call_stmt (new_stmt);
4386 do
4387 {
4388 current_node->record_stmt_references (gsi_stmt (gsi));
4389 gsi_prev (&gsi);
4390 }
4391 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4392 }
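/* Continuing the example above (illustrative): a call 'f (a, &s)' is
   rewritten so that the selected piece is loaded first, roughly

     tmp_1 = MEM[(int *)&s + 4B];
     f (a, tmp_1);

   which is what the MEM_REF construction and the temporary-load fallback in
   the loop above produce for a by-value replacement at bit offset 32.  */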
4393
4394 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4395 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4396 specifies whether the function should care about type incompatibility between
4397 the current and new expressions. If it is false, the function will leave
4398 incompatibility issues to the caller. Return true iff the expression
4399 was modified. */
4400
4401 bool
4402 ipa_modify_expr (tree *expr, bool convert,
4403 ipa_parm_adjustment_vec adjustments)
4404 {
4405 struct ipa_parm_adjustment *cand
4406 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4407 if (!cand)
4408 return false;
4409
4410 tree src;
4411 if (cand->by_ref)
4412 {
4413 src = build_simple_mem_ref (cand->new_decl);
4414 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4415 }
4416 else
4417 src = cand->new_decl;
4418
4419 if (dump_file && (dump_flags & TDF_DETAILS))
4420 {
4421 fprintf (dump_file, "About to replace expr ");
4422 print_generic_expr (dump_file, *expr, 0);
4423 fprintf (dump_file, " with ");
4424 print_generic_expr (dump_file, src, 0);
4425 fprintf (dump_file, "\n");
4426 }
4427
4428 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4429 {
4430 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4431 *expr = vce;
4432 }
4433 else
4434 *expr = src;
4435 return true;
4436 }
4437
4438 /* If T is an SSA_NAME, return NULL if it is not a default def or
4439 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4440 the base variable is always returned, regardless of whether it is a
4441 default def. Return T if it is not an SSA_NAME. */
4442
4443 static tree
4444 get_ssa_base_param (tree t, bool ignore_default_def)
4445 {
4446 if (TREE_CODE (t) == SSA_NAME)
4447 {
4448 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4449 return SSA_NAME_VAR (t);
4450 else
4451 return NULL_TREE;
4452 }
4453 return t;
4454 }
4455
4456 /* Given an expression, return an adjustment entry specifying the
4457 transformation to be done on EXPR. If no suitable adjustment entry
4458 is found, return NULL.
4459
4460 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4461 default def, otherwise bail on them.
4462
4463 If CONVERT is non-NULL, this function will set *CONVERT if the
4464 expression provided is a component reference. ADJUSTMENTS is the
4465 adjustments vector. */
4466
4467 ipa_parm_adjustment *
4468 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4469 ipa_parm_adjustment_vec adjustments,
4470 bool ignore_default_def)
4471 {
4472 if (TREE_CODE (**expr) == BIT_FIELD_REF
4473 || TREE_CODE (**expr) == IMAGPART_EXPR
4474 || TREE_CODE (**expr) == REALPART_EXPR)
4475 {
4476 *expr = &TREE_OPERAND (**expr, 0);
4477 if (convert)
4478 *convert = true;
4479 }
4480
4481 HOST_WIDE_INT offset, size, max_size;
4482 bool reverse;
4483 tree base
4484 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4485 if (!base || size == -1 || max_size == -1)
4486 return NULL;
4487
4488 if (TREE_CODE (base) == MEM_REF)
4489 {
4490 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4491 base = TREE_OPERAND (base, 0);
4492 }
4493
4494 base = get_ssa_base_param (base, ignore_default_def);
4495 if (!base || TREE_CODE (base) != PARM_DECL)
4496 return NULL;
4497
4498 struct ipa_parm_adjustment *cand = NULL;
4499 unsigned int len = adjustments.length ();
4500 for (unsigned i = 0; i < len; i++)
4501 {
4502 struct ipa_parm_adjustment *adj = &adjustments[i];
4503
4504 if (adj->base == base
4505 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4506 {
4507 cand = adj;
4508 break;
4509 }
4510 }
4511
4512 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4513 return NULL;
4514 return cand;
4515 }
4516
4517 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4518
4519 static bool
4520 index_in_adjustments_multiple_times_p (int base_index,
4521 ipa_parm_adjustment_vec adjustments)
4522 {
4523 int i, len = adjustments.length ();
4524 bool one = false;
4525
4526 for (i = 0; i < len; i++)
4527 {
4528 struct ipa_parm_adjustment *adj;
4529 adj = &adjustments[i];
4530
4531 if (adj->base_index == base_index)
4532 {
4533 if (one)
4534 return true;
4535 else
4536 one = true;
4537 }
4538 }
4539 return false;
4540 }
4541
4542
4543 /* Return adjustments that should have the same effect on function parameters
4544 and call arguments as if they were first changed according to adjustments in
4545 INNER and then by adjustments in OUTER. */
4546
4547 ipa_parm_adjustment_vec
4548 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4549 ipa_parm_adjustment_vec outer)
4550 {
4551 int i, outlen = outer.length ();
4552 int inlen = inner.length ();
4553 int removals = 0;
4554 ipa_parm_adjustment_vec adjustments, tmp;
4555
4556 tmp.create (inlen);
4557 for (i = 0; i < inlen; i++)
4558 {
4559 struct ipa_parm_adjustment *n;
4560 n = &inner[i];
4561
4562 if (n->op == IPA_PARM_OP_REMOVE)
4563 removals++;
4564 else
4565 {
4566 /* FIXME: Handling of new arguments is not implemented yet. */
4567 gcc_assert (n->op != IPA_PARM_OP_NEW);
4568 tmp.quick_push (*n);
4569 }
4570 }
4571
4572 adjustments.create (outlen + removals);
4573 for (i = 0; i < outlen; i++)
4574 {
4575 struct ipa_parm_adjustment r;
4576 struct ipa_parm_adjustment *out = &outer[i];
4577 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4578
4579 memset (&r, 0, sizeof (r));
4580 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4581 if (out->op == IPA_PARM_OP_REMOVE)
4582 {
4583 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4584 {
4585 r.op = IPA_PARM_OP_REMOVE;
4586 adjustments.quick_push (r);
4587 }
4588 continue;
4589 }
4590 else
4591 {
4592 /* FIXME: Handling of new arguments is not implemented yet. */
4593 gcc_assert (out->op != IPA_PARM_OP_NEW);
4594 }
4595
4596 r.base_index = in->base_index;
4597 r.type = out->type;
4598
4599 /* FIXME: Create nonlocal value too. */
4600
4601 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4602 r.op = IPA_PARM_OP_COPY;
4603 else if (in->op == IPA_PARM_OP_COPY)
4604 r.offset = out->offset;
4605 else if (out->op == IPA_PARM_OP_COPY)
4606 r.offset = in->offset;
4607 else
4608 r.offset = in->offset + out->offset;
4609 adjustments.quick_push (r);
4610 }
4611
4612 for (i = 0; i < inlen; i++)
4613 {
4614 struct ipa_parm_adjustment *n = &inner[i];
4615
4616 if (n->op == IPA_PARM_OP_REMOVE)
4617 adjustments.quick_push (*n);
4618 }
4619
4620 tmp.release ();
4621 return adjustments;
4622 }
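/* A small worked example (illustrative): let the original signature be
   (a, b, c).  If INNER removes b while copying a and c, and OUTER then
   removes what has become the second parameter, the combined vector copies
   a, emits a removal for c and re-emits the original removal of b, so
   applying it to (a, b, c) in one step yields just (a).  */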
4623
4624 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4625 human-friendly way, assuming they are meant to be applied to FNDECL. */
4626
4627 void
4628 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4629 tree fndecl)
4630 {
4631 int i, len = adjustments.length ();
4632 bool first = true;
4633 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4634
4635 fprintf (file, "IPA param adjustments: ");
4636 for (i = 0; i < len; i++)
4637 {
4638 struct ipa_parm_adjustment *adj;
4639 adj = &adjustments[i];
4640
4641 if (!first)
4642 fprintf (file, " ");
4643 else
4644 first = false;
4645
4646 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4647 print_generic_expr (file, parms[adj->base_index], 0);
4648 if (adj->base)
4649 {
4650 fprintf (file, ", base: ");
4651 print_generic_expr (file, adj->base, 0);
4652 }
4653 if (adj->new_decl)
4654 {
4655 fprintf (file, ", new_decl: ");
4656 print_generic_expr (file, adj->new_decl, 0);
4657 }
4658 if (adj->new_ssa_base)
4659 {
4660 fprintf (file, ", new_ssa_base: ");
4661 print_generic_expr (file, adj->new_ssa_base, 0);
4662 }
4663
4664 if (adj->op == IPA_PARM_OP_COPY)
4665 fprintf (file, ", copy_param");
4666 else if (adj->op == IPA_PARM_OP_REMOVE)
4667 fprintf (file, ", remove_param");
4668 else
4669 fprintf (file, ", offset %li", (long) adj->offset);
4670 if (adj->by_ref)
4671 fprintf (file, ", by_ref");
4672 print_node_brief (file, ", type: ", adj->type, 0);
4673 fprintf (file, "\n");
4674 }
4675 parms.release ();
4676 }
4677
4678 /* Dump the AV linked list. */
4679
4680 void
4681 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4682 {
4683 bool comma = false;
4684 fprintf (f, " Aggregate replacements:");
4685 for (; av; av = av->next)
4686 {
4687 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4688 av->index, av->offset);
4689 print_generic_expr (f, av->value, 0);
4690 comma = true;
4691 }
4692 fprintf (f, "\n");
4693 }
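/* For instance, a chain mapping offset 0 of parameter 0 to 7 and offset 64
   of parameter 2 to 9 is printed as:

     Aggregate replacements: 0[0]=7, 2[64]=9  */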
4694
4695 /* Stream out jump function JUMP_FUNC to OB. */
4696
4697 static void
4698 ipa_write_jump_function (struct output_block *ob,
4699 struct ipa_jump_func *jump_func)
4700 {
4701 struct ipa_agg_jf_item *item;
4702 struct bitpack_d bp;
4703 int i, count;
4704
4705 streamer_write_uhwi (ob, jump_func->type);
4706 switch (jump_func->type)
4707 {
4708 case IPA_JF_UNKNOWN:
4709 break;
4710 case IPA_JF_CONST:
4711 gcc_assert (
4712 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4713 stream_write_tree (ob, jump_func->value.constant.value, true);
4714 break;
4715 case IPA_JF_PASS_THROUGH:
4716 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4717 if (jump_func->value.pass_through.operation == NOP_EXPR)
4718 {
4719 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4720 bp = bitpack_create (ob->main_stream);
4721 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4722 streamer_write_bitpack (&bp);
4723 }
4724 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4725 == tcc_unary)
4726 {
4727 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4728 }
4729 else
4730 {
4731 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4732 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4733 }
4734 break;
4735 case IPA_JF_ANCESTOR:
4736 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4737 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4738 bp = bitpack_create (ob->main_stream);
4739 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4740 streamer_write_bitpack (&bp);
4741 break;
4742 }
4743
4744 count = vec_safe_length (jump_func->agg.items);
4745 streamer_write_uhwi (ob, count);
4746 if (count)
4747 {
4748 bp = bitpack_create (ob->main_stream);
4749 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4750 streamer_write_bitpack (&bp);
4751 }
4752
4753 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4754 {
4755 streamer_write_uhwi (ob, item->offset);
4756 stream_write_tree (ob, item->value, true);
4757 }
4758
4759 bp = bitpack_create (ob->main_stream);
4760 bp_pack_value (&bp, jump_func->bits.known, 1);
4761 streamer_write_bitpack (&bp);
4762 if (jump_func->bits.known)
4763 {
4764 streamer_write_widest_int (ob, jump_func->bits.value);
4765 streamer_write_widest_int (ob, jump_func->bits.mask);
4766 }
4767 bp_pack_value (&bp, jump_func->vr_known, 1);
4768 streamer_write_bitpack (&bp);
4769 if (jump_func->vr_known)
4770 {
4771 streamer_write_enum (ob->main_stream, value_range_type,
4772 VR_LAST, jump_func->m_vr.type);
4773 stream_write_tree (ob, jump_func->m_vr.min, true);
4774 stream_write_tree (ob, jump_func->m_vr.max, true);
4775 }
4776 }
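/* Informally, the stream layout produced above is:

     type;
     type-specific payload (constant tree, pass-through operation and
       formal_id, or ancestor offset/formal_id/agg_preserved);
     number of aggregate items, followed by the by_ref bit if nonzero;
     the items themselves as (offset, value) pairs;
     known-bits bit, followed by value and mask if known;
     value-range bit, followed by type, min and max if known.

   ipa_read_jump_function below consumes exactly this sequence.  */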
4777
4778 /* Read in jump function JUMP_FUNC from IB. */
4779
4780 static void
4781 ipa_read_jump_function (struct lto_input_block *ib,
4782 struct ipa_jump_func *jump_func,
4783 struct cgraph_edge *cs,
4784 struct data_in *data_in)
4785 {
4786 enum jump_func_type jftype;
4787 enum tree_code operation;
4788 int i, count;
4789
4790 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4791 switch (jftype)
4792 {
4793 case IPA_JF_UNKNOWN:
4794 ipa_set_jf_unknown (jump_func);
4795 break;
4796 case IPA_JF_CONST:
4797 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4798 break;
4799 case IPA_JF_PASS_THROUGH:
4800 operation = (enum tree_code) streamer_read_uhwi (ib);
4801 if (operation == NOP_EXPR)
4802 {
4803 int formal_id = streamer_read_uhwi (ib);
4804 struct bitpack_d bp = streamer_read_bitpack (ib);
4805 bool agg_preserved = bp_unpack_value (&bp, 1);
4806 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4807 }
4808 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4809 {
4810 int formal_id = streamer_read_uhwi (ib);
4811 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4812 }
4813 else
4814 {
4815 tree operand = stream_read_tree (ib, data_in);
4816 int formal_id = streamer_read_uhwi (ib);
4817 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4818 operation);
4819 }
4820 break;
4821 case IPA_JF_ANCESTOR:
4822 {
4823 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4824 int formal_id = streamer_read_uhwi (ib);
4825 struct bitpack_d bp = streamer_read_bitpack (ib);
4826 bool agg_preserved = bp_unpack_value (&bp, 1);
4827 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4828 break;
4829 }
4830 }
4831
4832 count = streamer_read_uhwi (ib);
4833 vec_alloc (jump_func->agg.items, count);
4834 if (count)
4835 {
4836 struct bitpack_d bp = streamer_read_bitpack (ib);
4837 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4838 }
4839 for (i = 0; i < count; i++)
4840 {
4841 struct ipa_agg_jf_item item;
4842 item.offset = streamer_read_uhwi (ib);
4843 item.value = stream_read_tree (ib, data_in);
4844 jump_func->agg.items->quick_push (item);
4845 }
4846
4847 struct bitpack_d bp = streamer_read_bitpack (ib);
4848 bool bits_known = bp_unpack_value (&bp, 1);
4849 if (bits_known)
4850 {
4851 jump_func->bits.known = true;
4852 jump_func->bits.value = streamer_read_widest_int (ib);
4853 jump_func->bits.mask = streamer_read_widest_int (ib);
4854 }
4855 else
4856 jump_func->bits.known = false;
4857
4858 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4859 bool vr_known = bp_unpack_value (&vr_bp, 1);
4860 if (vr_known)
4861 {
4862 jump_func->vr_known = true;
4863 jump_func->m_vr.type = streamer_read_enum (ib,
4864 value_range_type,
4865 VR_LAST);
4866 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4867 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4868 }
4869 else
4870 jump_func->vr_known = false;
4871 }
4872
4873 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4874 relevant to indirect inlining to OB. */
4875
4876 static void
4877 ipa_write_indirect_edge_info (struct output_block *ob,
4878 struct cgraph_edge *cs)
4879 {
4880 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4881 struct bitpack_d bp;
4882
4883 streamer_write_hwi (ob, ii->param_index);
4884 bp = bitpack_create (ob->main_stream);
4885 bp_pack_value (&bp, ii->polymorphic, 1);
4886 bp_pack_value (&bp, ii->agg_contents, 1);
4887 bp_pack_value (&bp, ii->member_ptr, 1);
4888 bp_pack_value (&bp, ii->by_ref, 1);
4889 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4890 bp_pack_value (&bp, ii->vptr_changed, 1);
4891 streamer_write_bitpack (&bp);
4892 if (ii->agg_contents || ii->polymorphic)
4893 streamer_write_hwi (ob, ii->offset);
4894 else
4895 gcc_assert (ii->offset == 0);
4896
4897 if (ii->polymorphic)
4898 {
4899 streamer_write_hwi (ob, ii->otr_token);
4900 stream_write_tree (ob, ii->otr_type, true);
4901 ii->context.stream_out (ob);
4902 }
4903 }
4904
4905 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4906 relevant to indirect inlining from IB. */
4907
4908 static void
4909 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4910 struct data_in *data_in,
4911 struct cgraph_edge *cs)
4912 {
4913 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4914 struct bitpack_d bp;
4915
4916 ii->param_index = (int) streamer_read_hwi (ib);
4917 bp = streamer_read_bitpack (ib);
4918 ii->polymorphic = bp_unpack_value (&bp, 1);
4919 ii->agg_contents = bp_unpack_value (&bp, 1);
4920 ii->member_ptr = bp_unpack_value (&bp, 1);
4921 ii->by_ref = bp_unpack_value (&bp, 1);
4922 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4923 ii->vptr_changed = bp_unpack_value (&bp, 1);
4924 if (ii->agg_contents || ii->polymorphic)
4925 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4926 else
4927 ii->offset = 0;
4928 if (ii->polymorphic)
4929 {
4930 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4931 ii->otr_type = stream_read_tree (ib, data_in);
4932 ii->context.stream_in (ib, data_in);
4933 }
4934 }
4935
4936 /* Stream out NODE info to OB. */
4937
4938 static void
4939 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4940 {
4941 int node_ref;
4942 lto_symtab_encoder_t encoder;
4943 struct ipa_node_params *info = IPA_NODE_REF (node);
4944 int j;
4945 struct cgraph_edge *e;
4946 struct bitpack_d bp;
4947
4948 encoder = ob->decl_state->symtab_node_encoder;
4949 node_ref = lto_symtab_encoder_encode (encoder, node);
4950 streamer_write_uhwi (ob, node_ref);
4951
4952 streamer_write_uhwi (ob, ipa_get_param_count (info));
4953 for (j = 0; j < ipa_get_param_count (info); j++)
4954 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4955 bp = bitpack_create (ob->main_stream);
4956 gcc_assert (info->analysis_done
4957 || ipa_get_param_count (info) == 0);
4958 gcc_assert (!info->node_enqueued);
4959 gcc_assert (!info->ipcp_orig_node);
4960 for (j = 0; j < ipa_get_param_count (info); j++)
4961 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4962 streamer_write_bitpack (&bp);
4963 for (j = 0; j < ipa_get_param_count (info); j++)
4964 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4965 for (e = node->callees; e; e = e->next_callee)
4966 {
4967 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4968
4969 streamer_write_uhwi (ob,
4970 ipa_get_cs_argument_count (args) * 2
4971 + (args->polymorphic_call_contexts != NULL));
4972 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4973 {
4974 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4975 if (args->polymorphic_call_contexts != NULL)
4976 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4977 }
4978 }
4979 for (e = node->indirect_calls; e; e = e->next_callee)
4980 {
4981 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4982
4983 streamer_write_uhwi (ob,
4984 ipa_get_cs_argument_count (args) * 2
4985 + (args->polymorphic_call_contexts != NULL));
4986 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4987 {
4988 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4989 if (args->polymorphic_call_contexts != NULL)
4990 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4991 }
4992 ipa_write_indirect_edge_info (ob, e);
4993 }
4994 }
4995
4996 /* Stream in NODE info from IB. */
4997
4998 static void
4999 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5000 struct data_in *data_in)
5001 {
5002 struct ipa_node_params *info = IPA_NODE_REF (node);
5003 int k;
5004 struct cgraph_edge *e;
5005 struct bitpack_d bp;
5006
5007 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5008
5009 for (k = 0; k < ipa_get_param_count (info); k++)
5010 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
5011
5012 bp = streamer_read_bitpack (ib);
5013 if (ipa_get_param_count (info) != 0)
5014 info->analysis_done = true;
5015 info->node_enqueued = false;
5016 for (k = 0; k < ipa_get_param_count (info); k++)
5017 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5018 for (k = 0; k < ipa_get_param_count (info); k++)
5019 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5020 for (e = node->callees; e; e = e->next_callee)
5021 {
5022 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5023 int count = streamer_read_uhwi (ib);
5024 bool contexts_computed = count & 1;
5025 count /= 2;
5026
5027 if (!count)
5028 continue;
5029 vec_safe_grow_cleared (args->jump_functions, count);
5030 if (contexts_computed)
5031 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5032
5033 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5034 {
5035 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5036 data_in);
5037 if (contexts_computed)
5038 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5039 }
5040 }
5041 for (e = node->indirect_calls; e; e = e->next_callee)
5042 {
5043 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5044 int count = streamer_read_uhwi (ib);
5045 bool contexts_computed = count & 1;
5046 count /= 2;
5047
5048 if (count)
5049 {
5050 vec_safe_grow_cleared (args->jump_functions, count);
5051 if (contexts_computed)
5052 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5053 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5054 {
5055 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5056 data_in);
5057 if (contexts_computed)
5058 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5059 }
5060 }
5061 ipa_read_indirect_edge_info (ib, data_in, e);
5062 }
5063 }
5064
5065 /* Write jump functions of nodes in the current LTO partition. */
5066
5067 void
5068 ipa_prop_write_jump_functions (void)
5069 {
5070 struct cgraph_node *node;
5071 struct output_block *ob;
5072 unsigned int count = 0;
5073 lto_symtab_encoder_iterator lsei;
5074 lto_symtab_encoder_t encoder;
5075
5076 if (!ipa_node_params_sum)
5077 return;
5078
5079 ob = create_output_block (LTO_section_jump_functions);
5080 encoder = ob->decl_state->symtab_node_encoder;
5081 ob->symbol = NULL;
5082 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5083 lsei_next_function_in_partition (&lsei))
5084 {
5085 node = lsei_cgraph_node (lsei);
5086 if (node->has_gimple_body_p ()
5087 && IPA_NODE_REF (node) != NULL)
5088 count++;
5089 }
5090
5091 streamer_write_uhwi (ob, count);
5092
5093 /* Process all of the functions. */
5094 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5095 lsei_next_function_in_partition (&lsei))
5096 {
5097 node = lsei_cgraph_node (lsei);
5098 if (node->has_gimple_body_p ()
5099 && IPA_NODE_REF (node) != NULL)
5100 ipa_write_node_info (ob, node);
5101 }
5102 streamer_write_char_stream (ob->main_stream, 0);
5103 produce_asm (ob, NULL);
5104 destroy_output_block (ob);
5105 }
5106
5107 /* Read section in file FILE_DATA of length LEN with data DATA. */
5108
5109 static void
5110 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5111 size_t len)
5112 {
5113 const struct lto_function_header *header =
5114 (const struct lto_function_header *) data;
5115 const int cfg_offset = sizeof (struct lto_function_header);
5116 const int main_offset = cfg_offset + header->cfg_size;
5117 const int string_offset = main_offset + header->main_size;
5118 struct data_in *data_in;
5119 unsigned int i;
5120 unsigned int count;
5121
5122 lto_input_block ib_main ((const char *) data + main_offset,
5123 header->main_size, file_data->mode_table);
5124
5125 data_in =
5126 lto_data_in_create (file_data, (const char *) data + string_offset,
5127 header->string_size, vNULL);
5128 count = streamer_read_uhwi (&ib_main);
5129
5130 for (i = 0; i < count; i++)
5131 {
5132 unsigned int index;
5133 struct cgraph_node *node;
5134 lto_symtab_encoder_t encoder;
5135
5136 index = streamer_read_uhwi (&ib_main);
5137 encoder = file_data->symtab_node_encoder;
5138 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5139 index));
5140 gcc_assert (node->definition);
5141 ipa_read_node_info (&ib_main, node, data_in);
5142 }
5143 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5144 len);
5145 lto_data_in_delete (data_in);
5146 }
5147
5148 /* Read ipcp jump functions. */
5149
5150 void
5151 ipa_prop_read_jump_functions (void)
5152 {
5153 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5154 struct lto_file_decl_data *file_data;
5155 unsigned int j = 0;
5156
5157 ipa_check_create_node_params ();
5158 ipa_check_create_edge_args ();
5159 ipa_register_cgraph_hooks ();
5160
5161 while ((file_data = file_data_vec[j++]))
5162 {
5163 size_t len;
5164 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5165
5166 if (data)
5167 ipa_prop_read_section (file_data, data, len);
5168 }
5169 }
5170
5171 /* After merging units, we can get a mismatch in argument counts.
5172 Decl merging might also have rendered parameter lists obsolete.
5173 Also compute called_with_variable_arg info. */
5174
5175 void
5176 ipa_update_after_lto_read (void)
5177 {
5178 ipa_check_create_node_params ();
5179 ipa_check_create_edge_args ();
5180 }
5181
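/* Stream out the transformation summary of NODE (aggregate replacements,
   value ranges and known bits) to OB.  */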
5182 void
5183 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5184 {
5185 int node_ref;
5186 unsigned int count = 0;
5187 lto_symtab_encoder_t encoder;
5188 struct ipa_agg_replacement_value *aggvals, *av;
5189
5190 aggvals = ipa_get_agg_replacements_for_node (node);
5191 encoder = ob->decl_state->symtab_node_encoder;
5192 node_ref = lto_symtab_encoder_encode (encoder, node);
5193 streamer_write_uhwi (ob, node_ref);
5194
5195 for (av = aggvals; av; av = av->next)
5196 count++;
5197 streamer_write_uhwi (ob, count);
5198
5199 for (av = aggvals; av; av = av->next)
5200 {
5201 struct bitpack_d bp;
5202
5203 streamer_write_uhwi (ob, av->offset);
5204 streamer_write_uhwi (ob, av->index);
5205 stream_write_tree (ob, av->value, true);
5206
5207 bp = bitpack_create (ob->main_stream);
5208 bp_pack_value (&bp, av->by_ref, 1);
5209 streamer_write_bitpack (&bp);
5210 }
5211
5212 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5213 if (ts && vec_safe_length (ts->m_vr) > 0)
5214 {
5215 count = ts->m_vr->length ();
5216 streamer_write_uhwi (ob, count);
5217 for (unsigned i = 0; i < count; ++i)
5218 {
5219 struct bitpack_d bp;
5220 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5221 bp = bitpack_create (ob->main_stream);
5222 bp_pack_value (&bp, parm_vr->known, 1);
5223 streamer_write_bitpack (&bp);
5224 if (parm_vr->known)
5225 {
5226 streamer_write_enum (ob->main_stream, value_range_type,
5227 VR_LAST, parm_vr->type);
5228 streamer_write_wide_int (ob, parm_vr->min);
5229 streamer_write_wide_int (ob, parm_vr->max);
5230 }
5231 }
5232 }
5233 else
5234 streamer_write_uhwi (ob, 0);
5235
5236 if (ts && vec_safe_length (ts->bits) > 0)
5237 {
5238 count = ts->bits->length ();
5239 streamer_write_uhwi (ob, count);
5240
5241 for (unsigned i = 0; i < count; ++i)
5242 {
5243 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5244 struct bitpack_d bp = bitpack_create (ob->main_stream);
5245 bp_pack_value (&bp, bits_jfunc.known, 1);
5246 streamer_write_bitpack (&bp);
5247 if (bits_jfunc.known)
5248 {
5249 streamer_write_widest_int (ob, bits_jfunc.value);
5250 streamer_write_widest_int (ob, bits_jfunc.mask);
5251 }
5252 }
5253 }
5254 else
5255 streamer_write_uhwi (ob, 0);
5256 }
5257
5258 /* Stream in the transformation summary for NODE (aggregate replacements,
value ranges and known bits) from IB. */
5259
5260 static void
5261 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5262 data_in *data_in)
5263 {
5264 struct ipa_agg_replacement_value *aggvals = NULL;
5265 unsigned int count, i;
5266
5267 count = streamer_read_uhwi (ib);
5268 for (i = 0; i < count; i++)
5269 {
5270 struct ipa_agg_replacement_value *av;
5271 struct bitpack_d bp;
5272
5273 av = ggc_alloc<ipa_agg_replacement_value> ();
5274 av->offset = streamer_read_uhwi (ib);
5275 av->index = streamer_read_uhwi (ib);
5276 av->value = stream_read_tree (ib, data_in);
5277 bp = streamer_read_bitpack (ib);
5278 av->by_ref = bp_unpack_value (&bp, 1);
5279 av->next = aggvals;
5280 aggvals = av;
5281 }
5282 ipa_set_node_agg_value_chain (node, aggvals);
5283
5284 count = streamer_read_uhwi (ib);
5285 if (count > 0)
5286 {
5287 ipcp_grow_transformations_if_necessary ();
5288
5289 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5290 vec_safe_grow_cleared (ts->m_vr, count);
5291 for (i = 0; i < count; i++)
5292 {
5293 ipa_vr *parm_vr;
5294 parm_vr = &(*ts->m_vr)[i];
5295 struct bitpack_d bp;
5296 bp = streamer_read_bitpack (ib);
5297 parm_vr->known = bp_unpack_value (&bp, 1);
5298 if (parm_vr->known)
5299 {
5300 parm_vr->type = streamer_read_enum (ib, value_range_type,
5301 VR_LAST);
5302 parm_vr->min = streamer_read_wide_int (ib);
5303 parm_vr->max = streamer_read_wide_int (ib);
5304 }
5305 }
5306 }
5307 count = streamer_read_uhwi (ib);
5308 if (count > 0)
5309 {
5310 ipcp_grow_transformations_if_necessary ();
5311
5312 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5313 vec_safe_grow_cleared (ts->bits, count);
5314
5315 for (i = 0; i < count; i++)
5316 {
5317 ipa_bits& bits_jfunc = (*ts->bits)[i];
5318 struct bitpack_d bp = streamer_read_bitpack (ib);
5319 bits_jfunc.known = bp_unpack_value (&bp, 1);
5320 if (bits_jfunc.known)
5321 {
5322 bits_jfunc.value = streamer_read_widest_int (ib);
5323 bits_jfunc.mask = streamer_read_widest_int (ib);
5324 }
5325 }
5326 }
5327 }
5328
5329 /* Write transformation summaries of all nodes in the current partition. */
5330
5331 void
5332 ipcp_write_transformation_summaries (void)
5333 {
5334 struct cgraph_node *node;
5335 struct output_block *ob;
5336 unsigned int count = 0;
5337 lto_symtab_encoder_iterator lsei;
5338 lto_symtab_encoder_t encoder;
5339
5340 ob = create_output_block (LTO_section_ipcp_transform);
5341 encoder = ob->decl_state->symtab_node_encoder;
5342 ob->symbol = NULL;
5343 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5344 lsei_next_function_in_partition (&lsei))
5345 {
5346 node = lsei_cgraph_node (lsei);
5347 if (node->has_gimple_body_p ())
5348 count++;
5349 }
5350
5351 streamer_write_uhwi (ob, count);
5352
5353 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5354 lsei_next_function_in_partition (&lsei))
5355 {
5356 node = lsei_cgraph_node (lsei);
5357 if (node->has_gimple_body_p ())
5358 write_ipcp_transformation_info (ob, node);
5359 }
5360 streamer_write_char_stream (ob->main_stream, 0);
5361 produce_asm (ob, NULL);
5362 destroy_output_block (ob);
5363 }
5364
5365 /* Read replacements section in file FILE_DATA of length LEN with data
5366 DATA. */
5367
5368 static void
5369 read_replacements_section (struct lto_file_decl_data *file_data,
5370 const char *data,
5371 size_t len)
5372 {
5373 const struct lto_function_header *header =
5374 (const struct lto_function_header *) data;
5375 const int cfg_offset = sizeof (struct lto_function_header);
5376 const int main_offset = cfg_offset + header->cfg_size;
5377 const int string_offset = main_offset + header->main_size;
5378 struct data_in *data_in;
5379 unsigned int i;
5380 unsigned int count;
5381
5382 lto_input_block ib_main ((const char *) data + main_offset,
5383 header->main_size, file_data->mode_table);
5384
5385 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5386 header->string_size, vNULL);
5387 count = streamer_read_uhwi (&ib_main);
5388
5389 for (i = 0; i < count; i++)
5390 {
5391 unsigned int index;
5392 struct cgraph_node *node;
5393 lto_symtab_encoder_t encoder;
5394
5395 index = streamer_read_uhwi (&ib_main);
5396 encoder = file_data->symtab_node_encoder;
5397 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5398 index));
5399 gcc_assert (node->definition);
5400 read_ipcp_transformation_info (&ib_main, node, data_in);
5401 }
5402 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5403 len);
5404 lto_data_in_delete (data_in);
5405 }
5406
5407 /* Read IPA-CP aggregate replacements. */
5408
5409 void
5410 ipcp_read_transformation_summaries (void)
5411 {
5412 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5413 struct lto_file_decl_data *file_data;
5414 unsigned int j = 0;
5415
5416 while ((file_data = file_data_vec[j++]))
5417 {
5418 size_t len;
5419 const char *data = lto_get_section_data (file_data,
5420 LTO_section_ipcp_transform,
5421 NULL, &len);
5422 if (data)
5423 read_replacements_section (file_data, data, len);
5424 }
5425 }
5426
5427 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5428 NODE. */
5429
5430 static void
5431 adjust_agg_replacement_values (struct cgraph_node *node,
5432 struct ipa_agg_replacement_value *aggval)
5433 {
5434 struct ipa_agg_replacement_value *v;
5435 int i, c = 0, d = 0, *adj;
5436
5437 if (!node->clone.combined_args_to_skip)
5438 return;
5439
5440 for (v = aggval; v; v = v->next)
5441 {
5442 gcc_assert (v->index >= 0);
5443 if (c < v->index)
5444 c = v->index;
5445 }
5446 c++;
5447
5448 adj = XALLOCAVEC (int, c);
5449 for (i = 0; i < c; i++)
5450 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5451 {
5452 adj[i] = -1;
5453 d++;
5454 }
5455 else
5456 adj[i] = i - d;
5457
5458 for (v = aggval; v; v = v->next)
5459 v->index = adj[v->index];
5460 }
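/* A worked example (illustrative): if combined_args_to_skip contains only
   bit 1 and AGGVAL holds replacements with indices 0, 1 and 2, ADJ becomes
   {0, -1, 1}: the replacement for old index 0 keeps its index, the one for
   the skipped parameter gets -1, and the one for old index 2 is renumbered
   to 1 to match the clone's shortened parameter list.  */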
5461
5462 /* Dominator walker driving the ipcp modification phase. */
5463
5464 class ipcp_modif_dom_walker : public dom_walker
5465 {
5466 public:
5467 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5468 vec<ipa_param_descriptor> descs,
5469 struct ipa_agg_replacement_value *av,
5470 bool *sc, bool *cc)
5471 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5472 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5473
5474 virtual edge before_dom_children (basic_block);
5475
5476 private:
5477 struct ipa_func_body_info *m_fbi;
5478 vec<ipa_param_descriptor> m_descriptors;
5479 struct ipa_agg_replacement_value *m_aggval;
5480 bool *m_something_changed, *m_cfg_changed;
5481 };
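
/* A minimal usage sketch, mirroring the invocation in
   ipcp_transform_function below; dominance info must be available before
   walking:

     calculate_dominance_info (CDI_DOMINATORS);
     ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			    &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
     free_dominance_info (CDI_DOMINATORS);  */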
5482
5483 edge
5484 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5485 {
5486 gimple_stmt_iterator gsi;
5487 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5488 {
5489 struct ipa_agg_replacement_value *v;
5490 gimple *stmt = gsi_stmt (gsi);
5491 tree rhs, val, t;
5492 HOST_WIDE_INT offset, size;
5493 int index;
5494 bool by_ref, vce;
5495
5496 if (!gimple_assign_load_p (stmt))
5497 continue;
5498 rhs = gimple_assign_rhs1 (stmt);
5499 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5500 continue;
5501
5502 vce = false;
5503 t = rhs;
5504 while (handled_component_p (t))
5505 {
5506 /* V_C_E can do things like convert an array of integers to one
5507 bigger integer and similar things we do not handle below. */
5508 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5509 {
5510 vce = true;
5511 break;
5512 }
5513 t = TREE_OPERAND (t, 0);
5514 }
5515 if (vce)
5516 continue;
5517
5518 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5519 &offset, &size, &by_ref))
5520 continue;
5521 for (v = m_aggval; v; v = v->next)
5522 if (v->index == index
5523 && v->offset == offset)
5524 break;
5525 if (!v
5526 || v->by_ref != by_ref
5527 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5528 continue;
5529
5530 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5531 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5532 {
5533 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5534 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5535 else if (TYPE_SIZE (TREE_TYPE (rhs))
5536 == TYPE_SIZE (TREE_TYPE (v->value)))
5537 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5538 else
5539 {
5540 if (dump_file)
5541 {
5542 fprintf (dump_file, " const ");
5543 print_generic_expr (dump_file, v->value, 0);
5544 fprintf (dump_file, " can't be converted to type of ");
5545 print_generic_expr (dump_file, rhs, 0);
5546 fprintf (dump_file, "\n");
5547 }
5548 continue;
5549 }
5550 }
5551 else
5552 val = v->value;
5553
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5555 {
5556 fprintf (dump_file, "Modifying stmt:\n ");
5557 print_gimple_stmt (dump_file, stmt, 0, 0);
5558 }
5559 gimple_assign_set_rhs_from_tree (&gsi, val);
5560 update_stmt (stmt);
5561
5562 if (dump_file && (dump_flags & TDF_DETAILS))
5563 {
5564 fprintf (dump_file, "into:\n ");
5565 print_gimple_stmt (dump_file, stmt, 0, 0);
5566 fprintf (dump_file, "\n");
5567 }
5568
5569 *m_something_changed = true;
5570 if (maybe_clean_eh_stmt (stmt)
5571 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5572 *m_cfg_changed = true;
5573 }
5574 return NULL;
5575 }
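
/* An illustrative example of the rewrite performed above (hypothetical
   GIMPLE, not taken from a real dump): if the replacement list records
   that the aggregate pointed to by parameter p holds the constant 4 at
   offset 0, a load such as

     _1 = p_2(D)->field;

   becomes

     _1 = 4;

   provided the recorded size and by_ref flag match and the value is
   convertible to the type of the right-hand side.  */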
5576
5577 /* Update the known-bits information of formal parameters as described in
5578 ipcp_transformation_summary. */
5579
5580 static void
5581 ipcp_update_bits (struct cgraph_node *node)
5582 {
5583 tree parm = DECL_ARGUMENTS (node->decl);
5584 tree next_parm = parm;
5585 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5586
5587 if (!ts || vec_safe_length (ts->bits) == 0)
5588 return;
5589
5590 vec<ipa_bits, va_gc> &bits = *ts->bits;
5591 unsigned count = bits.length ();
5592
5593 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5594 {
5595 if (node->clone.combined_args_to_skip
5596 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5597 continue;
5598
5599 gcc_checking_assert (parm);
5600 next_parm = DECL_CHAIN (parm);
5601
5602 if (!bits[i].known
5603 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5604 || !is_gimple_reg (parm))
5605 continue;
5606
5607 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5608 if (!ddef)
5609 continue;
5610
5611 if (dump_file)
5612 {
5613 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5614 print_hex (bits[i].mask, dump_file);
5615 fprintf (dump_file, "\n");
5616 }
5617
5618 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5619 {
5620 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5621 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5622
5623 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5624 | wide_int::from (bits[i].value, prec, sgn);
5625 set_nonzero_bits (ddef, nonzero_bits);
5626 }
5627 else
5628 {
5629 unsigned tem = bits[i].mask.to_uhwi ();
5630 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5631 unsigned align = tem & -tem;
5632 unsigned misalign = bitpos & (align - 1);
5633
5634 if (align > 1)
5635 {
5636 if (dump_file)
5637 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5638
5639 unsigned old_align, old_misalign;
5640 struct ptr_info_def *pi = get_ptr_info (ddef);
5641 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5642
5643 if (old_known
5644 && old_align > align)
5645 {
5646 if (dump_file)
5647 {
5648 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5649 if ((old_misalign & (align - 1)) != misalign)
5650 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5651 old_misalign, misalign);
5652 }
5653 continue;
5654 }
5655
5656 if (old_known
5657 && ((misalign & (old_align - 1)) != old_misalign)
5658 && dump_file)
5659 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5660 old_misalign, misalign);
5661
5662 set_ptr_info_alignment (pi, align, misalign);
5663 }
5664 }
5665 }
5666 }
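
/* A worked example, illustrative only: in ipa_bits, mask bits that are
   set denote unknown bits, so for an integral parameter the SSA name's
   nonzero bits are mask | value, as computed above.  For a pointer
   parameter with mask 0xfffffff8 and value 0x4, the low three bits are
   known to be 100 in binary, and the arithmetic above gives

     align    = mask & -mask        = 8
     misalign = value & (align - 1) = 4

   i.e. the pointer is known to equal 4 modulo 8.  */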
5667
5668 /* Update value range of formal parameters as described in
5669 ipcp_transformation_summary. */
5670
5671 static void
5672 ipcp_update_vr (struct cgraph_node *node)
5673 {
5674 tree fndecl = node->decl;
5675 tree parm = DECL_ARGUMENTS (fndecl);
5676 tree next_parm = parm;
5677 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5678 if (!ts || vec_safe_length (ts->m_vr) == 0)
5679 return;
5680 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5681 unsigned count = vr.length ();
5682
5683 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5684 {
5685 if (node->clone.combined_args_to_skip
5686 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5687 continue;
5688 gcc_checking_assert (parm);
5689 next_parm = DECL_CHAIN (parm);
5690 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5691
5692 if (!ddef || !is_gimple_reg (parm))
5693 continue;
5694
5695 if (vr[i].known
5696 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5697 {
5698 tree type = TREE_TYPE (ddef);
5699 unsigned prec = TYPE_PRECISION (type);
5700 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5701 {
5702 if (dump_file)
5703 {
5704 fprintf (dump_file, "Setting value range of param %u ", i);
5705 fprintf (dump_file, "%s[",
5706 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5707 print_decs (vr[i].min, dump_file);
5708 fprintf (dump_file, ", ");
5709 print_decs (vr[i].max, dump_file);
5710 fprintf (dump_file, "]\n");
5711 }
5712 set_range_info (ddef, vr[i].type,
5713 wide_int_storage::from (vr[i].min, prec,
5714 TYPE_SIGN (type)),
5715 wide_int_storage::from (vr[i].max, prec,
5716 TYPE_SIGN (type)));
5717 }
5718 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5719 && vr[i].type == VR_ANTI_RANGE
5720 && wi::eq_p (vr[i].min, 0)
5721 && wi::eq_p (vr[i].max, 0))
5722 {
5723 if (dump_file)
5724 fprintf (dump_file, "Setting nonnull for %u\n", i);
5725 set_ptr_nonnull (ddef);
5726 }
5727 }
5728 }
5729 }
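
/* An illustrative example, not from a real dump: a recorded VR_RANGE
   [0, 9] on an integral parameter becomes range info on the parameter's
   default-definition SSA name, while for pointers the anti-range ~[0, 0]
   excludes exactly the null pointer, which is why it is translated into
   set_ptr_nonnull above.  */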
5730
5731 /* IPCP transformation phase: apply the recorded known bits, value ranges and aggregate value replacements to the body of NODE. */
5732
5733 unsigned int
5734 ipcp_transform_function (struct cgraph_node *node)
5735 {
5736 vec<ipa_param_descriptor> descriptors = vNULL;
5737 struct ipa_func_body_info fbi;
5738 struct ipa_agg_replacement_value *aggval;
5739 int param_count;
5740 bool cfg_changed = false, something_changed = false;
5741
5742 gcc_checking_assert (cfun);
5743 gcc_checking_assert (current_function_decl);
5744
5745 if (dump_file)
5746 fprintf (dump_file, "Modification phase of node %s/%i\n",
5747 node->name (), node->order);
5748
5749 ipcp_update_bits (node);
5750 ipcp_update_vr (node);
5751 aggval = ipa_get_agg_replacements_for_node (node);
5752 if (!aggval)
5753 return 0;
5754 param_count = count_formal_params (node->decl);
5755 if (param_count == 0)
5756 return 0;
5757 adjust_agg_replacement_values (node, aggval);
5758 if (dump_file)
5759 ipa_dump_agg_replacement_values (dump_file, aggval);
5760
5761 fbi.node = node;
5762 fbi.info = NULL;
5763 fbi.bb_infos = vNULL;
5764 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5765 fbi.param_count = param_count;
5766 fbi.aa_walked = 0;
5767
5768 descriptors.safe_grow_cleared (param_count);
5769 ipa_populate_param_decls (node, descriptors);
5770 calculate_dominance_info (CDI_DOMINATORS);
5771 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5772 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5773
5774 int i;
5775 struct ipa_bb_info *bi;
5776 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5777 free_ipa_bb_info (bi);
5778 fbi.bb_infos.release ();
5779 free_dominance_info (CDI_DOMINATORS);
5780 (*ipcp_transformations)[node->uid].agg_values = NULL;
5781 (*ipcp_transformations)[node->uid].bits = NULL;
5782 (*ipcp_transformations)[node->uid].m_vr = NULL;
5783
5784 descriptors.release ();
5785
5786 if (!something_changed)
5787 return 0;
5788 else if (cfg_changed)
5789 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5790 else
5791 return TODO_update_ssa_only_virtuals;
5792 }
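
/* An illustrative note, under the usual pass-manager semantics: the TODO
   flags returned above are consumed by the pass manager after this pass;
   TODO_update_ssa_only_virtuals refreshes virtual operands after loads
   were replaced by constants, and TODO_cleanup_cfg is requested only when
   purging dead EH edges actually changed the CFG.  */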