/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone. */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored. */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant. */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference. */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned. */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control. */
  int refcount;
};

/* Allocation pool for reference descriptions. */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP. */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
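
/* For illustration (editor's example, not from the original sources): a
   function carrying per-function options that disable optimization, e.g.

     __attribute__ ((optimize ("O0")))
     int
     keep_unoptimized (int x)
     {
       return x + 1;
     }

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION set with optimize == 0, so the
   predicate above returns true and IPA-CP leaves the function alone.  */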

/* Return index of the formal whose tree is PTREE in the vector of parameter
   descriptors DESCRIPTORS. */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO. */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE. */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has. */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump parameter number I of the function corresponding to INFO to FILE. */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters. */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls. */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F. */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F. */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F. */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one recording that nothing
   is known about the value. */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   jump function combination code).  The two functions will share their
   rdesc. */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function. */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function. */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function. */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function. */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body. */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change. */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for. */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change. */
  tree object;
  /* Set to true if dynamic type change has been detected. */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref. */
	}
    }
  return true;
}
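
/* For illustration (editor's example, not from the original sources): for

     struct B : A { virtual void f (); int m; };

   the three sections described above roughly correspond to a constructor of
   the shape

     B::B ()
     {
       A::A (this);               // section 1: ancestor constructors
       this->_vptr = &_ZTV1B[2];  // section 2: VMT pointer stores
       this->m = 0;               // section 3: user code, member ctors
     }

   so a backward walk from a statement in section 3 may ignore calls until it
   reaches the VMT pointer stores of section 2.  */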

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure. */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may not change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer. */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory. */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization. */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument. */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type. */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors. */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary. */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter. */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic. */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset. */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero). */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
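
/* For illustration (editor's example): the canonical dynamic type change
   these functions look for is placement new reusing the same storage:

     A a;
     a.f ();        // dynamic type is A
     new (&a) B;    // constructor stores a new VMT pointer
     a.f ();        // must not be devirtualized assuming type A

   The walk over virtual definitions spots the VMT pointer store between the
   two calls and makes the jump function unknown.  */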

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA. */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up. */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB. */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary. */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage. */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case. */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1. */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
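
/* For illustration (editor's example): for a parameter "int a" that is not a
   gimple register, the gimple

     a.0_2 = a;

   is a single assignment loading from the PARM_DECL a; if no aliased store
   clobbers a before this statement, the function above returns the index
   of a in the descriptors vector.  */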

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT. */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case. */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body. */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers. */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (FBI can be
   NULL), STMT is the load statement.  If the function returns true, *INDEX_P,
   *OFFSET_P and *BY_REF_P are filled with the parameter index, offset within
   the aggregate and whether it is a load from a value passed by reference
   respectively. */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters accessible in
   different stages of IPA optimizations.  FBI holds the information that is
   only needed for intraprocedural analysis. */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt. */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors. */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt. */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there. */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
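
/* For illustration (editor's example): in the common Itanium C++ ABI a
   pointer to member function such as int (T::*)(int) is represented as a
   record roughly of the shape

     struct
     {
       __pfn;    // pointer whose pointee type is a METHOD_TYPE
       __delta;  // integral adjustment to apply to the THIS pointer
     };

   i.e. exactly two fields, the first a method pointer and the second an
   integer, which is the shape the predicate above checks for.  */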

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is. */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call. */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate. */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown. */
  tree constant;
  /* Pointer to the next structure in the list. */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true. */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else. */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent. */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC. */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored. */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument. */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it. */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with. */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
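
/* For illustration (editor's example): given a caller like

     struct S { int a; int b; };
     extern void consume (struct S *);

     void
     caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       consume (&s);
     }

   the backward walk above starts at the call, records the stores to s.b and
   s.a, and (assuming a 32-bit int) produces an aggregate jump function with
   items at bit offsets 0 and 32 holding the constants 1 and 2.  */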

/* Return the type of the Ith formal parameter of the callee of edge E, or
   NULL if it cannot be determined. */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
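
/* For illustration (editor's example): given an unprototyped (K&R-style)
   callee such as

     void bar ();
     ...
     bar (p);

   TYPE_ARG_TYPES of the call type provides no entry for argument 0, so the
   function above falls back to DECL_ARGUMENTS of the callee declaration,
   when a callee is known.  */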

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite. */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle. */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may be better off working out the type based on the memory stores we
	 find. */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB. */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr. */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn. */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1806
1807 /* Returns true iff T is an SSA_NAME defined by a statement. */
1808
1809 static bool
1810 ipa_is_ssa_with_stmt_def (tree t)
1811 {
1812 if (TREE_CODE (t) == SSA_NAME
1813 && !SSA_NAME_IS_DEFAULT_DEF (t))
1814 return true;
1815 else
1816 return false;
1817 }
1818
1819 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1820 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1821 indirect call graph edge. */
1822
1823 static struct cgraph_edge *
1824 ipa_note_param_call (struct cgraph_node *node, int param_index,
1825 gcall *stmt)
1826 {
1827 struct cgraph_edge *cs;
1828
1829 cs = node->get_edge (stmt);
1830 cs->indirect_info->param_index = param_index;
1831 cs->indirect_info->agg_contents = 0;
1832 cs->indirect_info->member_ptr = 0;
1833 return cs;
1834 }
1835
1836 /* Analyze the CALL and examine uses of formal parameters of the caller
1837 FBI->node (described by FBI->info); intermediate information about each
1838 formal parameter is kept in FBI. Currently it checks
1839 whether the call calls a pointer that is a formal parameter and if so, the
1840 parameter is marked with the called flag and an indirect call graph edge
1841 describing the call is created. This is very simple for ordinary pointers
1842 represented in SSA but not-so-nice when it comes to member pointers. The
1843 ugly part of this function does nothing more than trying to match the
1844 pattern of such a call. An example of such a pattern is the gimple dump
1845 below, the call is on the last line:
1846
1847 <bb 2>:
1848 f$__delta_5 = f.__delta;
1849 f$__pfn_24 = f.__pfn;
1850
1851 or
1852 <bb 2>:
1853 f$__delta_5 = MEM[(struct *)&f];
1854 f$__pfn_24 = MEM[(struct *)&f + 4B];
1855
1856 and a few lines below:
1857
1858 <bb 5>
1859 D.2496_3 = (int) f$__pfn_24;
1860 D.2497_4 = D.2496_3 & 1;
1861 if (D.2497_4 != 0)
1862 goto <bb 3>;
1863 else
1864 goto <bb 4>;
1865
1866 <bb 6>:
1867 D.2500_7 = (unsigned int) f$__delta_5;
1868 D.2501_8 = &S + D.2500_7;
1869 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1870 D.2503_10 = *D.2502_9;
1871 D.2504_12 = f$__pfn_24 + -1;
1872 D.2505_13 = (unsigned int) D.2504_12;
1873 D.2506_14 = D.2503_10 + D.2505_13;
1874 D.2507_15 = *D.2506_14;
1875 iftmp.11_16 = (String:: *) D.2507_15;
1876
1877 <bb 7>:
1878 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1879 D.2500_19 = (unsigned int) f$__delta_5;
1880 D.2508_20 = &S + D.2500_19;
1881 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1882
1883 Such patterns are results of simple calls to a member pointer:
1884
1885 int doprinting (int (MyString::* f)(int) const)
1886 {
1887 MyString S ("somestring");
1888
1889 return (S.*f)(4);
1890 }
1891
1892 Moreover, the function also looks for called pointers loaded from aggregates
1893 passed by value or reference. */
1894
1895 static void
1896 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1897 tree target)
1898 {
1899 struct ipa_node_params *info = fbi->info;
1900 HOST_WIDE_INT offset;
1901 bool by_ref;
1902
1903 if (SSA_NAME_IS_DEFAULT_DEF (target))
1904 {
1905 tree var = SSA_NAME_VAR (target);
1906 int index = ipa_get_param_decl_index (info, var);
1907 if (index >= 0)
1908 ipa_note_param_call (fbi->node, index, call);
1909 return;
1910 }
1911
1912 int index;
1913 gimple *def = SSA_NAME_DEF_STMT (target);
1914 if (gimple_assign_single_p (def)
1915 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1916 gimple_assign_rhs1 (def), &index, &offset,
1917 NULL, &by_ref))
1918 {
1919 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1920 cs->indirect_info->offset = offset;
1921 cs->indirect_info->agg_contents = 1;
1922 cs->indirect_info->by_ref = by_ref;
1923 return;
1924 }
1925
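/* Illustrative example of the case just handled (assumed, not from the
   original sources): in

     struct ops { void (*cb) (int); };
     void f (struct ops *o) { o->cb (5); }

   the call target is loaded from an aggregate pointed to by a formal
   parameter, so the indirect edge records the parameter index of O, the
   offset of the CB field and by_ref set to true. */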
1926 /* Now we need to try to match the complex pattern of calling a member
1927 pointer. */
1928 if (gimple_code (def) != GIMPLE_PHI
1929 || gimple_phi_num_args (def) != 2
1930 || !POINTER_TYPE_P (TREE_TYPE (target))
1931 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1932 return;
1933
1934 /* First, we need to check whether one of these is a load from a member
1935 pointer that is a parameter to this function. */
1936 tree n1 = PHI_ARG_DEF (def, 0);
1937 tree n2 = PHI_ARG_DEF (def, 1);
1938 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1939 return;
1940 gimple *d1 = SSA_NAME_DEF_STMT (n1);
1941 gimple *d2 = SSA_NAME_DEF_STMT (n2);
1942
1943 tree rec;
1944 basic_block bb, virt_bb;
1945 basic_block join = gimple_bb (def);
1946 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1947 {
1948 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1949 return;
1950
1951 bb = EDGE_PRED (join, 0)->src;
1952 virt_bb = gimple_bb (d2);
1953 }
1954 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1955 {
1956 bb = EDGE_PRED (join, 1)->src;
1957 virt_bb = gimple_bb (d1);
1958 }
1959 else
1960 return;
1961
1962 /* Second, we need to check that the basic blocks are laid out in the way
1963 corresponding to the pattern. */
1964
1965 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1966 || single_pred (virt_bb) != bb
1967 || single_succ (virt_bb) != join)
1968 return;
1969
1970 /* Third, let's see that the branching is done depending on the least
1971 significant bit of the pfn. */
1972
1973 gimple *branch = last_stmt (bb);
1974 if (!branch || gimple_code (branch) != GIMPLE_COND)
1975 return;
1976
1977 if ((gimple_cond_code (branch) != NE_EXPR
1978 && gimple_cond_code (branch) != EQ_EXPR)
1979 || !integer_zerop (gimple_cond_rhs (branch)))
1980 return;
1981
1982 tree cond = gimple_cond_lhs (branch);
1983 if (!ipa_is_ssa_with_stmt_def (cond))
1984 return;
1985
1986 def = SSA_NAME_DEF_STMT (cond);
1987 if (!is_gimple_assign (def)
1988 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1989 || !integer_onep (gimple_assign_rhs2 (def)))
1990 return;
1991
1992 cond = gimple_assign_rhs1 (def);
1993 if (!ipa_is_ssa_with_stmt_def (cond))
1994 return;
1995
1996 def = SSA_NAME_DEF_STMT (cond);
1997
1998 if (is_gimple_assign (def)
1999 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2000 {
2001 cond = gimple_assign_rhs1 (def);
2002 if (!ipa_is_ssa_with_stmt_def (cond))
2003 return;
2004 def = SSA_NAME_DEF_STMT (cond);
2005 }
2006
2007 tree rec2;
2008 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2009 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2010 == ptrmemfunc_vbit_in_delta),
2011 NULL);
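/* Note added for clarity: on targets where TARGET_PTRMEMFUNC_VBIT_LOCATION
   is ptrmemfunc_vbit_in_delta, the "& 1" test matched above examines the
   delta field, which is why USE_DELTA is passed as true there and the load
   is expected to come from the delta field rather than the pfn field. */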
2012 if (rec != rec2)
2013 return;
2014
2015 index = ipa_get_param_decl_index (info, rec);
2016 if (index >= 0
2017 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2018 {
2019 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2020 cs->indirect_info->offset = offset;
2021 cs->indirect_info->agg_contents = 1;
2022 cs->indirect_info->member_ptr = 1;
2023 }
2024
2025 return;
2026 }
2027
2028 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2029 object referenced in the expression is a formal parameter of the caller
2030 FBI->node (described by FBI->info), create a call note for the
2031 statement. */
2032
2033 static void
2034 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2035 gcall *call, tree target)
2036 {
2037 tree obj = OBJ_TYPE_REF_OBJECT (target);
2038 int index;
2039 HOST_WIDE_INT anc_offset;
2040
2041 if (!flag_devirtualize)
2042 return;
2043
2044 if (TREE_CODE (obj) != SSA_NAME)
2045 return;
2046
2047 struct ipa_node_params *info = fbi->info;
2048 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2049 {
2050 struct ipa_jump_func jfunc;
2051 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2052 return;
2053
2054 anc_offset = 0;
2055 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2056 gcc_assert (index >= 0);
2057 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2058 call, &jfunc))
2059 return;
2060 }
2061 else
2062 {
2063 struct ipa_jump_func jfunc;
2064 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2065 tree expr;
2066
2067 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2068 if (!expr)
2069 return;
2070 index = ipa_get_param_decl_index (info,
2071 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2072 gcc_assert (index >= 0);
2073 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2074 call, &jfunc, anc_offset))
2075 return;
2076 }
2077
2078 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2079 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2080 ii->offset = anc_offset;
2081 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2082 ii->otr_type = obj_type_ref_class (target);
2083 ii->polymorphic = 1;
2084 }
2085
2086 /* Analyze call statement CALL to determine whether and how it utilizes
2087 formal parameters of the caller, described by FBI->info. Intermediate
2088 information about each formal parameter is kept in FBI. */
2089
2090 static void
2091 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2092 {
2093 tree target = gimple_call_fn (call);
2094
2095 if (!target
2096 || (TREE_CODE (target) != SSA_NAME
2097 && !virtual_method_call_p (target)))
2098 return;
2099
2100 struct cgraph_edge *cs = fbi->node->get_edge (call);
2101 /* If we previously turned the call into a direct call, there is
2102 no need to analyze. */
2103 if (cs && !cs->indirect_unknown_callee)
2104 return;
2105
2106 if (cs->indirect_info->polymorphic && flag_devirtualize)
2107 {
2108 tree instance;
2109 tree target = gimple_call_fn (call);
2110 ipa_polymorphic_call_context context (current_function_decl,
2111 target, call, &instance);
2112
2113 gcc_checking_assert (cs->indirect_info->otr_type
2114 == obj_type_ref_class (target));
2115 gcc_checking_assert (cs->indirect_info->otr_token
2116 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2117
2118 cs->indirect_info->vptr_changed
2119 = !context.get_dynamic_type (instance,
2120 OBJ_TYPE_REF_OBJECT (target),
2121 obj_type_ref_class (target), call);
2122 cs->indirect_info->context = context;
2123 }
2124
2125 if (TREE_CODE (target) == SSA_NAME)
2126 ipa_analyze_indirect_call_uses (fbi, call, target);
2127 else if (virtual_method_call_p (target))
2128 ipa_analyze_virtual_call_uses (fbi, call, target);
2129 }
2130
2131
2132 /* Analyze the call statement STMT with respect to formal parameters of the
2133 caller FBI->node (described by FBI->info). Currently it only checks whether
2134 formal parameters are called. */
2135
2136 static void
2137 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2138 {
2139 if (is_gimple_call (stmt))
2140 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2141 }
2142
2143 /* Callback of walk_stmt_load_store_addr_ops for loads, stores and addresses.
2144 If OP is a parameter declaration, mark it as used in the info structure
2145 passed in DATA. */
2146
2147 static bool
2148 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2149 {
2150 struct ipa_node_params *info = (struct ipa_node_params *) data;
2151
2152 op = get_base_address (op);
2153 if (op
2154 && TREE_CODE (op) == PARM_DECL)
2155 {
2156 int index = ipa_get_param_decl_index (info, op);
2157 gcc_assert (index >= 0);
2158 ipa_set_param_used (info, index, true);
2159 }
2160
2161 return false;
2162 }
2163
2164 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2165 the findings in various structures of the associated ipa_node_params
2166 structure, such as parameter flags, notes, etc. FBI holds various data about
2167 the function being analyzed. */
2168
2169 static void
2170 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2171 {
2172 gimple_stmt_iterator gsi;
2173 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2174 {
2175 gimple *stmt = gsi_stmt (gsi);
2176
2177 if (is_gimple_debug (stmt))
2178 continue;
2179
2180 ipa_analyze_stmt_uses (fbi, stmt);
2181 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2182 visit_ref_for_mod_analysis,
2183 visit_ref_for_mod_analysis,
2184 visit_ref_for_mod_analysis);
2185 }
2186 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2187 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2188 visit_ref_for_mod_analysis,
2189 visit_ref_for_mod_analysis,
2190 visit_ref_for_mod_analysis);
2191 }
2192
2193 /* Calculate controlled uses of parameters of NODE. */
2194
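/* For illustration (an assumed example, not from the sources): in

     static int helper (int *);
     int caller (int *p) { return helper (p) + helper (p); }

   every use of P is an argument of a call and thus described by a jump
   function, so P gets two controlled uses; any other kind of use (a
   dereference, storing P into memory, ...) forces the count to
   IPA_UNDESCRIBED_USE. */
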
2195 static void
2196 ipa_analyze_controlled_uses (struct cgraph_node *node)
2197 {
2198 struct ipa_node_params *info = IPA_NODE_REF (node);
2199
2200 for (int i = 0; i < ipa_get_param_count (info); i++)
2201 {
2202 tree parm = ipa_get_param (info, i);
2203 int controlled_uses = 0;
2204
2205 /* For SSA regs, see if the parameter is used. For non-SSA parameters
2206 we compute the flag during modification analysis. */
2207 if (is_gimple_reg (parm))
2208 {
2209 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2210 parm);
2211 if (ddef && !has_zero_uses (ddef))
2212 {
2213 imm_use_iterator imm_iter;
2214 use_operand_p use_p;
2215
2216 ipa_set_param_used (info, i, true);
2217 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2218 if (!is_gimple_call (USE_STMT (use_p)))
2219 {
2220 if (!is_gimple_debug (USE_STMT (use_p)))
2221 {
2222 controlled_uses = IPA_UNDESCRIBED_USE;
2223 break;
2224 }
2225 }
2226 else
2227 controlled_uses++;
2228 }
2229 else
2230 controlled_uses = 0;
2231 }
2232 else
2233 controlled_uses = IPA_UNDESCRIBED_USE;
2234 ipa_set_controlled_uses (info, i, controlled_uses);
2235 }
2236 }
2237
2238 /* Free the contents of BI. */
2239
2240 static void
2241 free_ipa_bb_info (struct ipa_bb_info *bi)
2242 {
2243 bi->cg_edges.release ();
2244 bi->param_aa_statuses.release ();
2245 }
2246
2247 /* Dominator walker driving the analysis. */
2248
2249 class analysis_dom_walker : public dom_walker
2250 {
2251 public:
2252 analysis_dom_walker (struct ipa_func_body_info *fbi)
2253 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2254
2255 virtual void before_dom_children (basic_block);
2256
2257 private:
2258 struct ipa_func_body_info *m_fbi;
2259 };
2260
2261 void
2262 analysis_dom_walker::before_dom_children (basic_block bb)
2263 {
2264 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2265 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2266 }
2267
2268 /* Initialize the array describing properties of formal parameters
2269 of NODE, analyze their uses and compute jump functions associated
2270 with actual arguments of calls from within NODE. */
2271
2272 void
2273 ipa_analyze_node (struct cgraph_node *node)
2274 {
2275 struct ipa_func_body_info fbi;
2276 struct ipa_node_params *info;
2277
2278 ipa_check_create_node_params ();
2279 ipa_check_create_edge_args ();
2280 info = IPA_NODE_REF (node);
2281
2282 if (info->analysis_done)
2283 return;
2284 info->analysis_done = 1;
2285
2286 if (ipa_func_spec_opts_forbid_analysis_p (node))
2287 {
2288 for (int i = 0; i < ipa_get_param_count (info); i++)
2289 {
2290 ipa_set_param_used (info, i, true);
2291 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2292 }
2293 return;
2294 }
2295
2296 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2297 push_cfun (func);
2298 calculate_dominance_info (CDI_DOMINATORS);
2299 ipa_initialize_node_params (node);
2300 ipa_analyze_controlled_uses (node);
2301
2302 fbi.node = node;
2303 fbi.info = IPA_NODE_REF (node);
2304 fbi.bb_infos = vNULL;
2305 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2306 fbi.param_count = ipa_get_param_count (info);
2307 fbi.aa_walked = 0;
2308
2309 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2310 {
2311 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2312 bi->cg_edges.safe_push (cs);
2313 }
2314
2315 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2316 {
2317 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2318 bi->cg_edges.safe_push (cs);
2319 }
2320
2321 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2322
2323 int i;
2324 struct ipa_bb_info *bi;
2325 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2326 free_ipa_bb_info (bi);
2327 fbi.bb_infos.release ();
2328 free_dominance_info (CDI_DOMINATORS);
2329 pop_cfun ();
2330 }
2331
2332 /* Update the jump functions associated with call graph edge E when the call
2333 graph edge CS is being inlined, assuming that E->caller is already (possibly
2334 indirectly) inlined into CS->callee and that E has not been inlined. */
2335
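/* An illustrative composition (assumed example): suppose E carried an
   ancestor jump function "the argument is formal 1 of E's caller offset by
   64 bits" and CS, now inlined, passed its own formal 2 through unchanged
   as that formal 1. Below, the jump function of E is re-expressed relative
   to CS's caller: an ancestor function of formal 2 with offset 64, with
   the agg_preserved flags of both functions ANDed together. */
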
2336 static void
2337 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2338 struct cgraph_edge *e)
2339 {
2340 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2341 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2342 int count = ipa_get_cs_argument_count (args);
2343 int i;
2344
2345 for (i = 0; i < count; i++)
2346 {
2347 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2348 struct ipa_polymorphic_call_context *dst_ctx
2349 = ipa_get_ith_polymorhic_call_context (args, i);
2350
2351 if (dst->type == IPA_JF_ANCESTOR)
2352 {
2353 struct ipa_jump_func *src;
2354 int dst_fid = dst->value.ancestor.formal_id;
2355 struct ipa_polymorphic_call_context *src_ctx
2356 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2357
2358 /* A variable number of arguments can cause havoc if we try to access
2359 one that does not exist in the inlined edge. So make sure we
2360 don't. */
2361 if (dst_fid >= ipa_get_cs_argument_count (top))
2362 {
2363 ipa_set_jf_unknown (dst);
2364 continue;
2365 }
2366
2367 src = ipa_get_ith_jump_func (top, dst_fid);
2368
2369 if (src_ctx && !src_ctx->useless_p ())
2370 {
2371 struct ipa_polymorphic_call_context ctx = *src_ctx;
2372
2373 /* TODO: Make type preserved safe WRT contexts. */
2374 if (!ipa_get_jf_ancestor_type_preserved (dst))
2375 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2376 ctx.offset_by (dst->value.ancestor.offset);
2377 if (!ctx.useless_p ())
2378 {
2379 if (!dst_ctx)
2380 {
2381 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2382 count);
2383 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2384 }
2385
2386 dst_ctx->combine_with (ctx);
2387 }
2388 }
2389
2390 if (src->agg.items
2391 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2392 {
2393 struct ipa_agg_jf_item *item;
2394 int j;
2395
2396 /* Currently we do not produce clobber aggregate jump functions;
2397 replace with merging when we do. */
2398 gcc_assert (!dst->agg.items);
2399
2400 dst->agg.items = vec_safe_copy (src->agg.items);
2401 dst->agg.by_ref = src->agg.by_ref;
2402 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2403 item->offset -= dst->value.ancestor.offset;
2404 }
2405
2406 if (src->type == IPA_JF_PASS_THROUGH
2407 && src->value.pass_through.operation == NOP_EXPR)
2408 {
2409 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2410 dst->value.ancestor.agg_preserved &=
2411 src->value.pass_through.agg_preserved;
2412 }
2413 else if (src->type == IPA_JF_ANCESTOR)
2414 {
2415 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2416 dst->value.ancestor.offset += src->value.ancestor.offset;
2417 dst->value.ancestor.agg_preserved &=
2418 src->value.ancestor.agg_preserved;
2419 }
2420 else
2421 ipa_set_jf_unknown (dst);
2422 }
2423 else if (dst->type == IPA_JF_PASS_THROUGH)
2424 {
2425 struct ipa_jump_func *src;
2426 /* We must check range due to calls with a variable number of arguments
2427 and we cannot combine jump functions with operations. */
2428 if (dst->value.pass_through.operation == NOP_EXPR
2429 && (dst->value.pass_through.formal_id
2430 < ipa_get_cs_argument_count (top)))
2431 {
2432 int dst_fid = dst->value.pass_through.formal_id;
2433 src = ipa_get_ith_jump_func (top, dst_fid);
2434 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2435 struct ipa_polymorphic_call_context *src_ctx
2436 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2437
2438 if (src_ctx && !src_ctx->useless_p ())
2439 {
2440 struct ipa_polymorphic_call_context ctx = *src_ctx;
2441
2442 /* TODO: Make type preserved safe WRT contexts. */
2443 if (!ipa_get_jf_pass_through_type_preserved (dst))
2444 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2445 if (!ctx.useless_p ())
2446 {
2447 if (!dst_ctx)
2448 {
2449 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2450 count);
2451 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2452 }
2453 dst_ctx->combine_with (ctx);
2454 }
2455 }
2456 switch (src->type)
2457 {
2458 case IPA_JF_UNKNOWN:
2459 ipa_set_jf_unknown (dst);
2460 break;
2461 case IPA_JF_CONST:
2462 ipa_set_jf_cst_copy (dst, src);
2463 break;
2464
2465 case IPA_JF_PASS_THROUGH:
2466 {
2467 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2468 enum tree_code operation;
2469 operation = ipa_get_jf_pass_through_operation (src);
2470
2471 if (operation == NOP_EXPR)
2472 {
2473 bool agg_p;
2474 agg_p = dst_agg_p
2475 && ipa_get_jf_pass_through_agg_preserved (src);
2476 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2477 }
2478 else
2479 {
2480 tree operand = ipa_get_jf_pass_through_operand (src);
2481 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2482 operation);
2483 }
2484 break;
2485 }
2486 case IPA_JF_ANCESTOR:
2487 {
2488 bool agg_p;
2489 agg_p = dst_agg_p
2490 && ipa_get_jf_ancestor_agg_preserved (src);
2491 ipa_set_ancestor_jf (dst,
2492 ipa_get_jf_ancestor_offset (src),
2493 ipa_get_jf_ancestor_formal_id (src),
2494 agg_p);
2495 break;
2496 }
2497 default:
2498 gcc_unreachable ();
2499 }
2500
2501 if (src->agg.items
2502 && (dst_agg_p || !src->agg.by_ref))
2503 {
2504 /* Currently we do not produce clobber aggregate jump
2505 functions; replace with merging when we do. */
2506 gcc_assert (!dst->agg.items);
2507
2508 dst->agg.by_ref = src->agg.by_ref;
2509 dst->agg.items = vec_safe_copy (src->agg.items);
2510 }
2511 }
2512 else
2513 ipa_set_jf_unknown (dst);
2514 }
2515 }
2516 }
2517
2518 /* If TARGET is an addr_expr of a function declaration, make it the
2519 (possibly SPECULATIVE) destination of an indirect edge IE and return the edge.
2520 Otherwise, return NULL. */
2521
2522 struct cgraph_edge *
2523 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2524 bool speculative)
2525 {
2526 struct cgraph_node *callee;
2527 struct inline_edge_summary *es = inline_edge_summary (ie);
2528 bool unreachable = false;
2529
2530 if (TREE_CODE (target) == ADDR_EXPR)
2531 target = TREE_OPERAND (target, 0);
2532 if (TREE_CODE (target) != FUNCTION_DECL)
2533 {
2534 target = canonicalize_constructor_val (target, NULL);
2535 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2536 {
2537 /* Member pointer call that goes through a VMT lookup. */
2538 if (ie->indirect_info->member_ptr
2539 /* Or if target is not an invariant expression and we do not
2540 know if it will evaluate to a function at runtime.
2541 This can happen when folding through &VAR, where &VAR
2542 is IP invariant, but VAR itself is not.
2543
2544 TODO: Revisit this when GCC 5 is branched. It seems that
2545 member_ptr check is not needed and that we may try to fold
2546 the expression and see if VAR is readonly. */
2547 || !is_gimple_ip_invariant (target))
2548 {
2549 if (dump_enabled_p ())
2550 {
2551 location_t loc = gimple_location_safe (ie->call_stmt);
2552 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2553 "discovered direct call non-invariant "
2554 "%s/%i\n",
2555 ie->caller->name (), ie->caller->order);
2556 }
2557 return NULL;
2558 }
2559
2560
2561 if (dump_enabled_p ())
2562 {
2563 location_t loc = gimple_location_safe (ie->call_stmt);
2564 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2565 "discovered direct call to non-function in %s/%i, "
2566 "making it __builtin_unreachable\n",
2567 ie->caller->name (), ie->caller->order);
2568 }
2569
2570 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2571 callee = cgraph_node::get_create (target);
2572 unreachable = true;
2573 }
2574 else
2575 callee = cgraph_node::get (target);
2576 }
2577 else
2578 callee = cgraph_node::get (target);
2579
2580 /* Because may-edges are not explicitly represented and the vtable may be external,
2581 we may create the first reference to the object in the unit. */
2582 if (!callee || callee->global.inlined_to)
2583 {
2584
2585 /* We had better make sure we can refer to it.
2586 In the case of static functions we are out of luck, since we already
2587 removed its body. In the case of public functions we may or may
2588 not introduce the reference. */
2589 if (!canonicalize_constructor_val (target, NULL)
2590 || !TREE_PUBLIC (target))
2591 {
2592 if (dump_file)
2593 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2594 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2595 xstrdup_for_dump (ie->caller->name ()),
2596 ie->caller->order,
2597 xstrdup_for_dump (ie->callee->name ()),
2598 ie->callee->order);
2599 return NULL;
2600 }
2601 callee = cgraph_node::get_create (target);
2602 }
2603
2604 /* If the edge is already speculative, check that it matches the new callee. */
2605 if (speculative && ie->speculative)
2606 {
2607 struct cgraph_edge *e2;
2608 struct ipa_ref *ref;
2609 ie->speculative_call_info (e2, ie, ref);
2610 if (e2->callee->ultimate_alias_target ()
2611 != callee->ultimate_alias_target ())
2612 {
2613 if (dump_file)
2614 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2615 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2616 xstrdup_for_dump (ie->caller->name ()),
2617 ie->caller->order,
2618 xstrdup_for_dump (callee->name ()),
2619 callee->order,
2620 xstrdup_for_dump (e2->callee->name ()),
2621 e2->callee->order);
2622 }
2623 else
2624 {
2625 if (dump_file)
2626 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2627 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2628 xstrdup_for_dump (ie->caller->name ()),
2629 ie->caller->order,
2630 xstrdup_for_dump (callee->name ()),
2631 callee->order);
2632 }
2633 return NULL;
2634 }
2635
2636 if (!dbg_cnt (devirt))
2637 return NULL;
2638
2639 ipa_check_create_node_params ();
2640
2641 /* We cannot make edges to inline clones. It is a bug if someone removed
2642 the cgraph node too early. */
2643 gcc_assert (!callee->global.inlined_to);
2644
2645 if (dump_file && !unreachable)
2646 {
2647 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2648 "(%s/%i -> %s/%i), for stmt ",
2649 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2650 speculative ? "speculative" : "known",
2651 xstrdup_for_dump (ie->caller->name ()),
2652 ie->caller->order,
2653 xstrdup_for_dump (callee->name ()),
2654 callee->order);
2655 if (ie->call_stmt)
2656 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2657 else
2658 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2659 }
2660 if (dump_enabled_p ())
2661 {
2662 location_t loc = gimple_location_safe (ie->call_stmt);
2663
2664 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2665 "converting indirect call in %s to direct call to %s\n",
2666 ie->caller->name (), callee->name ());
2667 }
2668 if (!speculative)
2669 {
2670 struct cgraph_edge *orig = ie;
2671 ie = ie->make_direct (callee);
2672 /* If we resolved speculative edge the cost is already up to date
2673 for direct call (adjusted by inline_edge_duplication_hook). */
2674 if (ie == orig)
2675 {
2676 es = inline_edge_summary (ie);
2677 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2678 - eni_size_weights.call_cost);
2679 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2680 - eni_time_weights.call_cost);
2681 }
2682 }
2683 else
2684 {
2685 if (!callee->can_be_discarded_p ())
2686 {
2687 cgraph_node *alias;
2688 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2689 if (alias)
2690 callee = alias;
2691 }
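/* The 8/10 scaling of count and frequency below is presumably a heuristic
   guess at the share of the profile taken by the speculated direct target;
   the remainder stays with the indirect edge. */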
2692 /* make_speculative will update ie's cost to direct call cost. */
2693 ie = ie->make_speculative
2694 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2695 }
2696
2697 return ie;
2698 }
2699
2700 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2701 return NULL if there is none. BY_REF specifies whether the value has to
2702 be passed by reference or by value. */
2703
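/* Illustrative use (assumed values): if AGG holds by-value items
   { offset 0: &foo, offset 64: 42 }, a query with OFFSET 64 and BY_REF
   false yields 42, whereas any query with BY_REF true fails the
   by_ref check below and returns NULL. */
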
2704 tree
2705 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2706 HOST_WIDE_INT offset, bool by_ref)
2707 {
2708 struct ipa_agg_jf_item *item;
2709 int i;
2710
2711 if (by_ref != agg->by_ref)
2712 return NULL;
2713
2714 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2715 if (item->offset == offset)
2716 {
2717 /* Currently we do not have clobber values; return NULL for them once
2718 we do. */
2719 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2720 return item->value;
2721 }
2722 return NULL;
2723 }
2724
2725 /* Remove a reference to SYMBOL from the list of references of a node given by
2726 reference description RDESC. Return true if the reference has been
2727 successfully found and removed. */
2728
2729 static bool
2730 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2731 {
2732 struct ipa_ref *to_del;
2733 struct cgraph_edge *origin;
2734
2735 origin = rdesc->cs;
2736 if (!origin)
2737 return false;
2738 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2739 origin->lto_stmt_uid);
2740 if (!to_del)
2741 return false;
2742
2743 to_del->remove_reference ();
2744 if (dump_file)
2745 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2746 xstrdup_for_dump (origin->caller->name ()),
2747 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2748 return true;
2749 }
2750
2751 /* If JFUNC has a reference description with refcount different from
2752 IPA_UNDESCRIBED_USE, return the reference description; otherwise return
2753 NULL. JFUNC must be a constant jump function. */
2754
2755 static struct ipa_cst_ref_desc *
2756 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2757 {
2758 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2759 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2760 return rdesc;
2761 else
2762 return NULL;
2763 }
2764
2765 /* If the value of constant jump function JFUNC is an address of a function
2766 declaration, return the associated call graph node. Otherwise return
2767 NULL. */
2768
2769 static cgraph_node *
2770 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2771 {
2772 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2773 tree cst = ipa_get_jf_constant (jfunc);
2774 if (TREE_CODE (cst) != ADDR_EXPR
2775 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2776 return NULL;
2777
2778 return cgraph_node::get (TREE_OPERAND (cst, 0));
2779 }
2780
2781
2782 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2783 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2784 the edge specified in the rdesc. Return false if either the symbol or the
2785 reference could not be found, otherwise return true. */
2786
2787 static bool
2788 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2789 {
2790 struct ipa_cst_ref_desc *rdesc;
2791 if (jfunc->type == IPA_JF_CONST
2792 && (rdesc = jfunc_rdesc_usable (jfunc))
2793 && --rdesc->refcount == 0)
2794 {
2795 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2796 if (!symbol)
2797 return false;
2798
2799 return remove_described_reference (symbol, rdesc);
2800 }
2801 return true;
2802 }
2803
2804 /* Try to find a destination for indirect edge IE that corresponds to a simple
2805 call or a call of a member function pointer and where the destination is a
2806 pointer formal parameter described by jump function JFUNC. If it can be
2807 determined, return the newly direct edge, otherwise return NULL.
2808 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2809
2810 static struct cgraph_edge *
2811 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2812 struct ipa_jump_func *jfunc,
2813 struct ipa_node_params *new_root_info)
2814 {
2815 struct cgraph_edge *cs;
2816 tree target;
2817 bool agg_contents = ie->indirect_info->agg_contents;
2818
2819 if (ie->indirect_info->agg_contents)
2820 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2821 ie->indirect_info->offset,
2822 ie->indirect_info->by_ref);
2823 else
2824 target = ipa_value_from_jfunc (new_root_info, jfunc);
2825 if (!target)
2826 return NULL;
2827 cs = ipa_make_edge_direct_to_target (ie, target);
2828
2829 if (cs && !agg_contents)
2830 {
2831 bool ok;
2832 gcc_checking_assert (cs->callee
2833 && (cs != ie
2834 || jfunc->type != IPA_JF_CONST
2835 || !cgraph_node_for_jfunc (jfunc)
2836 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2837 ok = try_decrement_rdesc_refcount (jfunc);
2838 gcc_checking_assert (ok);
2839 }
2840
2841 return cs;
2842 }
2843
2844 /* Return the target to be used in cases of impossible devirtualization. IE
2845 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2846
2847 tree
2848 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2849 {
2850 if (dump_file)
2851 {
2852 if (target)
2853 fprintf (dump_file,
2854 "Type inconsistent devirtualization: %s/%i->%s\n",
2855 ie->caller->name (), ie->caller->order,
2856 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2857 else
2858 fprintf (dump_file,
2859 "No devirtualization target in %s/%i\n",
2860 ie->caller->name (), ie->caller->order);
2861 }
2862 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2863 cgraph_node::get_create (new_target);
2864 return new_target;
2865 }
2866
2867 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2868 call based on a formal parameter which is described by jump function JFUNC
2869 and if it can be determined, make it direct and return the direct edge.
2870 Otherwise, return NULL. CTX describes the polymorphic context carried
2871 by the parameter on which the call is based. */
2872
2873 static struct cgraph_edge *
2874 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2875 struct ipa_jump_func *jfunc,
2876 struct ipa_polymorphic_call_context ctx)
2877 {
2878 tree target = NULL;
2879 bool speculative = false;
2880
2881 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2882 return NULL;
2883
2884 gcc_assert (!ie->indirect_info->by_ref);
2885
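/* Illustrative walk-through (assumed symbol names): if IPA-CP determined
   that the aggregate contents at the vptr offset are "&_ZTV1S + 16",
   vtable_pointer_value_to_vtable recovers the VAR_DECL of _ZTV1S and the
   offset 16, and gimple_get_virt_method_for_vtable then reads the
   FUNCTION_DECL stored in the slot for OTR_TOKEN of that vtable. */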
2886 /* Try to do lookup via known virtual table pointer value. */
2887 if (!ie->indirect_info->vptr_changed
2888 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2889 {
2890 tree vtable;
2891 unsigned HOST_WIDE_INT offset;
2892 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2893 ie->indirect_info->offset,
2894 true);
2895 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2896 {
2897 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2898 vtable, offset);
2899 if (t)
2900 {
2901 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2902 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2903 || !possible_polymorphic_call_target_p
2904 (ie, cgraph_node::get (t)))
2905 {
2906 /* Do not speculate builtin_unreachable; it is stupid! */
2907 if (!ie->indirect_info->vptr_changed)
2908 target = ipa_impossible_devirt_target (ie, target);
2909 }
2910 else
2911 {
2912 target = t;
2913 speculative = ie->indirect_info->vptr_changed;
2914 }
2915 }
2916 }
2917 }
2918
2919 ipa_polymorphic_call_context ie_context (ie);
2920 vec <cgraph_node *> targets;
2921 bool final;
2922
2923 ctx.offset_by (ie->indirect_info->offset);
2924 if (ie->indirect_info->vptr_changed)
2925 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2926 ie->indirect_info->otr_type);
2927 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2928 targets = possible_polymorphic_call_targets
2929 (ie->indirect_info->otr_type,
2930 ie->indirect_info->otr_token,
2931 ctx, &final);
2932 if (final && targets.length () <= 1)
2933 {
2934 speculative = false;
2935 if (targets.length () == 1)
2936 target = targets[0]->decl;
2937 else
2938 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2939 }
2940 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2941 && !ie->speculative && ie->maybe_hot_p ())
2942 {
2943 cgraph_node *n;
2944 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2945 ie->indirect_info->otr_token,
2946 ie->indirect_info->context);
2947 if (n)
2948 {
2949 target = n->decl;
2950 speculative = true;
2951 }
2952 }
2953
2954 if (target)
2955 {
2956 if (!possible_polymorphic_call_target_p
2957 (ie, cgraph_node::get_create (target)))
2958 {
2959 if (speculative)
2960 return NULL;
2961 target = ipa_impossible_devirt_target (ie, target);
2962 }
2963 return ipa_make_edge_direct_to_target (ie, target, speculative);
2964 }
2965 else
2966 return NULL;
2967 }
2968
2969 /* Update the param called notes associated with NODE when CS is being inlined,
2970 assuming NODE is (potentially indirectly) inlined into CS->callee.
2971 Moreover, if the callee is discovered to be constant, create a new cgraph
2972 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2973 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2974
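/* Illustrative remapping (assumed example): if an indirect call in NODE
   depended on NODE's formal 0, and the jump function computed for CS's
   argument 0 is a simple pass-through of the caller's formal 3, then after
   inlining the indirect call is recorded as depending on formal 3 of the
   new root; an ancestor jump function would additionally shift
   ici->offset. */
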
2975 static bool
2976 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2977 struct cgraph_node *node,
2978 vec<cgraph_edge *> *new_edges)
2979 {
2980 struct ipa_edge_args *top;
2981 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2982 struct ipa_node_params *new_root_info;
2983 bool res = false;
2984
2985 ipa_check_create_edge_args ();
2986 top = IPA_EDGE_REF (cs);
2987 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2988 ? cs->caller->global.inlined_to
2989 : cs->caller);
2990
2991 for (ie = node->indirect_calls; ie; ie = next_ie)
2992 {
2993 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2994 struct ipa_jump_func *jfunc;
2995 int param_index;
2996 cgraph_node *spec_target = NULL;
2997
2998 next_ie = ie->next_callee;
2999
3000 if (ici->param_index == -1)
3001 continue;
3002
3003 /* We must check the range due to calls with a variable number of arguments: */
3004 if (ici->param_index >= ipa_get_cs_argument_count (top))
3005 {
3006 ici->param_index = -1;
3007 continue;
3008 }
3009
3010 param_index = ici->param_index;
3011 jfunc = ipa_get_ith_jump_func (top, param_index);
3012
3013 if (ie->speculative)
3014 {
3015 struct cgraph_edge *de;
3016 struct ipa_ref *ref;
3017 ie->speculative_call_info (de, ie, ref);
3018 spec_target = de->callee;
3019 }
3020
3021 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3022 new_direct_edge = NULL;
3023 else if (ici->polymorphic)
3024 {
3025 ipa_polymorphic_call_context ctx;
3026 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3027 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3028 }
3029 else
3030 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3031 new_root_info);
3032 /* If speculation was removed, then we need to do nothing. */
3033 if (new_direct_edge && new_direct_edge != ie
3034 && new_direct_edge->callee == spec_target)
3035 {
3036 new_direct_edge->indirect_inlining_edge = 1;
3037 top = IPA_EDGE_REF (cs);
3038 res = true;
3039 if (!new_direct_edge->speculative)
3040 continue;
3041 }
3042 else if (new_direct_edge)
3043 {
3044 new_direct_edge->indirect_inlining_edge = 1;
3045 if (new_direct_edge->call_stmt)
3046 new_direct_edge->call_stmt_cannot_inline_p
3047 = !gimple_check_call_matching_types (
3048 new_direct_edge->call_stmt,
3049 new_direct_edge->callee->decl, false);
3050 if (new_edges)
3051 {
3052 new_edges->safe_push (new_direct_edge);
3053 res = true;
3054 }
3055 top = IPA_EDGE_REF (cs);
3056 /* If speculative edge was introduced we still need to update
3057 call info of the indirect edge. */
3058 if (!new_direct_edge->speculative)
3059 continue;
3060 }
3061 if (jfunc->type == IPA_JF_PASS_THROUGH
3062 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3063 {
3064 if (ici->agg_contents
3065 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3066 && !ici->polymorphic)
3067 ici->param_index = -1;
3068 else
3069 {
3070 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3071 if (ici->polymorphic
3072 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3073 ici->vptr_changed = true;
3074 }
3075 }
3076 else if (jfunc->type == IPA_JF_ANCESTOR)
3077 {
3078 if (ici->agg_contents
3079 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3080 && !ici->polymorphic)
3081 ici->param_index = -1;
3082 else
3083 {
3084 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3085 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3086 if (ici->polymorphic
3087 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3088 ici->vptr_changed = true;
3089 }
3090 }
3091 else
3092 /* Either we can find a destination for this edge now or never. */
3093 ici->param_index = -1;
3094 }
3095
3096 return res;
3097 }
3098
3099 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
3100 cgraph_edges when CS has been inlined and invoke
3101 update_indirect_edges_after_inlining on all nodes and
3102 update_jump_functions_after_inlining on all non-inlined edges that lead out
3103 of this subtree. Newly discovered indirect edges will be added to
3104 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3105 created. */
3106
3107 static bool
3108 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3109 struct cgraph_node *node,
3110 vec<cgraph_edge *> *new_edges)
3111 {
3112 struct cgraph_edge *e;
3113 bool res;
3114
3115 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3116
3117 for (e = node->callees; e; e = e->next_callee)
3118 if (!e->inline_failed)
3119 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3120 else
3121 update_jump_functions_after_inlining (cs, e);
3122 for (e = node->indirect_calls; e; e = e->next_callee)
3123 update_jump_functions_after_inlining (cs, e);
3124
3125 return res;
3126 }
3127
3128 /* Combine two controlled uses counts as done during inlining. */
3129
3130 static int
3131 combine_controlled_uses_counters (int c, int d)
3132 {
3133 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3134 return IPA_UNDESCRIBED_USE;
3135 else
3136 return c + d - 1;
3137 }
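
/* Worked example (illustrative): if the caller had C = 2 described uses of
   a value and the inlined callee had D = 3 described uses of the matching
   parameter, then the call that was just inlined accounted for one of the
   caller's uses, so the combined count is 2 + 3 - 1 = 4. */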
3138
3139 /* Propagate the number of controlled users from CS->callee to the new root of the
3140 tree of inlined nodes. */
3141
3142 static void
3143 propagate_controlled_uses (struct cgraph_edge *cs)
3144 {
3145 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3146 struct cgraph_node *new_root = cs->caller->global.inlined_to
3147 ? cs->caller->global.inlined_to : cs->caller;
3148 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3149 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3150 int count, i;
3151
3152 count = MIN (ipa_get_cs_argument_count (args),
3153 ipa_get_param_count (old_root_info));
3154 for (i = 0; i < count; i++)
3155 {
3156 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3157 struct ipa_cst_ref_desc *rdesc;
3158
3159 if (jf->type == IPA_JF_PASS_THROUGH)
3160 {
3161 int src_idx, c, d;
3162 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3163 c = ipa_get_controlled_uses (new_root_info, src_idx);
3164 d = ipa_get_controlled_uses (old_root_info, i);
3165
3166 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3167 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3168 c = combine_controlled_uses_counters (c, d);
3169 ipa_set_controlled_uses (new_root_info, src_idx, c);
3170 if (c == 0 && new_root_info->ipcp_orig_node)
3171 {
3172 struct cgraph_node *n;
3173 struct ipa_ref *ref;
3174 tree t = new_root_info->known_csts[src_idx];
3175
3176 if (t && TREE_CODE (t) == ADDR_EXPR
3177 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3178 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3179 && (ref = new_root->find_reference (n, NULL, 0)))
3180 {
3181 if (dump_file)
3182 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3183 "reference from %s/%i to %s/%i.\n",
3184 xstrdup_for_dump (new_root->name ()),
3185 new_root->order,
3186 xstrdup_for_dump (n->name ()), n->order);
3187 ref->remove_reference ();
3188 }
3189 }
3190 }
3191 else if (jf->type == IPA_JF_CONST
3192 && (rdesc = jfunc_rdesc_usable (jf)))
3193 {
3194 int d = ipa_get_controlled_uses (old_root_info, i);
3195 int c = rdesc->refcount;
3196 rdesc->refcount = combine_controlled_uses_counters (c, d);
3197 if (rdesc->refcount == 0)
3198 {
3199 tree cst = ipa_get_jf_constant (jf);
3200 struct cgraph_node *n;
3201 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3202 && TREE_CODE (TREE_OPERAND (cst, 0))
3203 == FUNCTION_DECL);
3204 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3205 if (n)
3206 {
3207 struct cgraph_node *clone;
3208 bool ok;
3209 ok = remove_described_reference (n, rdesc);
3210 gcc_checking_assert (ok);
3211
3212 clone = cs->caller;
3213 while (clone->global.inlined_to
3214 && clone != rdesc->cs->caller
3215 && IPA_NODE_REF (clone)->ipcp_orig_node)
3216 {
3217 struct ipa_ref *ref;
3218 ref = clone->find_reference (n, NULL, 0);
3219 if (ref)
3220 {
3221 if (dump_file)
3222 fprintf (dump_file, "ipa-prop: Removing "
3223 "cloning-created reference "
3224 "from %s/%i to %s/%i.\n",
3225 xstrdup_for_dump (clone->name ()),
3226 clone->order,
3227 xstrdup_for_dump (n->name ()),
3228 n->order);
3229 ref->remove_reference ();
3230 }
3231 clone = clone->callers->caller;
3232 }
3233 }
3234 }
3235 }
3236 }
3237
3238 for (i = ipa_get_param_count (old_root_info);
3239 i < ipa_get_cs_argument_count (args);
3240 i++)
3241 {
3242 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3243
3244 if (jf->type == IPA_JF_CONST)
3245 {
3246 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3247 if (rdesc)
3248 rdesc->refcount = IPA_UNDESCRIBED_USE;
3249 }
3250 else if (jf->type == IPA_JF_PASS_THROUGH)
3251 ipa_set_controlled_uses (new_root_info,
3252 jf->value.pass_through.formal_id,
3253 IPA_UNDESCRIBED_USE);
3254 }
3255 }
3256
3257 /* Update jump functions and call note functions on inlining the call site CS.
3258 CS is expected to lead to a node already cloned by
3259 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3260 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3261 created. */
3262
3263 bool
3264 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3265 vec<cgraph_edge *> *new_edges)
3266 {
3267 bool changed;
3268 /* Do nothing if the preparation phase has not been carried out yet
3269 (i.e. during early inlining). */
3270 if (!ipa_node_params_sum)
3271 return false;
3272 gcc_assert (ipa_edge_args_vector);
3273
3274 propagate_controlled_uses (cs);
3275 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3276
3277 return changed;
3278 }
3279
3280 /* Frees all dynamically allocated structures that the argument info points
3281 to. */
3282
3283 void
3284 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3285 {
3286 vec_free (args->jump_functions);
3287 memset (args, 0, sizeof (*args));
3288 }
3289
3290 /* Free all ipa_edge structures. */
3291
3292 void
3293 ipa_free_all_edge_args (void)
3294 {
3295 int i;
3296 struct ipa_edge_args *args;
3297
3298 if (!ipa_edge_args_vector)
3299 return;
3300
3301 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3302 ipa_free_edge_args_substructures (args);
3303
3304 vec_free (ipa_edge_args_vector);
3305 }
3306
3307 /* Frees all dynamically allocated structures that the param info points
3308 to. */
3309
3310 ipa_node_params::~ipa_node_params ()
3311 {
3312 descriptors.release ();
3313 free (lattices);
3314 /* Lattice values and their sources are deallocated with their allocation
3315 pool. */
3316 known_contexts.release ();
3317
3318 lattices = NULL;
3319 ipcp_orig_node = NULL;
3320 analysis_done = 0;
3321 node_enqueued = 0;
3322 do_clone_for_all_contexts = 0;
3323 is_all_contexts_clone = 0;
3324 node_dead = 0;
3325 }
3326
3327 /* Free all ipa_node_params structures. */
3328
3329 void
3330 ipa_free_all_node_params (void)
3331 {
3332 delete ipa_node_params_sum;
3333 ipa_node_params_sum = NULL;
3334 }
3335
3336 /* Grow ipcp_transformations if necessary. */
3337
3338 void
3339 ipcp_grow_transformations_if_necessary (void)
3340 {
3341 if (vec_safe_length (ipcp_transformations)
3342 <= (unsigned) symtab->cgraph_max_uid)
3343 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3344 }
3345
3346 /* Set the aggregate replacements of NODE to be AGGVALS. */
3347
3348 void
3349 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3350 struct ipa_agg_replacement_value *aggvals)
3351 {
3352 ipcp_grow_transformations_if_necessary ();
3353 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3354 }
3355
3356 /* Hook that is called by cgraph.c when an edge is removed. */
3357
3358 static void
3359 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3360 {
3361 struct ipa_edge_args *args;
3362
3363 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
3364 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3365 return;
3366
3367 args = IPA_EDGE_REF (cs);
3368 if (args->jump_functions)
3369 {
3370 struct ipa_jump_func *jf;
3371 int i;
3372 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3373 {
3374 struct ipa_cst_ref_desc *rdesc;
3375 try_decrement_rdesc_refcount (jf);
3376 if (jf->type == IPA_JF_CONST
3377 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3378 && rdesc->cs == cs)
3379 rdesc->cs = NULL;
3380 }
3381 }
3382
3383 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3384 }
3385
3386 /* Hook that is called by cgraph.c when an edge is duplicated. */
3387
3388 static void
3389 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3390 void *)
3391 {
3392 struct ipa_edge_args *old_args, *new_args;
3393 unsigned int i;
3394
3395 ipa_check_create_edge_args ();
3396
3397 old_args = IPA_EDGE_REF (src);
3398 new_args = IPA_EDGE_REF (dst);
3399
3400 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3401 if (old_args->polymorphic_call_contexts)
3402 new_args->polymorphic_call_contexts
3403 = vec_safe_copy (old_args->polymorphic_call_contexts);
3404
3405 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3406 {
3407 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3408 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3409
3410 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3411
3412 if (src_jf->type == IPA_JF_CONST)
3413 {
3414 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3415
3416 if (!src_rdesc)
3417 dst_jf->value.constant.rdesc = NULL;
3418 else if (src->caller == dst->caller)
3419 {
3420 struct ipa_ref *ref;
3421 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3422 gcc_checking_assert (n);
3423 ref = src->caller->find_reference (n, src->call_stmt,
3424 src->lto_stmt_uid);
3425 gcc_checking_assert (ref);
3426 dst->caller->clone_reference (ref, ref->stmt);
3427
3428 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3429 dst_rdesc->cs = dst;
3430 dst_rdesc->refcount = src_rdesc->refcount;
3431 dst_rdesc->next_duplicate = NULL;
3432 dst_jf->value.constant.rdesc = dst_rdesc;
3433 }
3434 else if (src_rdesc->cs == src)
3435 {
3436 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3437 dst_rdesc->cs = dst;
3438 dst_rdesc->refcount = src_rdesc->refcount;
3439 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3440 src_rdesc->next_duplicate = dst_rdesc;
3441 dst_jf->value.constant.rdesc = dst_rdesc;
3442 }
3443 else
3444 {
3445 struct ipa_cst_ref_desc *dst_rdesc;
3446 /* This can happen during inlining, when a JFUNC can refer to a
3447 reference taken in a function up in the tree of inline clones.
3448 We need to find the duplicate that refers to our tree of
3449 inline clones. */
3450
3451 gcc_assert (dst->caller->global.inlined_to);
3452 for (dst_rdesc = src_rdesc->next_duplicate;
3453 dst_rdesc;
3454 dst_rdesc = dst_rdesc->next_duplicate)
3455 {
3456 struct cgraph_node *top;
3457 top = dst_rdesc->cs->caller->global.inlined_to
3458 ? dst_rdesc->cs->caller->global.inlined_to
3459 : dst_rdesc->cs->caller;
3460 if (dst->caller->global.inlined_to == top)
3461 break;
3462 }
3463 gcc_assert (dst_rdesc);
3464 dst_jf->value.constant.rdesc = dst_rdesc;
3465 }
3466 }
3467 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3468 && src->caller == dst->caller)
3469 {
3470 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3471 ? dst->caller->global.inlined_to : dst->caller;
3472 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3473 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3474
3475 int c = ipa_get_controlled_uses (root_info, idx);
3476 if (c != IPA_UNDESCRIBED_USE)
3477 {
3478 c++;
3479 ipa_set_controlled_uses (root_info, idx, c);
3480 }
3481 }
3482 }
3483 }
3484
3485 /* Analyze a function newly added to the callgraph. */
3486
3487 static void
3488 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3489 {
3490 if (node->has_gimple_body_p ())
3491 ipa_analyze_node (node);
3492 }
3493
3494 /* Hook that is called by summary when a node is duplicated. */
3495
3496 void
3497 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3498 ipa_node_params *old_info,
3499 ipa_node_params *new_info)
3500 {
3501 ipa_agg_replacement_value *old_av, *new_av;
3502
3503 new_info->descriptors = old_info->descriptors.copy ();
3504 new_info->lattices = NULL;
3505 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3506
3507 new_info->analysis_done = old_info->analysis_done;
3508 new_info->node_enqueued = old_info->node_enqueued;
3509 new_info->versionable = old_info->versionable;
3510
3511 old_av = ipa_get_agg_replacements_for_node (src);
3512 if (old_av)
3513 {
3514 new_av = NULL;
3515 while (old_av)
3516 {
3517 struct ipa_agg_replacement_value *v;
3518
3519 v = ggc_alloc<ipa_agg_replacement_value> ();
3520 memcpy (v, old_av, sizeof (*v));
3521 v->next = new_av;
3522 new_av = v;
3523 old_av = old_av->next;
3524 }
3525 ipa_set_node_agg_value_chain (dst, new_av);
3526 }
3527
3528 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3529
3530 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3531 {
3532 ipcp_grow_transformations_if_necessary ();
3533 src_trans = ipcp_get_transformation_summary (src);
3534 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3535 vec<ipa_alignment, va_gc> *&dst_alignments
3536 = ipcp_get_transformation_summary (dst)->alignments;
3537 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3538 for (unsigned i = 0; i < src_alignments->length (); ++i)
3539 dst_alignments->quick_push ((*src_alignments)[i]);
3540 }
3541 }
3542
3543 /* Register our cgraph hooks if they are not already there. */
3544
3545 void
3546 ipa_register_cgraph_hooks (void)
3547 {
3548 ipa_check_create_node_params ();
3549
3550 if (!edge_removal_hook_holder)
3551 edge_removal_hook_holder =
3552 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3553 if (!edge_duplication_hook_holder)
3554 edge_duplication_hook_holder =
3555 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3556 function_insertion_hook_holder =
3557 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3558 }
3559
3560 /* Unregister our cgraph hooks. */
3561
3562 static void
3563 ipa_unregister_cgraph_hooks (void)
3564 {
3565 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3566 edge_removal_hook_holder = NULL;
3567 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3568 edge_duplication_hook_holder = NULL;
3569 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3570 function_insertion_hook_holder = NULL;
3571 }
3572
3573 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3574 longer needed after ipa-cp. */
3575
3576 void
3577 ipa_free_all_structures_after_ipa_cp (void)
3578 {
3579 if (!optimize && !in_lto_p)
3580 {
3581 ipa_free_all_edge_args ();
3582 ipa_free_all_node_params ();
3583 ipcp_sources_pool.release ();
3584 ipcp_cst_values_pool.release ();
3585 ipcp_poly_ctx_values_pool.release ();
3586 ipcp_agg_lattice_pool.release ();
3587 ipa_unregister_cgraph_hooks ();
3588 ipa_refdesc_pool.release ();
3589 }
3590 }
3591
3592 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3593 longer needed after indirect inlining. */
3594
3595 void
3596 ipa_free_all_structures_after_iinln (void)
3597 {
3598 ipa_free_all_edge_args ();
3599 ipa_free_all_node_params ();
3600 ipa_unregister_cgraph_hooks ();
3601 ipcp_sources_pool.release ();
3602 ipcp_cst_values_pool.release ();
3603 ipcp_poly_ctx_values_pool.release ();
3604 ipcp_agg_lattice_pool.release ();
3605 ipa_refdesc_pool.release ();
3606 }
3607
3608 /* Print the ipa_tree_map data structures (parameter descriptors) of
3609 function NODE to F. */
3610
3611 void
3612 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3613 {
3614 int i, count;
3615 struct ipa_node_params *info;
3616
3617 if (!node->definition)
3618 return;
3619 info = IPA_NODE_REF (node);
3620 fprintf (f, " function %s/%i parameter descriptors:\n",
3621 node->name (), node->order);
3622 count = ipa_get_param_count (info);
3623 for (i = 0; i < count; i++)
3624 {
3625 int c;
3626
3627 fprintf (f, " ");
3628 ipa_dump_param (f, info, i);
3629 if (ipa_is_param_used (info, i))
3630 fprintf (f, " used");
3631 c = ipa_get_controlled_uses (info, i);
3632 if (c == IPA_UNDESCRIBED_USE)
3633 fprintf (f, " undescribed_use");
3634 else
3635 fprintf (f, " controlled_uses=%i", c);
3636 fprintf (f, "\n");
3637 }
3638 }
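
/* For a function foo with two used parameters, the output printed above
   has roughly the following shape (the parameter text itself comes from
   ipa_dump_param):

     function foo/12 parameter descriptors:
       param #0 a used controlled_uses=0
       param #1 b used undescribed_use
*/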
3639
3640 /* Print ipa_tree_map data structures of all functions in the
3641 callgraph to F. */
3642
3643 void
3644 ipa_print_all_params (FILE * f)
3645 {
3646 struct cgraph_node *node;
3647
3648 fprintf (f, "\nFunction parameters:\n");
3649 FOR_EACH_FUNCTION (node)
3650 ipa_print_node_params (f, node);
3651 }
3652
3653 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3654
3655 vec<tree>
3656 ipa_get_vector_of_formal_parms (tree fndecl)
3657 {
3658 vec<tree> args;
3659 int count;
3660 tree parm;
3661
3662 gcc_assert (!flag_wpa);
3663 count = count_formal_params (fndecl);
3664 args.create (count);
3665 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3666 args.quick_push (parm);
3667
3668 return args;
3669 }
3670
3671 /* Return a heap allocated vector containing types of formal parameters of
3672 function type FNTYPE. */
3673
3674 vec<tree>
3675 ipa_get_vector_of_formal_parm_types (tree fntype)
3676 {
3677 vec<tree> types;
3678 int count = 0;
3679 tree t;
3680
3681 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3682 count++;
3683
3684 types.create (count);
3685 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3686 types.quick_push (TREE_VALUE (t));
3687
3688 return types;
3689 }
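
/* Both of the helpers above return vectors that the caller owns and must
   release explicitly, e.g.

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     ...
     parms.release ();

   as ipa_modify_formal_parameters below does for both OPARMS and OTYPES. */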
3690
3691 /* Modify the function declaration FNDECL and its type according to the plan in
3692 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3693 to point to the actual parameters being modified, which are determined by
3694 their base_index fields. */
3695
3696 void
3697 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3698 {
3699 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3700 tree orig_type = TREE_TYPE (fndecl);
3701 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3702
3703 /* The following test is an ugly hack; some functions simply don't have any
3704 arguments in their type. This is probably a bug but we need to cope with it. */
3705 bool care_for_types = (old_arg_types != NULL_TREE);
3706 bool last_parm_void;
3707 vec<tree> otypes;
3708 if (care_for_types)
3709 {
3710 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3711 == void_type_node);
3712 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3713 if (last_parm_void)
3714 gcc_assert (oparms.length () + 1 == otypes.length ());
3715 else
3716 gcc_assert (oparms.length () == otypes.length ());
3717 }
3718 else
3719 {
3720 last_parm_void = false;
3721 otypes.create (0);
3722 }
3723
3724 int len = adjustments.length ();
3725 tree *link = &DECL_ARGUMENTS (fndecl);
3726 tree new_arg_types = NULL;
3727 for (int i = 0; i < len; i++)
3728 {
3729 struct ipa_parm_adjustment *adj;
3730 gcc_assert (link);
3731
3732 adj = &adjustments[i];
3733 tree parm;
3734 if (adj->op == IPA_PARM_OP_NEW)
3735 parm = NULL;
3736 else
3737 parm = oparms[adj->base_index];
3738 adj->base = parm;
3739
3740 if (adj->op == IPA_PARM_OP_COPY)
3741 {
3742 if (care_for_types)
3743 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3744 new_arg_types);
3745 *link = parm;
3746 link = &DECL_CHAIN (parm);
3747 }
3748 else if (adj->op != IPA_PARM_OP_REMOVE)
3749 {
3750 tree new_parm;
3751 tree ptype;
3752
3753 if (adj->by_ref)
3754 ptype = build_pointer_type (adj->type);
3755 else
3756 {
3757 ptype = adj->type;
3758 if (is_gimple_reg_type (ptype))
3759 {
3760 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3761 if (TYPE_ALIGN (ptype) < malign)
3762 ptype = build_aligned_type (ptype, malign);
3763 }
3764 }
3765
3766 if (care_for_types)
3767 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3768
3769 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3770 ptype);
3771 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3772 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3773 DECL_ARTIFICIAL (new_parm) = 1;
3774 DECL_ARG_TYPE (new_parm) = ptype;
3775 DECL_CONTEXT (new_parm) = fndecl;
3776 TREE_USED (new_parm) = 1;
3777 DECL_IGNORED_P (new_parm) = 1;
3778 layout_decl (new_parm, 0);
3779
3780 if (adj->op == IPA_PARM_OP_NEW)
3781 adj->base = NULL;
3782 else
3783 adj->base = parm;
3784 adj->new_decl = new_parm;
3785
3786 *link = new_parm;
3787 link = &DECL_CHAIN (new_parm);
3788 }
3789 }
3790
3791 *link = NULL_TREE;
3792
3793 tree new_reversed = NULL;
3794 if (care_for_types)
3795 {
3796 new_reversed = nreverse (new_arg_types);
3797 if (last_parm_void)
3798 {
3799 if (new_reversed)
3800 TREE_CHAIN (new_arg_types) = void_list_node;
3801 else
3802 new_reversed = void_list_node;
3803 }
3804 }
3805
3806 /* Use copy_node to preserve as much as possible from the original type
3807 (debug info, attribute lists etc.). The exception is that METHOD_TYPEs
3808 must have a THIS argument; when we are asked to remove it, we need to
3809 build a new FUNCTION_TYPE instead. */
3811 tree new_type = NULL;
3812 if (TREE_CODE (orig_type) != METHOD_TYPE
3813 || (adjustments[0].op == IPA_PARM_OP_COPY
3814 && adjustments[0].base_index == 0))
3815 {
3816 new_type = build_distinct_type_copy (orig_type);
3817 TYPE_ARG_TYPES (new_type) = new_reversed;
3818 }
3819 else
3820 {
3821 new_type
3822 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3823 new_reversed));
3824 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3825 DECL_VINDEX (fndecl) = NULL_TREE;
3826 }
3827
3828 /* When the signature changes, we need to clear the builtin info. */
3829 if (DECL_BUILT_IN (fndecl))
3830 {
3831 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3832 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3833 }
3834
3835 TREE_TYPE (fndecl) = new_type;
3836 DECL_VIRTUAL_P (fndecl) = 0;
3837 DECL_LANG_SPECIFIC (fndecl) = NULL;
3838 otypes.release ();
3839 oparms.release ();
3840 }
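
/* As an illustration of the above, reducing the second parameter of

     void foo (int a, struct S *s);

   to the scalar field of *s at offset zero would use a two-element
   adjustment vector: a COPY with base_index 0 and a non-COPY entry with
   base_index 1, by_ref false and the field's type, yielding a signature
   of the form

     void foo (int a, int SYNTH.<n>);

   with the synthesized name produced by create_tmp_var_name from the
   adjustment's arg_prefix ("SYNTH" when none is given). */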
3841
3842 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3843 If this is a directly recursive call, CS must be NULL. Otherwise it must
3844 contain the corresponding call graph edge. */
3845
3846 void
3847 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3848 ipa_parm_adjustment_vec adjustments)
3849 {
3850 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3851 vec<tree> vargs;
3852 vec<tree, va_gc> **debug_args = NULL;
3853 gcall *new_stmt;
3854 gimple_stmt_iterator gsi, prev_gsi;
3855 tree callee_decl;
3856 int i, len;
3857
3858 len = adjustments.length ();
3859 vargs.create (len);
3860 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3861 current_node->remove_stmt_references (stmt);
3862
3863 gsi = gsi_for_stmt (stmt);
3864 prev_gsi = gsi;
3865 gsi_prev (&prev_gsi);
3866 for (i = 0; i < len; i++)
3867 {
3868 struct ipa_parm_adjustment *adj;
3869
3870 adj = &adjustments[i];
3871
3872 if (adj->op == IPA_PARM_OP_COPY)
3873 {
3874 tree arg = gimple_call_arg (stmt, adj->base_index);
3875
3876 vargs.quick_push (arg);
3877 }
3878 else if (adj->op != IPA_PARM_OP_REMOVE)
3879 {
3880 tree expr, base, off;
3881 location_t loc;
3882 unsigned int deref_align = 0;
3883 bool deref_base = false;
3884
3885 /* Since we create a new parameter out of the value of the old one, we can
3886 do the following kinds of transformations:
3887
3888 - A scalar passed by reference is converted to a scalar passed by
3889 value. (adj->by_ref is false and the type of the original
3890 actual argument is a pointer to a scalar).
3891
3892 - A part of an aggregate is passed instead of the whole aggregate.
3893 The part can be passed either by value or by reference, this is
3894 determined by value of adj->by_ref. Moreover, the code below
3895 handles both situations when the original aggregate is passed by
3896 value (its type is not a pointer) and when it is passed by
3897 reference (it is a pointer to an aggregate).
3898
3899 When the new argument is passed by reference (adj->by_ref is true)
3900 it must be a part of an aggregate and therefore we form it by
3901 simply taking the address of a reference inside the original
3902 aggregate. */
3903
3904 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3905 base = gimple_call_arg (stmt, adj->base_index);
3906 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3907 : EXPR_LOCATION (base);
3908
3909 if (TREE_CODE (base) != ADDR_EXPR
3910 && POINTER_TYPE_P (TREE_TYPE (base)))
3911 off = build_int_cst (adj->alias_ptr_type,
3912 adj->offset / BITS_PER_UNIT);
3913 else
3914 {
3915 HOST_WIDE_INT base_offset;
3916 tree prev_base;
3917 bool addrof;
3918
3919 if (TREE_CODE (base) == ADDR_EXPR)
3920 {
3921 base = TREE_OPERAND (base, 0);
3922 addrof = true;
3923 }
3924 else
3925 addrof = false;
3926 prev_base = base;
3927 base = get_addr_base_and_unit_offset (base, &base_offset);
3928 /* Aggregate arguments can have non-invariant addresses. */
3929 if (!base)
3930 {
3931 base = build_fold_addr_expr (prev_base);
3932 off = build_int_cst (adj->alias_ptr_type,
3933 adj->offset / BITS_PER_UNIT);
3934 }
3935 else if (TREE_CODE (base) == MEM_REF)
3936 {
3937 if (!addrof)
3938 {
3939 deref_base = true;
3940 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3941 }
3942 off = build_int_cst (adj->alias_ptr_type,
3943 base_offset
3944 + adj->offset / BITS_PER_UNIT);
3945 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3946 off);
3947 base = TREE_OPERAND (base, 0);
3948 }
3949 else
3950 {
3951 off = build_int_cst (adj->alias_ptr_type,
3952 base_offset
3953 + adj->offset / BITS_PER_UNIT);
3954 base = build_fold_addr_expr (base);
3955 }
3956 }
3957
3958 if (!adj->by_ref)
3959 {
3960 tree type = adj->type;
3961 unsigned int align;
3962 unsigned HOST_WIDE_INT misalign;
3963
3964 if (deref_base)
3965 {
3966 align = deref_align;
3967 misalign = 0;
3968 }
3969 else
3970 {
3971 get_pointer_alignment_1 (base, &align, &misalign);
3972 if (TYPE_ALIGN (type) > align)
3973 align = TYPE_ALIGN (type);
3974 }
3975 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3976 * BITS_PER_UNIT);
3977 misalign = misalign & (align - 1);
3978 if (misalign != 0)
3979 align = (misalign & -misalign);
3980 if (align < TYPE_ALIGN (type))
3981 type = build_aligned_type (type, align);
3982 base = force_gimple_operand_gsi (&gsi, base,
3983 true, NULL, true, GSI_SAME_STMT);
3984 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3985 /* If expr is not a valid gimple call argument, emit
3986 a load into a temporary. */
3987 if (is_gimple_reg_type (TREE_TYPE (expr)))
3988 {
3989 gimple *tem = gimple_build_assign (NULL_TREE, expr);
3990 if (gimple_in_ssa_p (cfun))
3991 {
3992 gimple_set_vuse (tem, gimple_vuse (stmt));
3993 expr = make_ssa_name (TREE_TYPE (expr), tem);
3994 }
3995 else
3996 expr = create_tmp_reg (TREE_TYPE (expr));
3997 gimple_assign_set_lhs (tem, expr);
3998 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3999 }
4000 }
4001 else
4002 {
4003 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4004 expr = build_fold_addr_expr (expr);
4005 expr = force_gimple_operand_gsi (&gsi, expr,
4006 true, NULL, true, GSI_SAME_STMT);
4007 }
4008 vargs.quick_push (expr);
4009 }
4010 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4011 {
4012 unsigned int ix;
4013 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4014 gimple *def_temp;
4015
4016 arg = gimple_call_arg (stmt, adj->base_index);
4017 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4018 {
4019 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4020 continue;
4021 arg = fold_convert_loc (gimple_location (stmt),
4022 TREE_TYPE (origin), arg);
4023 }
4024 if (debug_args == NULL)
4025 debug_args = decl_debug_args_insert (callee_decl);
4026 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4027 if (ddecl == origin)
4028 {
4029 ddecl = (**debug_args)[ix + 1];
4030 break;
4031 }
4032 if (ddecl == NULL)
4033 {
4034 ddecl = make_node (DEBUG_EXPR_DECL);
4035 DECL_ARTIFICIAL (ddecl) = 1;
4036 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4037 DECL_MODE (ddecl) = DECL_MODE (origin);
4038
4039 vec_safe_push (*debug_args, origin);
4040 vec_safe_push (*debug_args, ddecl);
4041 }
4042 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4043 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4044 }
4045 }
4046
4047 if (dump_file && (dump_flags & TDF_DETAILS))
4048 {
4049 fprintf (dump_file, "replacing stmt:");
4050 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4051 }
4052
4053 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4054 vargs.release ();
4055 if (gimple_call_lhs (stmt))
4056 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4057
4058 gimple_set_block (new_stmt, gimple_block (stmt));
4059 if (gimple_has_location (stmt))
4060 gimple_set_location (new_stmt, gimple_location (stmt));
4061 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4062 gimple_call_copy_flags (new_stmt, stmt);
4063 if (gimple_in_ssa_p (cfun))
4064 {
4065 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4066 if (gimple_vdef (stmt))
4067 {
4068 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4069 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4070 }
4071 }
4072
4073 if (dump_file && (dump_flags & TDF_DETAILS))
4074 {
4075 fprintf (dump_file, "with stmt:");
4076 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4077 fprintf (dump_file, "\n");
4078 }
4079 gsi_replace (&gsi, new_stmt, true);
4080 if (cs)
4081 cs->set_call_stmt (new_stmt);
4082 do
4083 {
4084 current_node->record_stmt_references (gsi_stmt (gsi));
4085 gsi_prev (&gsi);
4086 }
4087 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4088 }
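
/* Continuing the example given after ipa_modify_formal_parameters, a call

     foo (i, &s);

   is rewritten here to the equivalent of

     foo (i, s.f);

   the COPY adjustment pushes the first argument unchanged, the reduction
   builds a MEM_REF from the original pointer argument and the recorded
   offset (loading it into a temporary when needed), and a debug bind is
   emitted for the vanished parameter when debug info is enabled. */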
4089
4090 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4091 so. ADJUSTMENTS is a vector of adjustments. CONVERT specifies whether
4092 the function should care about type incompatibility between the
4093 current and new expressions. If it is false, the function will leave
4094 incompatibility issues to the caller. Return true iff the expression
4095 was modified. */
4096
4097 bool
4098 ipa_modify_expr (tree *expr, bool convert,
4099 ipa_parm_adjustment_vec adjustments)
4100 {
4101 struct ipa_parm_adjustment *cand
4102 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4103 if (!cand)
4104 return false;
4105
4106 tree src;
4107 if (cand->by_ref)
4108 src = build_simple_mem_ref (cand->new_decl);
4109 else
4110 src = cand->new_decl;
4111
4112 if (dump_file && (dump_flags & TDF_DETAILS))
4113 {
4114 fprintf (dump_file, "About to replace expr ");
4115 print_generic_expr (dump_file, *expr, 0);
4116 fprintf (dump_file, " with ");
4117 print_generic_expr (dump_file, src, 0);
4118 fprintf (dump_file, "\n");
4119 }
4120
4121 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4122 {
4123 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4124 *expr = vce;
4125 }
4126 else
4127 *expr = src;
4128 return true;
4129 }
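
/* For instance, if a piece of parameter P was turned into a new by-value
   parameter P$0, an access to that piece in the function body is simply
   replaced by P$0 (wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and
   the types disagree); had the new parameter been by_ref, the replacement
   would instead be the dereference *P$0 built by build_simple_mem_ref
   above. */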
4130
4131 /* If T is an SSA_NAME, return NULL if it is not a default def or
4132 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4133 the base variable is always returned, regardless of whether it is a
4134 default def. Return T if it is not an SSA_NAME. */
4135
4136 static tree
4137 get_ssa_base_param (tree t, bool ignore_default_def)
4138 {
4139 if (TREE_CODE (t) == SSA_NAME)
4140 {
4141 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4142 return SSA_NAME_VAR (t);
4143 else
4144 return NULL_TREE;
4145 }
4146 return t;
4147 }
4148
4149 /* Given an expression, return an adjustment entry specifying the
4150 transformation to be done on EXPR. If no suitable adjustment entry
4151 was found, returns NULL.
4152
4153 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs that are not a
4154 default def; otherwise bail on them.
4155
4156 If CONVERT is non-NULL, this function will set *CONVERT if the
4157 expression provided is a component reference. ADJUSTMENTS is the
4158 adjustments vector. */
4159
4160 ipa_parm_adjustment *
4161 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4162 ipa_parm_adjustment_vec adjustments,
4163 bool ignore_default_def)
4164 {
4165 if (TREE_CODE (**expr) == BIT_FIELD_REF
4166 || TREE_CODE (**expr) == IMAGPART_EXPR
4167 || TREE_CODE (**expr) == REALPART_EXPR)
4168 {
4169 *expr = &TREE_OPERAND (**expr, 0);
4170 if (convert)
4171 *convert = true;
4172 }
4173
4174 HOST_WIDE_INT offset, size, max_size;
4175 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4176 if (!base || size == -1 || max_size == -1)
4177 return NULL;
4178
4179 if (TREE_CODE (base) == MEM_REF)
4180 {
4181 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4182 base = TREE_OPERAND (base, 0);
4183 }
4184
4185 base = get_ssa_base_param (base, ignore_default_def);
4186 if (!base || TREE_CODE (base) != PARM_DECL)
4187 return NULL;
4188
4189 struct ipa_parm_adjustment *cand = NULL;
4190 unsigned int len = adjustments.length ();
4191 for (unsigned i = 0; i < len; i++)
4192 {
4193 struct ipa_parm_adjustment *adj = &adjustments[i];
4194
4195 if (adj->base == base
4196 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4197 {
4198 cand = adj;
4199 break;
4200 }
4201 }
4202
4203 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4204 return NULL;
4205 return cand;
4206 }
4207
4208 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4209
4210 static bool
4211 index_in_adjustments_multiple_times_p (int base_index,
4212 ipa_parm_adjustment_vec adjustments)
4213 {
4214 int i, len = adjustments.length ();
4215 bool one = false;
4216
4217 for (i = 0; i < len; i++)
4218 {
4219 struct ipa_parm_adjustment *adj;
4220 adj = &adjustments[i];
4221
4222 if (adj->base_index == base_index)
4223 {
4224 if (one)
4225 return true;
4226 else
4227 one = true;
4228 }
4229 }
4230 return false;
4231 }
4232
4233
4234 /* Return adjustments that should have the same effect on function parameters
4235 and call arguments as if they were first changed according to adjustments in
4236 INNER and then by adjustments in OUTER. */
4237
4238 ipa_parm_adjustment_vec
4239 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4240 ipa_parm_adjustment_vec outer)
4241 {
4242 int i, outlen = outer.length ();
4243 int inlen = inner.length ();
4244 int removals = 0;
4245 ipa_parm_adjustment_vec adjustments, tmp;
4246
4247 tmp.create (inlen);
4248 for (i = 0; i < inlen; i++)
4249 {
4250 struct ipa_parm_adjustment *n;
4251 n = &inner[i];
4252
4253 if (n->op == IPA_PARM_OP_REMOVE)
4254 removals++;
4255 else
4256 {
4257 /* FIXME: Handling of new arguments is not implemented yet. */
4258 gcc_assert (n->op != IPA_PARM_OP_NEW);
4259 tmp.quick_push (*n);
4260 }
4261 }
4262
4263 adjustments.create (outlen + removals);
4264 for (i = 0; i < outlen; i++)
4265 {
4266 struct ipa_parm_adjustment r;
4267 struct ipa_parm_adjustment *out = &outer[i];
4268 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4269
4270 memset (&r, 0, sizeof (r));
4271 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4272 if (out->op == IPA_PARM_OP_REMOVE)
4273 {
4274 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4275 {
4276 r.op = IPA_PARM_OP_REMOVE;
4277 adjustments.quick_push (r);
4278 }
4279 continue;
4280 }
4281 else
4282 {
4283 /* FIXME: Handling of new arguments is not implemented yet. */
4284 gcc_assert (out->op != IPA_PARM_OP_NEW);
4285 }
4286
4287 r.base_index = in->base_index;
4288 r.type = out->type;
4289
4290 /* FIXME: Create nonlocal value too. */
4291
4292 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4293 r.op = IPA_PARM_OP_COPY;
4294 else if (in->op == IPA_PARM_OP_COPY)
4295 r.offset = out->offset;
4296 else if (out->op == IPA_PARM_OP_COPY)
4297 r.offset = in->offset;
4298 else
4299 r.offset = in->offset + out->offset;
4300 adjustments.quick_push (r);
4301 }
4302
4303 for (i = 0; i < inlen; i++)
4304 {
4305 struct ipa_parm_adjustment *n = &inner[i];
4306
4307 if (n->op == IPA_PARM_OP_REMOVE)
4308 adjustments.quick_push (*n);
4309 }
4310
4311 tmp.release ();
4312 return adjustments;
4313 }
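
/* For example, if INNER reduced parameter 0 to its piece at bit offset 32
   and OUTER further reduces the resulting parameter to the piece at bit
   offset 16, the combined adjustment reduces the original parameter to
   bit offset 48; the offsets simply add up. Removals recorded in INNER
   are appended at the end so that they survive the composition. */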
4314
4315 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4316 way, assuming they are meant to be applied to FNDECL. */
4317
4318 void
4319 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4320 tree fndecl)
4321 {
4322 int i, len = adjustments.length ();
4323 bool first = true;
4324 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4325
4326 fprintf (file, "IPA param adjustments: ");
4327 for (i = 0; i < len; i++)
4328 {
4329 struct ipa_parm_adjustment *adj;
4330 adj = &adjustments[i];
4331
4332 if (!first)
4333 fprintf (file, " ");
4334 else
4335 first = false;
4336
4337 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4338 print_generic_expr (file, parms[adj->base_index], 0);
4339 if (adj->base)
4340 {
4341 fprintf (file, ", base: ");
4342 print_generic_expr (file, adj->base, 0);
4343 }
4344 if (adj->new_decl)
4345 {
4346 fprintf (file, ", new_decl: ");
4347 print_generic_expr (file, adj->new_decl, 0);
4348 }
4349 if (adj->new_ssa_base)
4350 {
4351 fprintf (file, ", new_ssa_base: ");
4352 print_generic_expr (file, adj->new_ssa_base, 0);
4353 }
4354
4355 if (adj->op == IPA_PARM_OP_COPY)
4356 fprintf (file, ", copy_param");
4357 else if (adj->op == IPA_PARM_OP_REMOVE)
4358 fprintf (file, ", remove_param");
4359 else
4360 fprintf (file, ", offset %li", (long) adj->offset);
4361 if (adj->by_ref)
4362 fprintf (file, ", by_ref");
4363 print_node_brief (file, ", type: ", adj->type, 0);
4364 fprintf (file, "\n");
4365 }
4366 parms.release ();
4367 }
4368
4369 /* Dump the linked list of aggregate replacement values AV to F. */
4370
4371 void
4372 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4373 {
4374 bool comma = false;
4375 fprintf (f, " Aggregate replacements:");
4376 for (; av; av = av->next)
4377 {
4378 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4379 av->index, av->offset);
4380 print_generic_expr (f, av->value, 0);
4381 comma = true;
4382 }
4383 fprintf (f, "\n");
4384 }
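
/* The dump produced above is a comma-separated list of
   index[offset]=value entries, e.g.

     Aggregate replacements: 0[0]=1, 1[32]=4

   with offsets in bits, matching how the values are looked up during the
   transformation phase below. */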
4385
4386 /* Stream out jump function JUMP_FUNC to OB. */
4387
4388 static void
4389 ipa_write_jump_function (struct output_block *ob,
4390 struct ipa_jump_func *jump_func)
4391 {
4392 struct ipa_agg_jf_item *item;
4393 struct bitpack_d bp;
4394 int i, count;
4395
4396 streamer_write_uhwi (ob, jump_func->type);
4397 switch (jump_func->type)
4398 {
4399 case IPA_JF_UNKNOWN:
4400 break;
4401 case IPA_JF_CONST:
4402 gcc_assert (
4403 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4404 stream_write_tree (ob, jump_func->value.constant.value, true);
4405 break;
4406 case IPA_JF_PASS_THROUGH:
4407 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4408 if (jump_func->value.pass_through.operation == NOP_EXPR)
4409 {
4410 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4411 bp = bitpack_create (ob->main_stream);
4412 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4413 streamer_write_bitpack (&bp);
4414 }
4415 else
4416 {
4417 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4418 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4419 }
4420 break;
4421 case IPA_JF_ANCESTOR:
4422 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4423 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4424 bp = bitpack_create (ob->main_stream);
4425 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4426 streamer_write_bitpack (&bp);
4427 break;
4428 }
4429
4430 count = vec_safe_length (jump_func->agg.items);
4431 streamer_write_uhwi (ob, count);
4432 if (count)
4433 {
4434 bp = bitpack_create (ob->main_stream);
4435 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4436 streamer_write_bitpack (&bp);
4437 }
4438
4439 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4440 {
4441 streamer_write_uhwi (ob, item->offset);
4442 stream_write_tree (ob, item->value, true);
4443 }
4444
4445 bp = bitpack_create (ob->main_stream);
4446 bp_pack_value (&bp, jump_func->alignment.known, 1);
4447 streamer_write_bitpack (&bp);
4448 if (jump_func->alignment.known)
4449 {
4450 streamer_write_uhwi (ob, jump_func->alignment.align);
4451 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4452 }
4453 }
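
/* The stream layout produced above (type tag, type-specific payload,
   aggregate item count and items, then the alignment bitpack) must be
   mirrored exactly by ipa_read_jump_function below; the two functions
   have to be kept in sync. */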
4454
4455 /* Read in jump function JUMP_FUNC from IB. */
4456
4457 static void
4458 ipa_read_jump_function (struct lto_input_block *ib,
4459 struct ipa_jump_func *jump_func,
4460 struct cgraph_edge *cs,
4461 struct data_in *data_in)
4462 {
4463 enum jump_func_type jftype;
4464 enum tree_code operation;
4465 int i, count;
4466
4467 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4468 switch (jftype)
4469 {
4470 case IPA_JF_UNKNOWN:
4471 ipa_set_jf_unknown (jump_func);
4472 break;
4473 case IPA_JF_CONST:
4474 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4475 break;
4476 case IPA_JF_PASS_THROUGH:
4477 operation = (enum tree_code) streamer_read_uhwi (ib);
4478 if (operation == NOP_EXPR)
4479 {
4480 int formal_id = streamer_read_uhwi (ib);
4481 struct bitpack_d bp = streamer_read_bitpack (ib);
4482 bool agg_preserved = bp_unpack_value (&bp, 1);
4483 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4484 }
4485 else
4486 {
4487 tree operand = stream_read_tree (ib, data_in);
4488 int formal_id = streamer_read_uhwi (ib);
4489 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4490 operation);
4491 }
4492 break;
4493 case IPA_JF_ANCESTOR:
4494 {
4495 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4496 int formal_id = streamer_read_uhwi (ib);
4497 struct bitpack_d bp = streamer_read_bitpack (ib);
4498 bool agg_preserved = bp_unpack_value (&bp, 1);
4499 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4500 break;
4501 }
4502 }
4503
4504 count = streamer_read_uhwi (ib);
4505 vec_alloc (jump_func->agg.items, count);
4506 if (count)
4507 {
4508 struct bitpack_d bp = streamer_read_bitpack (ib);
4509 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4510 }
4511 for (i = 0; i < count; i++)
4512 {
4513 struct ipa_agg_jf_item item;
4514 item.offset = streamer_read_uhwi (ib);
4515 item.value = stream_read_tree (ib, data_in);
4516 jump_func->agg.items->quick_push (item);
4517 }
4518
4519 struct bitpack_d bp = streamer_read_bitpack (ib);
4520 bool alignment_known = bp_unpack_value (&bp, 1);
4521 if (alignment_known)
4522 {
4523 jump_func->alignment.known = true;
4524 jump_func->alignment.align = streamer_read_uhwi (ib);
4525 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4526 }
4527 else
4528 jump_func->alignment.known = false;
4529 }
4530
4531 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4532 relevant to indirect inlining to OB. */
4533
4534 static void
4535 ipa_write_indirect_edge_info (struct output_block *ob,
4536 struct cgraph_edge *cs)
4537 {
4538 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4539 struct bitpack_d bp;
4540
4541 streamer_write_hwi (ob, ii->param_index);
4542 bp = bitpack_create (ob->main_stream);
4543 bp_pack_value (&bp, ii->polymorphic, 1);
4544 bp_pack_value (&bp, ii->agg_contents, 1);
4545 bp_pack_value (&bp, ii->member_ptr, 1);
4546 bp_pack_value (&bp, ii->by_ref, 1);
4547 bp_pack_value (&bp, ii->vptr_changed, 1);
4548 streamer_write_bitpack (&bp);
4549 if (ii->agg_contents || ii->polymorphic)
4550 streamer_write_hwi (ob, ii->offset);
4551 else
4552 gcc_assert (ii->offset == 0);
4553
4554 if (ii->polymorphic)
4555 {
4556 streamer_write_hwi (ob, ii->otr_token);
4557 stream_write_tree (ob, ii->otr_type, true);
4558 ii->context.stream_out (ob);
4559 }
4560 }
4561
4562 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4563 relevant to indirect inlining from IB. */
4564
4565 static void
4566 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4567 struct data_in *data_in,
4568 struct cgraph_edge *cs)
4569 {
4570 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4571 struct bitpack_d bp;
4572
4573 ii->param_index = (int) streamer_read_hwi (ib);
4574 bp = streamer_read_bitpack (ib);
4575 ii->polymorphic = bp_unpack_value (&bp, 1);
4576 ii->agg_contents = bp_unpack_value (&bp, 1);
4577 ii->member_ptr = bp_unpack_value (&bp, 1);
4578 ii->by_ref = bp_unpack_value (&bp, 1);
4579 ii->vptr_changed = bp_unpack_value (&bp, 1);
4580 if (ii->agg_contents || ii->polymorphic)
4581 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4582 else
4583 ii->offset = 0;
4584 if (ii->polymorphic)
4585 {
4586 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4587 ii->otr_type = stream_read_tree (ib, data_in);
4588 ii->context.stream_in (ib, data_in);
4589 }
4590 }
4591
4592 /* Stream out NODE info to OB. */
4593
4594 static void
4595 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4596 {
4597 int node_ref;
4598 lto_symtab_encoder_t encoder;
4599 struct ipa_node_params *info = IPA_NODE_REF (node);
4600 int j;
4601 struct cgraph_edge *e;
4602 struct bitpack_d bp;
4603
4604 encoder = ob->decl_state->symtab_node_encoder;
4605 node_ref = lto_symtab_encoder_encode (encoder, node);
4606 streamer_write_uhwi (ob, node_ref);
4607
4608 streamer_write_uhwi (ob, ipa_get_param_count (info));
4609 for (j = 0; j < ipa_get_param_count (info); j++)
4610 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4611 bp = bitpack_create (ob->main_stream);
4612 gcc_assert (info->analysis_done
4613 || ipa_get_param_count (info) == 0);
4614 gcc_assert (!info->node_enqueued);
4615 gcc_assert (!info->ipcp_orig_node);
4616 for (j = 0; j < ipa_get_param_count (info); j++)
4617 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4618 streamer_write_bitpack (&bp);
4619 for (j = 0; j < ipa_get_param_count (info); j++)
4620 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4621 for (e = node->callees; e; e = e->next_callee)
4622 {
4623 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4624
4625 streamer_write_uhwi (ob,
4626 ipa_get_cs_argument_count (args) * 2
4627 + (args->polymorphic_call_contexts != NULL));
4628 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4629 {
4630 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4631 if (args->polymorphic_call_contexts != NULL)
4632 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4633 }
4634 }
4635 for (e = node->indirect_calls; e; e = e->next_callee)
4636 {
4637 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4638
4639 streamer_write_uhwi (ob,
4640 ipa_get_cs_argument_count (args) * 2
4641 + (args->polymorphic_call_contexts != NULL));
4642 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4643 {
4644 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4645 if (args->polymorphic_call_contexts != NULL)
4646 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4647 }
4648 ipa_write_indirect_edge_info (ob, e);
4649 }
4650 }
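
/* Note the encoding used for each edge above: the streamed count is
   ARGUMENT_COUNT * 2 with the low bit set when polymorphic call contexts
   are present, so e.g. three arguments stream as 7 with contexts and as 6
   without. ipa_read_node_info below recovers both pieces with COUNT & 1
   and COUNT / 2. */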
4651
4652 /* Stream in NODE info from IB. */
4653
4654 static void
4655 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4656 struct data_in *data_in)
4657 {
4658 struct ipa_node_params *info = IPA_NODE_REF (node);
4659 int k;
4660 struct cgraph_edge *e;
4661 struct bitpack_d bp;
4662
4663 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4664
4665 for (k = 0; k < ipa_get_param_count (info); k++)
4666 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4667
4668 bp = streamer_read_bitpack (ib);
4669 if (ipa_get_param_count (info) != 0)
4670 info->analysis_done = true;
4671 info->node_enqueued = false;
4672 for (k = 0; k < ipa_get_param_count (info); k++)
4673 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4674 for (k = 0; k < ipa_get_param_count (info); k++)
4675 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4676 for (e = node->callees; e; e = e->next_callee)
4677 {
4678 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4679 int count = streamer_read_uhwi (ib);
4680 bool contexts_computed = count & 1;
4681 count /= 2;
4682
4683 if (!count)
4684 continue;
4685 vec_safe_grow_cleared (args->jump_functions, count);
4686 if (contexts_computed)
4687 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4688
4689 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4690 {
4691 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4692 data_in);
4693 if (contexts_computed)
4694 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4695 }
4696 }
4697 for (e = node->indirect_calls; e; e = e->next_callee)
4698 {
4699 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4700 int count = streamer_read_uhwi (ib);
4701 bool contexts_computed = count & 1;
4702 count /= 2;
4703
4704 if (count)
4705 {
4706 vec_safe_grow_cleared (args->jump_functions, count);
4707 if (contexts_computed)
4708 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4709 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4710 {
4711 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4712 data_in);
4713 if (contexts_computed)
4714 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4715 }
4716 }
4717 ipa_read_indirect_edge_info (ib, data_in, e);
4718 }
4719 }
4720
4721 /* Write jump functions for the functions in the current LTO partition. */
4722
4723 void
4724 ipa_prop_write_jump_functions (void)
4725 {
4726 struct cgraph_node *node;
4727 struct output_block *ob;
4728 unsigned int count = 0;
4729 lto_symtab_encoder_iterator lsei;
4730 lto_symtab_encoder_t encoder;
4731
4732 if (!ipa_node_params_sum)
4733 return;
4734
4735 ob = create_output_block (LTO_section_jump_functions);
4736 encoder = ob->decl_state->symtab_node_encoder;
4737 ob->symbol = NULL;
4738 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4739 lsei_next_function_in_partition (&lsei))
4740 {
4741 node = lsei_cgraph_node (lsei);
4742 if (node->has_gimple_body_p ()
4743 && IPA_NODE_REF (node) != NULL)
4744 count++;
4745 }
4746
4747 streamer_write_uhwi (ob, count);
4748
4749 /* Process all of the functions. */
4750 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4751 lsei_next_function_in_partition (&lsei))
4752 {
4753 node = lsei_cgraph_node (lsei);
4754 if (node->has_gimple_body_p ()
4755 && IPA_NODE_REF (node) != NULL)
4756 ipa_write_node_info (ob, node);
4757 }
4758 streamer_write_char_stream (ob->main_stream, 0);
4759 produce_asm (ob, NULL);
4760 destroy_output_block (ob);
4761 }
4762
4763 /* Read section in file FILE_DATA of length LEN with data DATA. */
4764
4765 static void
4766 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4767 size_t len)
4768 {
4769 const struct lto_function_header *header =
4770 (const struct lto_function_header *) data;
4771 const int cfg_offset = sizeof (struct lto_function_header);
4772 const int main_offset = cfg_offset + header->cfg_size;
4773 const int string_offset = main_offset + header->main_size;
4774 struct data_in *data_in;
4775 unsigned int i;
4776 unsigned int count;
4777
4778 lto_input_block ib_main ((const char *) data + main_offset,
4779 header->main_size, file_data->mode_table);
4780
4781 data_in =
4782 lto_data_in_create (file_data, (const char *) data + string_offset,
4783 header->string_size, vNULL);
4784 count = streamer_read_uhwi (&ib_main);
4785
4786 for (i = 0; i < count; i++)
4787 {
4788 unsigned int index;
4789 struct cgraph_node *node;
4790 lto_symtab_encoder_t encoder;
4791
4792 index = streamer_read_uhwi (&ib_main);
4793 encoder = file_data->symtab_node_encoder;
4794 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4795 index));
4796 gcc_assert (node->definition);
4797 ipa_read_node_info (&ib_main, node, data_in);
4798 }
4799 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4800 len);
4801 lto_data_in_delete (data_in);
4802 }
4803
4804 /* Read IPA-CP jump functions. */
4805
4806 void
4807 ipa_prop_read_jump_functions (void)
4808 {
4809 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4810 struct lto_file_decl_data *file_data;
4811 unsigned int j = 0;
4812
4813 ipa_check_create_node_params ();
4814 ipa_check_create_edge_args ();
4815 ipa_register_cgraph_hooks ();
4816
4817 while ((file_data = file_data_vec[j++]))
4818 {
4819 size_t len;
4820 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4821
4822 if (data)
4823 ipa_prop_read_section (file_data, data, len);
4824 }
4825 }
4826
4827 /* After merging units, we can get mismatches in argument counts, and decl
4828 merging might have rendered parameter lists obsolete. Also compute
4829 called_with_variable_arg info. */
4830
4831 void
4832 ipa_update_after_lto_read (void)
4833 {
4834 ipa_check_create_node_params ();
4835 ipa_check_create_edge_args ();
4836 }
4837
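/* Stream out the aggregate value replacement chain and the parameter
   alignment information for NODE to OB. */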
4838 void
4839 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4840 {
4841 int node_ref;
4842 unsigned int count = 0;
4843 lto_symtab_encoder_t encoder;
4844 struct ipa_agg_replacement_value *aggvals, *av;
4845
4846 aggvals = ipa_get_agg_replacements_for_node (node);
4847 encoder = ob->decl_state->symtab_node_encoder;
4848 node_ref = lto_symtab_encoder_encode (encoder, node);
4849 streamer_write_uhwi (ob, node_ref);
4850
4851 for (av = aggvals; av; av = av->next)
4852 count++;
4853 streamer_write_uhwi (ob, count);
4854
4855 for (av = aggvals; av; av = av->next)
4856 {
4857 struct bitpack_d bp;
4858
4859 streamer_write_uhwi (ob, av->offset);
4860 streamer_write_uhwi (ob, av->index);
4861 stream_write_tree (ob, av->value, true);
4862
4863 bp = bitpack_create (ob->main_stream);
4864 bp_pack_value (&bp, av->by_ref, 1);
4865 streamer_write_bitpack (&bp);
4866 }
4867
4868 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4869 if (ts && vec_safe_length (ts->alignments) > 0)
4870 {
4871 count = ts->alignments->length ();
4872
4873 streamer_write_uhwi (ob, count);
4874 for (unsigned i = 0; i < count; ++i)
4875 {
4876 ipa_alignment *parm_al = &(*ts->alignments)[i];
4877
4878 struct bitpack_d bp;
4879 bp = bitpack_create (ob->main_stream);
4880 bp_pack_value (&bp, parm_al->known, 1);
4881 streamer_write_bitpack (&bp);
4882 if (parm_al->known)
4883 {
4884 streamer_write_uhwi (ob, parm_al->align);
4885 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4886 parm_al->misalign);
4887 }
4888 }
4889 }
4890 else
4891 streamer_write_uhwi (ob, 0);
4892 }
4893
4894 /* Stream in the aggregate value replacement chain for NODE from IB. */
4895
4896 static void
4897 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4898 data_in *data_in)
4899 {
4900 struct ipa_agg_replacement_value *aggvals = NULL;
4901 unsigned int count, i;
4902
4903 count = streamer_read_uhwi (ib);
4904 for (i = 0; i < count; i++)
4905 {
4906 struct ipa_agg_replacement_value *av;
4907 struct bitpack_d bp;
4908
4909 av = ggc_alloc<ipa_agg_replacement_value> ();
4910 av->offset = streamer_read_uhwi (ib);
4911 av->index = streamer_read_uhwi (ib);
4912 av->value = stream_read_tree (ib, data_in);
4913 bp = streamer_read_bitpack (ib);
4914 av->by_ref = bp_unpack_value (&bp, 1);
4915 av->next = aggvals;
4916 aggvals = av;
4917 }
4918 ipa_set_node_agg_value_chain (node, aggvals);
4919
4920 count = streamer_read_uhwi (ib);
4921 if (count > 0)
4922 {
4923 ipcp_grow_transformations_if_necessary ();
4924
4925 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4926 vec_safe_grow_cleared (ts->alignments, count);
4927
4928 for (i = 0; i < count; i++)
4929 {
4930 ipa_alignment *parm_al;
4931 parm_al = &(*ts->alignments)[i];
4932 struct bitpack_d bp;
4933 bp = streamer_read_bitpack (ib);
4934 parm_al->known = bp_unpack_value (&bp, 1);
4935 if (parm_al->known)
4936 {
4937 parm_al->align = streamer_read_uhwi (ib);
4938 parm_al->misalign
4939 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4940 0, parm_al->align);
4941 }
4942 }
4943 }
4944 }
4945
4946 /* Write all aggregate replacements and alignments for nodes in the current partition. */
4947
4948 void
4949 ipcp_write_transformation_summaries (void)
4950 {
4951 struct cgraph_node *node;
4952 struct output_block *ob;
4953 unsigned int count = 0;
4954 lto_symtab_encoder_iterator lsei;
4955 lto_symtab_encoder_t encoder;
4956
4957 ob = create_output_block (LTO_section_ipcp_transform);
4958 encoder = ob->decl_state->symtab_node_encoder;
4959 ob->symbol = NULL;
4960 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4961 lsei_next_function_in_partition (&lsei))
4962 {
4963 node = lsei_cgraph_node (lsei);
4964 if (node->has_gimple_body_p ())
4965 count++;
4966 }
4967
4968 streamer_write_uhwi (ob, count);
4969
4970 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4971 lsei_next_function_in_partition (&lsei))
4972 {
4973 node = lsei_cgraph_node (lsei);
4974 if (node->has_gimple_body_p ())
4975 write_ipcp_transformation_info (ob, node);
4976 }
4977 streamer_write_char_stream (ob->main_stream, 0);
4978 produce_asm (ob, NULL);
4979 destroy_output_block (ob);
4980 }
4981
4982 /* Read replacements section in file FILE_DATA of length LEN with data
4983 DATA. */
4984
4985 static void
4986 read_replacements_section (struct lto_file_decl_data *file_data,
4987 const char *data,
4988 size_t len)
4989 {
4990 const struct lto_function_header *header =
4991 (const struct lto_function_header *) data;
4992 const int cfg_offset = sizeof (struct lto_function_header);
4993 const int main_offset = cfg_offset + header->cfg_size;
4994 const int string_offset = main_offset + header->main_size;
4995 struct data_in *data_in;
4996 unsigned int i;
4997 unsigned int count;
4998
4999 lto_input_block ib_main ((const char *) data + main_offset,
5000 header->main_size, file_data->mode_table);
5001
5002 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5003 header->string_size, vNULL);
5004 count = streamer_read_uhwi (&ib_main);
5005
5006 for (i = 0; i < count; i++)
5007 {
5008 unsigned int index;
5009 struct cgraph_node *node;
5010 lto_symtab_encoder_t encoder;
5011
5012 index = streamer_read_uhwi (&ib_main);
5013 encoder = file_data->symtab_node_encoder;
5014 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5015 index));
5016 gcc_assert (node->definition);
5017 read_ipcp_transformation_info (&ib_main, node, data_in);
5018 }
5019 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5020 len);
5021 lto_data_in_delete (data_in);
5022 }
5023
5024 /* Read IPA-CP transformation summaries (aggregate replacements and alignments). */
5025
5026 void
5027 ipcp_read_transformation_summaries (void)
5028 {
5029 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5030 struct lto_file_decl_data *file_data;
5031 unsigned int j = 0;
5032
5033 while ((file_data = file_data_vec[j++]))
5034 {
5035 size_t len;
5036 const char *data = lto_get_section_data (file_data,
5037 LTO_section_ipcp_transform,
5038 NULL, &len);
5039 if (data)
5040 read_replacements_section (file_data, data, len);
5041 }
5042 }
5043
5044 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5045 NODE. */
5046
5047 static void
5048 adjust_agg_replacement_values (struct cgraph_node *node,
5049 struct ipa_agg_replacement_value *aggval)
5050 {
5051 struct ipa_agg_replacement_value *v;
5052 int i, c = 0, d = 0, *adj;
5053
5054 if (!node->clone.combined_args_to_skip)
5055 return;
5056
5057 for (v = aggval; v; v = v->next)
5058 {
5059 gcc_assert (v->index >= 0);
5060 if (c < v->index)
5061 c = v->index;
5062 }
5063 c++;
5064
5065 adj = XALLOCAVEC (int, c);
5066 for (i = 0; i < c; i++)
5067 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5068 {
5069 adj[i] = -1;
5070 d++;
5071 }
5072 else
5073 adj[i] = i - d;
5074
5075 for (v = aggval; v; v = v->next)
5076 v->index = adj[v->index];
5077 }
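
/* For example, with combined_args_to_skip = {1} and values recorded for
   parameter indices 0 and 2, the mapping built above is adj = {0, -1, 1}
   and the surviving values are renumbered to indices 0 and 1. */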
5078
5079 /* Dominator walker driving the ipcp modification phase. */
5080
5081 class ipcp_modif_dom_walker : public dom_walker
5082 {
5083 public:
5084 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5085 vec<ipa_param_descriptor> descs,
5086 struct ipa_agg_replacement_value *av,
5087 bool *sc, bool *cc)
5088 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5089 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5090
5091 virtual void before_dom_children (basic_block);
5092
5093 private:
5094 struct ipa_func_body_info *m_fbi;
5095 vec<ipa_param_descriptor> m_descriptors;
5096 struct ipa_agg_replacement_value *m_aggval;
5097 bool *m_something_changed, *m_cfg_changed;
5098 };
5099
5100 void
5101 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5102 {
5103 gimple_stmt_iterator gsi;
5104 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5105 {
5106 struct ipa_agg_replacement_value *v;
5107 gimple *stmt = gsi_stmt (gsi);
5108 tree rhs, val, t;
5109 HOST_WIDE_INT offset, size;
5110 int index;
5111 bool by_ref, vce;
5112
5113 if (!gimple_assign_load_p (stmt))
5114 continue;
5115 rhs = gimple_assign_rhs1 (stmt);
5116 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5117 continue;
5118
5119 vce = false;
5120 t = rhs;
5121 while (handled_component_p (t))
5122 {
5123 /* V_C_E can do things like convert an array of integers to one
5124 bigger integer and similar things that we do not handle below. */
5125 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5126 {
5127 vce = true;
5128 break;
5129 }
5130 t = TREE_OPERAND (t, 0);
5131 }
5132 if (vce)
5133 continue;
5134
5135 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5136 &offset, &size, &by_ref))
5137 continue;
5138 for (v = m_aggval; v; v = v->next)
5139 if (v->index == index
5140 && v->offset == offset)
5141 break;
5142 if (!v
5143 || v->by_ref != by_ref
5144 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5145 continue;
5146
5147 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5148 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5149 {
5150 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5151 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5152 else if (TYPE_SIZE (TREE_TYPE (rhs))
5153 == TYPE_SIZE (TREE_TYPE (v->value)))
5154 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5155 else
5156 {
5157 if (dump_file)
5158 {
5159 fprintf (dump_file, " const ");
5160 print_generic_expr (dump_file, v->value, 0);
5161 fprintf (dump_file, " can't be converted to type of ");
5162 print_generic_expr (dump_file, rhs, 0);
5163 fprintf (dump_file, "\n");
5164 }
5165 continue;
5166 }
5167 }
5168 else
5169 val = v->value;
5170
5171 if (dump_file && (dump_flags & TDF_DETAILS))
5172 {
5173 fprintf (dump_file, "Modifying stmt:\n ");
5174 print_gimple_stmt (dump_file, stmt, 0, 0);
5175 }
5176 gimple_assign_set_rhs_from_tree (&gsi, val);
5177 update_stmt (stmt);
5178
5179 if (dump_file && (dump_flags & TDF_DETAILS))
5180 {
5181 fprintf (dump_file, "into:\n ");
5182 print_gimple_stmt (dump_file, stmt, 0, 0);
5183 fprintf (dump_file, "\n");
5184 }
5185
5186 *m_something_changed = true;
5187 if (maybe_clean_eh_stmt (stmt)
5188 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5189 *m_cfg_changed = true;
5190 }
5191
5192 }
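
/* As an example of the rewrite above, given an aggregate replacement
   1[32]=4 for the current function, a load

     x_1 = p_2(D)->f;

   where p is parameter 1 and field f sits at bit offset 32 with matching
   size and by_ref-ness, is modified in place to

     x_1 = 4;

   with a NOP_EXPR or VIEW_CONVERT_EXPR folded in when only the types
   differ. */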
5193
5194 /* Update alignment of formal parameters as described in
5195 ipcp_transformation_summary. */
5196
5197 static void
5198 ipcp_update_alignments (struct cgraph_node *node)
5199 {
5200 tree fndecl = node->decl;
5201 tree parm = DECL_ARGUMENTS (fndecl);
5202 tree next_parm = parm;
5203 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5204 if (!ts || vec_safe_length (ts->alignments) == 0)
5205 return;
5206 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5207 unsigned count = alignments.length ();
5208
5209 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5210 {
5211 if (node->clone.combined_args_to_skip
5212 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5213 continue;
5214 gcc_checking_assert (parm);
5215 next_parm = DECL_CHAIN (parm);
5216
5217 if (!alignments[i].known || !is_gimple_reg (parm))
5218 continue;
5219 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5220 if (!ddef)
5221 continue;
5222
5223 if (dump_file)
5224 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5225 "misalignment to %u\n", i, alignments[i].align,
5226 alignments[i].misalign);
5227
5228 struct ptr_info_def *pi = get_ptr_info (ddef);
5229 gcc_checking_assert (pi);
5230 unsigned old_align;
5231 unsigned old_misalign;
5232 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5233
5234 if (old_known
5235 && old_align >= alignments[i].align)
5236 {
5237 if (dump_file)
5238 fprintf (dump_file, " But the alignment was already %u.\n",
5239 old_align);
5240 continue;
5241 }
5242 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5243 }
5244 }
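
/* Note that the loop above only ever strengthens alignment information:
   when the known alignment of the parameter's default definition is
   already at least as large as the propagated one, the pointer info is
   left untouched. */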
5245
5246 /* IPA-CP transformation phase: apply recorded aggregate replacements and parameter alignments to the body of NODE. */
5247
5248 unsigned int
5249 ipcp_transform_function (struct cgraph_node *node)
5250 {
5251 vec<ipa_param_descriptor> descriptors = vNULL;
5252 struct ipa_func_body_info fbi;
5253 struct ipa_agg_replacement_value *aggval;
5254 int param_count;
5255 bool cfg_changed = false, something_changed = false;
5256
5257 gcc_checking_assert (cfun);
5258 gcc_checking_assert (current_function_decl);
5259
5260 if (dump_file)
5261 fprintf (dump_file, "Modification phase of node %s/%i\n",
5262 node->name (), node->order);
5263
5264 ipcp_update_alignments (node);
5265 aggval = ipa_get_agg_replacements_for_node (node);
5266 if (!aggval)
5267 return 0;
5268 param_count = count_formal_params (node->decl);
5269 if (param_count == 0)
5270 return 0;
5271 adjust_agg_replacement_values (node, aggval);
5272 if (dump_file)
5273 ipa_dump_agg_replacement_values (dump_file, aggval);
5274
5275 fbi.node = node;
5276 fbi.info = NULL;
5277 fbi.bb_infos = vNULL;
5278 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5279 fbi.param_count = param_count;
5280 fbi.aa_walked = 0;
5281
5282 descriptors.safe_grow_cleared (param_count);
5283 ipa_populate_param_decls (node, descriptors);
5284 calculate_dominance_info (CDI_DOMINATORS);
5285 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5286 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5287
5288 int i;
5289 struct ipa_bb_info *bi;
5290 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5291 free_ipa_bb_info (bi);
5292 fbi.bb_infos.release ();
5293 free_dominance_info (CDI_DOMINATORS);
5294 (*ipcp_transformations)[node->uid].agg_values = NULL;
5295 (*ipcp_transformations)[node->uid].alignments = NULL;
5296 descriptors.release ();
5297
5298 if (!something_changed)
5299 return 0;
5300 else if (cfg_changed)
5301 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5302 else
5303 return TODO_update_ssa_only_virtuals;
5304 }