1 /* Interprocedural analyses.
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Vector where the per-edge argument information is actually stored. */
61 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
62
63 /* Holders of ipa cgraph hooks: */
64 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
65 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
66 static struct cgraph_node_hook_list *function_insertion_hook_holder;
67
68 /* Description of a reference to an IPA constant. */
69 struct ipa_cst_ref_desc
70 {
71 /* Edge that corresponds to the statement which took the reference. */
72 struct cgraph_edge *cs;
73 /* Linked list of duplicates created when call graph edges are cloned. */
74 struct ipa_cst_ref_desc *next_duplicate;
75 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
76 is out of control. */
77 int refcount;
78 };
79
80 /* Allocation pool for reference descriptions. */
81
82 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
83 ("IPA-PROP ref descriptions");
84
85 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88 static bool
89 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90 {
91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
92
93 if (!fs_opts)
94 return false;
95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
96 }
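/* For illustration (hypothetical user code, not part of this file): a callee
   defined as

     __attribute__ ((optimize ("O0"))) int quiet (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION, and opt_for_fn then reports
   optimize == 0 for its decl, so the predicate above returns true and IPA-CP
   leaves the function alone.  */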
97
98 /* Return index of the formal whose tree is PTREE in the function whose
99 parameters are described by DESCRIPTORS. */
100
101 static int
102 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
103 {
104 int i, count;
105
106 count = descriptors.length ();
107 for (i = 0; i < count; i++)
108 if (descriptors[i].decl_or_type == ptree)
109 return i;
110
111 return -1;
112 }
113
114 /* Return index of the formal whose tree is PTREE in function which corresponds
115 to INFO. */
116
117 int
118 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119 {
120 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121 }
122
123 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
124 NODE. */
125
126 static void
127 ipa_populate_param_decls (struct cgraph_node *node,
128 vec<ipa_param_descriptor> &descriptors)
129 {
130 tree fndecl;
131 tree fnargs;
132 tree parm;
133 int param_num;
134
135 fndecl = node->decl;
136 gcc_assert (gimple_has_body_p (fndecl));
137 fnargs = DECL_ARGUMENTS (fndecl);
138 param_num = 0;
139 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
140 {
141 descriptors[param_num].decl_or_type = parm;
142 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
143 true);
144 param_num++;
145 }
146 }
147
148 /* Return how many formal parameters FNDECL has. */
149
150 int
151 count_formal_params (tree fndecl)
152 {
153 tree parm;
154 int count = 0;
155 gcc_assert (gimple_has_body_p (fndecl));
156
157 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
158 count++;
159
160 return count;
161 }
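/* For example (a hypothetical illustration), for a function defined as
   int f (int a, char *b)  count_formal_params returns 2, since
   DECL_ARGUMENTS chains exactly the PARM_DECLs of the named formals.  */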
162
163 /* Dump information about parameter I of the function corresponding to INFO
164 into FILE: its index and, if known, the declaration or type recorded in
165 its descriptor. */
166
167 void
168 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
169 {
170 fprintf (file, "param #%i", i);
171 if (info->descriptors[i].decl_or_type)
172 {
173 fprintf (file, " ");
174 print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
175 }
176 }
177
178 /* Initialize the ipa_node_params structure associated with NODE
179 to hold PARAM_COUNT parameters. */
180
181 void
182 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
183 {
184 struct ipa_node_params *info = IPA_NODE_REF (node);
185
186 if (!info->descriptors.exists () && param_count)
187 info->descriptors.safe_grow_cleared (param_count);
188 }
189
190 /* Initialize the ipa_node_params structure associated with NODE by counting
191 the function parameters, creating the descriptors and populating their
192 param_decls. */
193
194 void
195 ipa_initialize_node_params (struct cgraph_node *node)
196 {
197 struct ipa_node_params *info = IPA_NODE_REF (node);
198
199 if (!info->descriptors.exists ())
200 {
201 ipa_alloc_node_params (node, count_formal_params (node->decl));
202 ipa_populate_param_decls (node, info->descriptors);
203 }
204 }
205
206 /* Print the jump functions associated with call graph edge CS to file F. */
207
208 static void
209 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
210 {
211 int i, count;
212
213 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
214 for (i = 0; i < count; i++)
215 {
216 struct ipa_jump_func *jump_func;
217 enum jump_func_type type;
218
219 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
220 type = jump_func->type;
221
222 fprintf (f, " param %d: ", i);
223 if (type == IPA_JF_UNKNOWN)
224 fprintf (f, "UNKNOWN\n");
225 else if (type == IPA_JF_CONST)
226 {
227 tree val = jump_func->value.constant.value;
228 fprintf (f, "CONST: ");
229 print_generic_expr (f, val, 0);
230 if (TREE_CODE (val) == ADDR_EXPR
231 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
232 {
233 fprintf (f, " -> ");
234 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
235 0);
236 }
237 fprintf (f, "\n");
238 }
239 else if (type == IPA_JF_PASS_THROUGH)
240 {
241 fprintf (f, "PASS THROUGH: ");
242 fprintf (f, "%d, op %s",
243 jump_func->value.pass_through.formal_id,
244 get_tree_code_name(jump_func->value.pass_through.operation));
245 if (jump_func->value.pass_through.operation != NOP_EXPR)
246 {
247 fprintf (f, " ");
248 print_generic_expr (f,
249 jump_func->value.pass_through.operand, 0);
250 }
251 if (jump_func->value.pass_through.agg_preserved)
252 fprintf (f, ", agg_preserved");
253 fprintf (f, "\n");
254 }
255 else if (type == IPA_JF_ANCESTOR)
256 {
257 fprintf (f, "ANCESTOR: ");
258 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
259 jump_func->value.ancestor.formal_id,
260 jump_func->value.ancestor.offset);
261 if (jump_func->value.ancestor.agg_preserved)
262 fprintf (f, ", agg_preserved");
263 fprintf (f, "\n");
264 }
265
266 if (jump_func->agg.items)
267 {
268 struct ipa_agg_jf_item *item;
269 int j;
270
271 fprintf (f, " Aggregate passed by %s:\n",
272 jump_func->agg.by_ref ? "reference" : "value");
273 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
274 {
275 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
276 item->offset);
277 if (TYPE_P (item->value))
278 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
279 tree_to_uhwi (TYPE_SIZE (item->value)));
280 else
281 {
282 fprintf (f, "cst: ");
283 print_generic_expr (f, item->value, 0);
284 }
285 fprintf (f, "\n");
286 }
287 }
288
289 struct ipa_polymorphic_call_context *ctx
290 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
291 if (ctx && !ctx->useless_p ())
292 {
293 fprintf (f, " Context: ");
294 ctx->dump (f);
295 }
296
297 if (jump_func->bits.known)
298 {
299 fprintf (f, " value: "); print_hex (jump_func->bits.value, f);
300 fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
301 fprintf (f, "\n");
302 }
303 else
304 fprintf (f, " Unknown bits\n");
305
306 if (jump_func->vr_known)
307 {
308 fprintf (f, " VR ");
309 fprintf (f, "%s[",
310 (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
311 print_decs (jump_func->m_vr.min, f);
312 fprintf (f, ", ");
313 print_decs (jump_func->m_vr.max, f);
314 fprintf (f, "]\n");
315 }
316 else
317 fprintf (f, " Unknown VR\n");
318 }
319 }
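/* The loop above produces dump lines of roughly the following shape
   (hypothetical values, shown only to illustrate the format):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4
         Unknown bits
         Unknown VR
*/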
320
321
322 /* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
325 void
326 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
327 {
328 struct cgraph_edge *cs;
329
330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
331 node->order);
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
338 xstrdup_for_dump (node->name ()), node->order,
339 xstrdup_for_dump (cs->callee->name ()),
340 cs->callee->order);
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
343
344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
345 {
346 struct cgraph_indirect_call_info *ii;
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
349
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
352 fprintf (f, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
354 ii->member_ptr ? "member ptr" : "aggregate",
355 ii->param_index, ii->offset,
356 ii->by_ref ? "by reference" : "by value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i, "
359 "offset " HOST_WIDE_INT_PRINT_DEC,
360 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
361 ii->offset);
362
363 if (cs->call_stmt)
364 {
365 fprintf (f, ", for stmt ");
366 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
367 }
368 else
369 fprintf (f, "\n");
370 if (ii->polymorphic)
371 ii->context.dump (f);
372 ipa_print_node_jump_functions_for_edge (f, cs);
373 }
374 }
375
376 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
377
378 void
379 ipa_print_all_jump_functions (FILE *f)
380 {
381 struct cgraph_node *node;
382
383 fprintf (f, "\nJump functions:\n");
384 FOR_EACH_FUNCTION (node)
385 {
386 ipa_print_node_jump_functions (f, node);
387 }
388 }
389
390 /* Set JFUNC to be a jump function that knows nothing about the value. */
391
392 static void
393 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
394 {
395 jfunc->type = IPA_JF_UNKNOWN;
396 jfunc->bits.known = false;
397 jfunc->vr_known = false;
398 }
399
400 /* Set DST to be a copy of another jump function SRC (to be used by the
401 jump function combination code). The two functions will share their rdesc. */
402
403 static void
404 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
405 struct ipa_jump_func *src)
406
407 {
408 gcc_checking_assert (src->type == IPA_JF_CONST);
409 dst->type = IPA_JF_CONST;
410 dst->value.constant = src->value.constant;
411 }
412
413 /* Set JFUNC to be a constant jump function. */
414
415 static void
416 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
417 struct cgraph_edge *cs)
418 {
419 jfunc->type = IPA_JF_CONST;
420 jfunc->value.constant.value = unshare_expr_without_location (constant);
421
422 if (TREE_CODE (constant) == ADDR_EXPR
423 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
424 {
425 struct ipa_cst_ref_desc *rdesc;
426
427 rdesc = ipa_refdesc_pool.allocate ();
428 rdesc->cs = cs;
429 rdesc->next_duplicate = NULL;
430 rdesc->refcount = 1;
431 jfunc->value.constant.rdesc = rdesc;
432 }
433 else
434 jfunc->value.constant.rdesc = NULL;
435 }
436
437 /* Set JFUNC to be a simple pass-through jump function. */
438 static void
439 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
440 bool agg_preserved)
441 {
442 jfunc->type = IPA_JF_PASS_THROUGH;
443 jfunc->value.pass_through.operand = NULL_TREE;
444 jfunc->value.pass_through.formal_id = formal_id;
445 jfunc->value.pass_through.operation = NOP_EXPR;
446 jfunc->value.pass_through.agg_preserved = agg_preserved;
447 }
448
449 /* Set JFUNC to be an arithmetic pass through jump function. */
450
451 static void
452 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
453 tree operand, enum tree_code operation)
454 {
455 jfunc->type = IPA_JF_PASS_THROUGH;
456 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
457 jfunc->value.pass_through.formal_id = formal_id;
458 jfunc->value.pass_through.operation = operation;
459 jfunc->value.pass_through.agg_preserved = false;
460 }
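/* As a hypothetical example, when the analysis later in this file processes
   a call  bar (a_1 + 4)  where a_1 is the default definition of parameter 0,
   it builds the jump function with

     ipa_set_jf_arith_pass_through (jfunc, 0, op2, PLUS_EXPR);

   where op2 is the integer constant 4 taken from the defining statement.  */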
461
462 /* Set JFUNC to be an ancestor jump function. */
463
464 static void
465 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
466 int formal_id, bool agg_preserved)
467 {
468 jfunc->type = IPA_JF_ANCESTOR;
469 jfunc->value.ancestor.formal_id = formal_id;
470 jfunc->value.ancestor.offset = offset;
471 jfunc->value.ancestor.agg_preserved = agg_preserved;
472 }
473
474 /* Get IPA BB information about the given BB. FBI is the context of analysis
475 of this function body. */
476
477 static struct ipa_bb_info *
478 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
479 {
480 gcc_checking_assert (fbi);
481 return &fbi->bb_infos[bb->index];
482 }
483
484 /* Structure to be passed in between detect_type_change and
485 check_stmt_for_type_change. */
486
487 struct prop_type_change_info
488 {
489 /* Offset into the object at which the virtual method table pointer we are
490 looking for resides. */
491 HOST_WIDE_INT offset;
492 /* The declaration or SSA_NAME pointer of the base that we are checking for
493 type change. */
494 tree object;
495 /* Set to true if dynamic type change has been detected. */
496 bool type_maybe_changed;
497 };
498
499 /* Return true if STMT can modify a virtual method table pointer.
500
501 This function makes special assumptions about both constructors and
502 destructors which are all the functions that are allowed to alter the VMT
503 pointers. It assumes that destructors begin with assignment into all VMT
504 pointers and that constructors essentially look in the following way:
505
506 1) The very first thing they do is that they call constructors of ancestor
507 sub-objects that have them.
508
509 2) Then the VMT pointers of this object and all its ancestors are set to
510 new values corresponding to the type of the constructor.
511
512 3) Only afterwards, other stuff such as constructor of member sub-objects
513 and the code written by the user is run. Only this may include calling
514 virtual functions, directly or indirectly.
515
516 There is no way to call a constructor of an ancestor sub-object in any
517 other way.
518
519 This means that we do not have to care whether constructors get the correct
520 type information because they will always change it (in fact, if we define
521 the type to be given by the VMT pointer, it is undefined).
522
523 The most important fact to derive from the above is that if, for some
524 statement in section 3, we try to detect whether the dynamic type has
525 changed, we can safely ignore all calls as we examine the function body
526 backwards until we reach statements in section 2 because these calls cannot
527 be ancestor constructors or destructors (if the input is not bogus) and so
528 do not change the dynamic type (this holds true only for automatically
529 allocated objects but at the moment we devirtualize only these). We then
530 must detect that statements in section 2 change the dynamic type and can try
531 to derive the new type. That is enough and we can stop, we will never see
532 the calls into constructors of sub-objects in this code. Therefore we can
533 safely ignore all call statements that we traverse.
534 */
535
536 static bool
537 stmt_may_be_vtbl_ptr_store (gimple *stmt)
538 {
539 if (is_gimple_call (stmt))
540 return false;
541 if (gimple_clobber_p (stmt))
542 return false;
543 else if (is_gimple_assign (stmt))
544 {
545 tree lhs = gimple_assign_lhs (stmt);
546
547 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
548 {
549 if (flag_strict_aliasing
550 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
551 return false;
552
553 if (TREE_CODE (lhs) == COMPONENT_REF
554 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
555 return false;
556 /* In the future we might want to use get_base_ref_and_offset to find
557 if there is a field corresponding to the offset and if so, proceed
558 almost like if it was a component ref. */
559 }
560 }
561 return true;
562 }
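/* To illustrate the constructor layout assumed above with a hypothetical
   C++ hierarchy:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   the compiled body of B::B() first invokes A::A() on the same object
   (section 1), then stores the address of B's vtable into the VMT pointer
   (section 2), and only afterwards runs user-written constructor code
   (section 3).  */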
563
564 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
565 to check whether a particular statement may modify the virtual table
566 pointer. It stores its result into DATA, which points to a
567 prop_type_change_info structure. */
568
569 static bool
570 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
571 {
572 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
573 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
574
575 if (stmt_may_be_vtbl_ptr_store (stmt))
576 {
577 tci->type_maybe_changed = true;
578 return true;
579 }
580 else
581 return false;
582 }
583
584 /* See if ARG is PARM_DECL describing an instance passed by pointer
585 or reference in FUNCTION. Return true if the dynamic type may change
586 between the beginning of the function and the invocation of CALL.
587
588 Generally functions are not allowed to change the type of such instances,
589 but they may call destructors. We assume that methods cannot destroy the
590 THIS pointer. Also, as a special case, constructors and destructors may
591 change the type of the THIS pointer. */
592
593 static bool
594 param_type_may_change_p (tree function, tree arg, gimple *call)
595 {
596 /* Pure functions cannot make any changes to the dynamic type; that
597 requires writing to memory. */
598 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
599 return false;
600 /* We need to check if we are within an inlined constructor
601 or destructor (ideally we would have a way to check that the
602 inlined cdtor is actually working on ARG, but we do not have
603 an easy way to tell, so we punt on all non-pure cdtors).
604 We may also record the types of cdtors and, once we know the
605 type of the instance, match them.
606
607 Also code unification optimizations may merge calls from
608 different blocks making return values unreliable. So
609 do nothing during late optimization. */
610 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
611 return true;
612 if (TREE_CODE (arg) == SSA_NAME
613 && SSA_NAME_IS_DEFAULT_DEF (arg)
614 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
615 {
616 /* Normal (non-THIS) argument. */
617 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
618 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
619 /* THIS pointer of an method - here we want to watch constructors
620 and destructors as those definitely may change the dynamic
621 type. */
622 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
623 && !DECL_CXX_CONSTRUCTOR_P (function)
624 && !DECL_CXX_DESTRUCTOR_P (function)
625 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
626 {
627 /* Walk the inline stack and watch out for ctors/dtors. */
628 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
629 block = BLOCK_SUPERCONTEXT (block))
630 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
631 return true;
632 return false;
633 }
634 }
635 return true;
636 }
637
638 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
639 callsite CALL) by looking for assignments to its virtual table pointer. If
640 it is, return true and fill in the jump function JFUNC with relevant type
641 information or set it to unknown. ARG is the object itself (not a pointer
642 to it, unless dereferenced). BASE is the base of the memory access as
643 returned by get_ref_base_and_extent, as is the offset.
644
645 This is a helper function for detect_type_change and detect_type_change_ssa
646 that does the heavy work, which is usually unnecessary. */
647
648 static bool
649 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
650 gcall *call, struct ipa_jump_func *jfunc,
651 HOST_WIDE_INT offset)
652 {
653 struct prop_type_change_info tci;
654 ao_ref ao;
655 bool entry_reached = false;
656
657 gcc_checking_assert (DECL_P (arg)
658 || TREE_CODE (arg) == MEM_REF
659 || handled_component_p (arg));
660
661 comp_type = TYPE_MAIN_VARIANT (comp_type);
662
663 /* Const calls cannot call virtual methods through VMT and so type changes do
664 not matter. */
665 if (!flag_devirtualize || !gimple_vuse (call)
666 /* Be sure expected_type is polymorphic. */
667 || !comp_type
668 || TREE_CODE (comp_type) != RECORD_TYPE
669 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
670 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
671 return true;
672
673 ao_ref_init (&ao, arg);
674 ao.base = base;
675 ao.offset = offset;
676 ao.size = POINTER_SIZE;
677 ao.max_size = ao.size;
678
679 tci.offset = offset;
680 tci.object = get_base_address (arg);
681 tci.type_maybe_changed = false;
682
683 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
684 &tci, NULL, &entry_reached);
685 if (!tci.type_maybe_changed)
686 return false;
687
688 ipa_set_jf_unknown (jfunc);
689 return true;
690 }
691
692 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
693 If it is, return true and fill in the jump function JFUNC with relevant type
694 information or set it to unknown. ARG is the object itself (not a pointer
695 to it, unless dereferenced). BASE is the base of the memory access as
696 returned by get_ref_base_and_extent, as is the offset. */
697
698 static bool
699 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
700 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
701 {
702 if (!flag_devirtualize)
703 return false;
704
705 if (TREE_CODE (base) == MEM_REF
706 && !param_type_may_change_p (current_function_decl,
707 TREE_OPERAND (base, 0),
708 call))
709 return false;
710 return detect_type_change_from_memory_writes (arg, base, comp_type,
711 call, jfunc, offset);
712 }
713
714 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
715 SSA name (its dereference will become the base and the offset is assumed to
716 be zero). */
717
718 static bool
719 detect_type_change_ssa (tree arg, tree comp_type,
720 gcall *call, struct ipa_jump_func *jfunc)
721 {
722 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
723 if (!flag_devirtualize
724 || !POINTER_TYPE_P (TREE_TYPE (arg)))
725 return false;
726
727 if (!param_type_may_change_p (current_function_decl, arg, call))
728 return false;
729
730 arg = build2 (MEM_REF, ptr_type_node, arg,
731 build_int_cst (ptr_type_node, 0));
732
733 return detect_type_change_from_memory_writes (arg, arg, comp_type,
734 call, jfunc, 0);
735 }
736
737 /* Callback of walk_aliased_vdefs. Records the fact that it has been
738 invoked in the boolean variable pointed to by DATA. */
739
740 static bool
741 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
742 void *data)
743 {
744 bool *b = (bool *) data;
745 *b = true;
746 return true;
747 }
748
749 /* Return true if we have already walked so many statements in AA that we
750 should really just start giving up. */
751
752 static bool
753 aa_overwalked (struct ipa_func_body_info *fbi)
754 {
755 gcc_checking_assert (fbi);
756 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
757 }
758
759 /* Find the nearest valid aa status for parameter specified by INDEX that
760 dominates BB. */
761
762 static struct ipa_param_aa_status *
763 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
764 int index)
765 {
766 while (true)
767 {
768 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
769 if (!bb)
770 return NULL;
771 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
772 if (!bi->param_aa_statuses.is_empty ()
773 && bi->param_aa_statuses[index].valid)
774 return &bi->param_aa_statuses[index];
775 }
776 }
777
778 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
779 structures and/or initialize the result with a dominating description as
780 necessary. */
781
782 static struct ipa_param_aa_status *
783 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
784 int index)
785 {
786 gcc_checking_assert (fbi);
787 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
788 if (bi->param_aa_statuses.is_empty ())
789 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
790 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
791 if (!paa->valid)
792 {
793 gcc_checking_assert (!paa->parm_modified
794 && !paa->ref_modified
795 && !paa->pt_modified);
796 struct ipa_param_aa_status *dom_paa;
797 dom_paa = find_dominating_aa_status (fbi, bb, index);
798 if (dom_paa)
799 *paa = *dom_paa;
800 else
801 paa->valid = true;
802 }
803
804 return paa;
805 }
806
807 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
808 a value known not to be modified in this function before reaching the
809 statement STMT. FBI holds information about the function that we have
810 gathered so far but that does not survive the summary building stage. */
811
812 static bool
813 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
814 gimple *stmt, tree parm_load)
815 {
816 struct ipa_param_aa_status *paa;
817 bool modified = false;
818 ao_ref refd;
819
820 tree base = get_base_address (parm_load);
821 gcc_assert (TREE_CODE (base) == PARM_DECL);
822 if (TREE_READONLY (base))
823 return true;
824
825 /* FIXME: FBI can be NULL if we are being called from outside
826 ipa_node_analysis or ipcp_transform_function, which currently happens
827 during inlining analysis. It would be great to extend fbi's lifetime and
828 always have it. Currently, we are just not afraid of too much walking in
829 that case. */
830 if (fbi)
831 {
832 if (aa_overwalked (fbi))
833 return false;
834 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
835 if (paa->parm_modified)
836 return false;
837 }
838 else
839 paa = NULL;
840
841 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
842 ao_ref_init (&refd, parm_load);
843 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
844 &modified, NULL);
845 if (fbi)
846 fbi->aa_walked += walked;
847 if (paa && modified)
848 paa->parm_modified = true;
849 return !modified;
850 }
851
852 /* If STMT is an assignment that loads a value from a parameter declaration,
853 return the index of the parameter in ipa_node_params, provided it has not
854 been modified. Otherwise return -1. */
855
856 static int
857 load_from_unmodified_param (struct ipa_func_body_info *fbi,
858 vec<ipa_param_descriptor> descriptors,
859 gimple *stmt)
860 {
861 int index;
862 tree op1;
863
864 if (!gimple_assign_single_p (stmt))
865 return -1;
866
867 op1 = gimple_assign_rhs1 (stmt);
868 if (TREE_CODE (op1) != PARM_DECL)
869 return -1;
870
871 index = ipa_get_param_decl_index_1 (descriptors, op1);
872 if (index < 0
873 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
874 return -1;
875
876 return index;
877 }
878
879 /* Return true if memory reference REF (which must be a load through parameter
880 with INDEX) loads data that are known to be unmodified in this function
881 before reaching statement STMT. */
882
883 static bool
884 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
885 int index, gimple *stmt, tree ref)
886 {
887 struct ipa_param_aa_status *paa;
888 bool modified = false;
889 ao_ref refd;
890
891 /* FIXME: FBI can be NULL if we are being called from outside
892 ipa_node_analysis or ipcp_transform_function, which currently happens
893 during inlining analysis. It would be great to extend fbi's lifetime and
894 always have it. Currently, we are just not afraid of too much walking in
895 that case. */
896 if (fbi)
897 {
898 if (aa_overwalked (fbi))
899 return false;
900 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
901 if (paa->ref_modified)
902 return false;
903 }
904 else
905 paa = NULL;
906
907 gcc_checking_assert (gimple_vuse (stmt));
908 ao_ref_init (&refd, ref);
909 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
910 &modified, NULL);
911 if (fbi)
912 fbi->aa_walked += walked;
913 if (paa && modified)
914 paa->ref_modified = true;
915 return !modified;
916 }
917
918 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
919 is known to be unmodified in this function before reaching call statement
920 CALL into which it is passed. FBI describes the function body. */
921
922 static bool
923 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
924 gimple *call, tree parm)
925 {
926 bool modified = false;
927 ao_ref refd;
928
929 /* It's unnecessary to calculate anything about memory contents for a const
930 function because it is not going to use it. But do not cache the result
931 either. Also, no such calculations for non-pointers. */
932 if (!gimple_vuse (call)
933 || !POINTER_TYPE_P (TREE_TYPE (parm))
934 || aa_overwalked (fbi))
935 return false;
936
937 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
938 gimple_bb (call),
939 index);
940 if (paa->pt_modified)
941 return false;
942
943 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
944 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
945 &modified, NULL);
946 fbi->aa_walked += walked;
947 if (modified)
948 paa->pt_modified = true;
949 return !modified;
950 }
951
952 /* Return true if we can prove that OP is a memory reference loading
953 data from an aggregate passed as a parameter.
954
955 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
956 false if it cannot prove that the value has not been modified before the
957 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
958 if it cannot prove the value has not been modified, in that case it will
959 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
960
961 FBI and DESCRIPTORS describe parameters of the current function (the
962 former can be NULL), STMT is the load statement. If the function returns true,
963 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
964 within the aggregate and whether it is a load from a value passed by
965 reference respectively. */
966
967 bool
968 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
969 vec<ipa_param_descriptor> descriptors,
970 gimple *stmt, tree op, int *index_p,
971 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
972 bool *by_ref_p, bool *guaranteed_unmodified)
973 {
974 int index;
975 HOST_WIDE_INT size, max_size;
976 bool reverse;
977 tree base
978 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
979
980 if (max_size == -1 || max_size != size || *offset_p < 0)
981 return false;
982
983 if (DECL_P (base))
984 {
985 int index = ipa_get_param_decl_index_1 (descriptors, base);
986 if (index >= 0
987 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
988 {
989 *index_p = index;
990 *by_ref_p = false;
991 if (size_p)
992 *size_p = size;
993 if (guaranteed_unmodified)
994 *guaranteed_unmodified = true;
995 return true;
996 }
997 return false;
998 }
999
1000 if (TREE_CODE (base) != MEM_REF
1001 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1002 || !integer_zerop (TREE_OPERAND (base, 1)))
1003 return false;
1004
1005 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1006 {
1007 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1008 index = ipa_get_param_decl_index_1 (descriptors, parm);
1009 }
1010 else
1011 {
1012 /* This branch catches situations where a pointer parameter is not a
1013 gimple register, for example:
1014
1015 void hip7(S*) (struct S * p)
1016 {
1017 void (*<T2e4>) (struct S *) D.1867;
1018 struct S * p.1;
1019
1020 <bb 2>:
1021 p.1_1 = p;
1022 D.1867_2 = p.1_1->f;
1023 D.1867_2 ();
1024 gdp = &p;
1025 */
1026
1027 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1028 index = load_from_unmodified_param (fbi, descriptors, def);
1029 }
1030
1031 if (index >= 0)
1032 {
1033 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1034 if (!data_preserved && !guaranteed_unmodified)
1035 return false;
1036
1037 *index_p = index;
1038 *by_ref_p = true;
1039 if (size_p)
1040 *size_p = size;
1041 if (guaranteed_unmodified)
1042 *guaranteed_unmodified = data_preserved;
1043 return true;
1044 }
1045 return false;
1046 }
1047
1048 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1049 of an assignment statement STMT, try to determine whether we are actually
1050 handling any of the following cases and construct an appropriate jump
1051 function into JFUNC if so:
1052
1053 1) The passed value is loaded from a formal parameter which is not a gimple
1054 register (most probably because it is addressable, the value has to be
1055 scalar) and we can guarantee the value has not changed. This case can
1056 therefore be described by a simple pass-through jump function. For example:
1057
1058 foo (int a)
1059 {
1060 int a.0;
1061
1062 a.0_2 = a;
1063 bar (a.0_2);
1064
1065 2) The passed value can be described by a simple arithmetic pass-through
1066 jump function. E.g.
1067
1068 foo (int a)
1069 {
1070 int D.2064;
1071
1072 D.2064_4 = a.1(D) + 4;
1073 bar (D.2064_4);
1074
1075 This case can also occur in combination of the previous one, e.g.:
1076
1077 foo (int a, int z)
1078 {
1079 int a.0;
1080 int D.2064;
1081
1082 a.0_3 = a;
1083 D.2064_4 = a.0_3 + 4;
1084 foo (D.2064_4);
1085
1086 3) The passed value is an address of an object within another one (which
1087 is also passed by reference). Such situations are described by an ancestor
1088 jump function and describe situations such as:
1089
1090 B::foo() (struct B * const this)
1091 {
1092 struct A * D.1845;
1093
1094 D.1845_2 = &this_1(D)->D.1748;
1095 A::bar (D.1845_2);
1096
1097 INFO is the structure describing individual parameters as accessed at
1098 different stages of IPA optimizations. FBI contains the information that
1099 is only needed for intraprocedural analysis. */
1100
1101 static void
1102 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1103 struct ipa_node_params *info,
1104 struct ipa_jump_func *jfunc,
1105 gcall *call, gimple *stmt, tree name,
1106 tree param_type)
1107 {
1108 HOST_WIDE_INT offset, size, max_size;
1109 tree op1, tc_ssa, base, ssa;
1110 bool reverse;
1111 int index;
1112
1113 op1 = gimple_assign_rhs1 (stmt);
1114
1115 if (TREE_CODE (op1) == SSA_NAME)
1116 {
1117 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1118 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1119 else
1120 index = load_from_unmodified_param (fbi, info->descriptors,
1121 SSA_NAME_DEF_STMT (op1));
1122 tc_ssa = op1;
1123 }
1124 else
1125 {
1126 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1127 tc_ssa = gimple_assign_lhs (stmt);
1128 }
1129
1130 if (index >= 0)
1131 {
1132 tree op2 = gimple_assign_rhs2 (stmt);
1133
1134 if (op2)
1135 {
1136 if (!is_gimple_ip_invariant (op2)
1137 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1138 && !useless_type_conversion_p (TREE_TYPE (name),
1139 TREE_TYPE (op1))))
1140 return;
1141
1142 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1143 gimple_assign_rhs_code (stmt));
1144 }
1145 else if (gimple_assign_single_p (stmt))
1146 {
1147 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1148 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1149 }
1150 return;
1151 }
1152
1153 if (TREE_CODE (op1) != ADDR_EXPR)
1154 return;
1155 op1 = TREE_OPERAND (op1, 0);
1156 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1157 return;
1158 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1159 if (TREE_CODE (base) != MEM_REF
1160 /* If this is a varying address, punt. */
1161 || max_size == -1
1162 || max_size != size)
1163 return;
1164 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1165 ssa = TREE_OPERAND (base, 0);
1166 if (TREE_CODE (ssa) != SSA_NAME
1167 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1168 || offset < 0)
1169 return;
1170
1171 /* Dynamic types are changed in constructors and destructors. */
1172 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1173 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1174 ipa_set_ancestor_jf (jfunc, offset, index,
1175 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1176 }
1177
1178 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1179 it looks like:
1180
1181 iftmp.1_3 = &obj_2(D)->D.1762;
1182
1183 The base of the MEM_REF must be a default definition SSA NAME of a
1184 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1185 whole MEM_REF expression is returned and the offset calculated from any
1186 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1187 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1188
1189 static tree
1190 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1191 {
1192 HOST_WIDE_INT size, max_size;
1193 tree expr, parm, obj;
1194 bool reverse;
1195
1196 if (!gimple_assign_single_p (assign))
1197 return NULL_TREE;
1198 expr = gimple_assign_rhs1 (assign);
1199
1200 if (TREE_CODE (expr) != ADDR_EXPR)
1201 return NULL_TREE;
1202 expr = TREE_OPERAND (expr, 0);
1203 obj = expr;
1204 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1205
1206 if (TREE_CODE (expr) != MEM_REF
1207 /* If this is a varying address, punt. */
1208 || max_size == -1
1209 || max_size != size
1210 || *offset < 0)
1211 return NULL_TREE;
1212 parm = TREE_OPERAND (expr, 0);
1213 if (TREE_CODE (parm) != SSA_NAME
1214 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1215 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1216 return NULL_TREE;
1217
1218 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1219 *obj_p = obj;
1220 return expr;
1221 }
1222
1223
1224 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1225 statement PHI, try to find out whether NAME is in fact a
1226 multiple-inheritance typecast from a descendant into an ancestor of a formal
1227 parameter and thus can be described by an ancestor jump function and if so,
1228 write the appropriate function into JFUNC.
1229
1230 Essentially we want to match the following pattern:
1231
1232 if (obj_2(D) != 0B)
1233 goto <bb 3>;
1234 else
1235 goto <bb 4>;
1236
1237 <bb 3>:
1238 iftmp.1_3 = &obj_2(D)->D.1762;
1239
1240 <bb 4>:
1241 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1242 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1243 return D.1879_6; */
1244
1245 static void
1246 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1247 struct ipa_node_params *info,
1248 struct ipa_jump_func *jfunc,
1249 gcall *call, gphi *phi)
1250 {
1251 HOST_WIDE_INT offset;
1252 gimple *assign, *cond;
1253 basic_block phi_bb, assign_bb, cond_bb;
1254 tree tmp, parm, expr, obj;
1255 int index, i;
1256
1257 if (gimple_phi_num_args (phi) != 2)
1258 return;
1259
1260 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1261 tmp = PHI_ARG_DEF (phi, 0);
1262 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1263 tmp = PHI_ARG_DEF (phi, 1);
1264 else
1265 return;
1266 if (TREE_CODE (tmp) != SSA_NAME
1267 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1268 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1269 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1270 return;
1271
1272 assign = SSA_NAME_DEF_STMT (tmp);
1273 assign_bb = gimple_bb (assign);
1274 if (!single_pred_p (assign_bb))
1275 return;
1276 expr = get_ancestor_addr_info (assign, &obj, &offset);
1277 if (!expr)
1278 return;
1279 parm = TREE_OPERAND (expr, 0);
1280 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1281 if (index < 0)
1282 return;
1283
1284 cond_bb = single_pred (assign_bb);
1285 cond = last_stmt (cond_bb);
1286 if (!cond
1287 || gimple_code (cond) != GIMPLE_COND
1288 || gimple_cond_code (cond) != NE_EXPR
1289 || gimple_cond_lhs (cond) != parm
1290 || !integer_zerop (gimple_cond_rhs (cond)))
1291 return;
1292
1293 phi_bb = gimple_bb (phi);
1294 for (i = 0; i < 2; i++)
1295 {
1296 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1297 if (pred != assign_bb && pred != cond_bb)
1298 return;
1299 }
1300
1301 ipa_set_ancestor_jf (jfunc, offset, index,
1302 parm_ref_data_pass_through_p (fbi, index, call, parm));
1303 }
1304
1305 /* Inspect the given TYPE and return true iff it has the same structure (the
1306 same number of fields of the same types) as a C++ member pointer. If
1307 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1308 corresponding fields there. */
1309
1310 static bool
1311 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1312 {
1313 tree fld;
1314
1315 if (TREE_CODE (type) != RECORD_TYPE)
1316 return false;
1317
1318 fld = TYPE_FIELDS (type);
1319 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1320 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1321 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1322 return false;
1323
1324 if (method_ptr)
1325 *method_ptr = fld;
1326
1327 fld = DECL_CHAIN (fld);
1328 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1329 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1330 return false;
1331 if (delta)
1332 *delta = fld;
1333
1334 if (DECL_CHAIN (fld))
1335 return false;
1336
1337 return true;
1338 }
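/* For instance, the record a C++ front end builds for a pointer to member
   function, roughly

     struct {
       void (*__pfn) ();   // method pointer or vtable index
       long __delta;       // adjustment of the this pointer
     };

   satisfies this predicate (field names are illustrative only).  */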
1339
1340 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1341 return the rhs of its defining statement. Otherwise return RHS as it
1342 is. */
1343
1344 static inline tree
1345 get_ssa_def_if_simple_copy (tree rhs)
1346 {
1347 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1348 {
1349 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1350
1351 if (gimple_assign_single_p (def_stmt))
1352 rhs = gimple_assign_rhs1 (def_stmt);
1353 else
1354 break;
1355 }
1356 return rhs;
1357 }
1358
1359 /* Simple linked list, describing known contents of an aggregate before the
1360 call. */
1361
1362 struct ipa_known_agg_contents_list
1363 {
1364 /* Offset and size of the described part of the aggregate. */
1365 HOST_WIDE_INT offset, size;
1366 /* Known constant value or NULL if the contents are known to be unknown. */
1367 tree constant;
1368 /* Pointer to the next structure in the list. */
1369 struct ipa_known_agg_contents_list *next;
1370 };
1371
1372 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1373 structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE,
1374 unless there is a partial overlap, in which case return NULL, or such an
1375 element is already there, in which case set *ALREADY_THERE to true. */
1376
1377 static struct ipa_known_agg_contents_list **
1378 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1379 HOST_WIDE_INT lhs_offset,
1380 HOST_WIDE_INT lhs_size,
1381 bool *already_there)
1382 {
1383 struct ipa_known_agg_contents_list **p = list;
1384 while (*p && (*p)->offset < lhs_offset)
1385 {
1386 if ((*p)->offset + (*p)->size > lhs_offset)
1387 return NULL;
1388 p = &(*p)->next;
1389 }
1390
1391 if (*p && (*p)->offset < lhs_offset + lhs_size)
1392 {
1393 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1394 /* We already know this value is subsequently overwritten with
1395 something else. */
1396 *already_there = true;
1397 else
1398 /* Otherwise this is a partial overlap which we cannot
1399 represent. */
1400 return NULL;
1401 }
1402 return p;
1403 }
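/* For example (hypothetical contents): if the list already describes pieces
   at offsets 0 and 64, each 32 bits wide, then a new piece at offset 0 with
   size 32 sets *ALREADY_THERE, one at offset 16 partially overlaps the first
   and yields NULL, and one at offset 32 is linked in between the two.  */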
1404
1405 /* Build an aggregate jump function from LIST, assuming there are exactly
1406 CONST_COUNT constant entries there and that the offset of the passed
1407 argument is ARG_OFFSET, and store the result into JFUNC. */
1408
1409 static void
1410 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1411 int const_count, HOST_WIDE_INT arg_offset,
1412 struct ipa_jump_func *jfunc)
1413 {
1414 vec_alloc (jfunc->agg.items, const_count);
1415 while (list)
1416 {
1417 if (list->constant)
1418 {
1419 struct ipa_agg_jf_item item;
1420 item.offset = list->offset - arg_offset;
1421 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1422 item.value = unshare_expr_without_location (list->constant);
1423 jfunc->agg.items->quick_push (item);
1424 }
1425 list = list->next;
1426 }
1427 }
1428
1429 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1430 in ARG is filled in with constant values. ARG can either be an aggregate
1431 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1432 aggregate. JFUNC is the jump function into which the constants are
1433 subsequently stored. */
1434
1435 static void
1436 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1437 tree arg_type,
1438 struct ipa_jump_func *jfunc)
1439 {
1440 struct ipa_known_agg_contents_list *list = NULL;
1441 int item_count = 0, const_count = 0;
1442 HOST_WIDE_INT arg_offset, arg_size;
1443 gimple_stmt_iterator gsi;
1444 tree arg_base;
1445 bool check_ref, by_ref;
1446 ao_ref r;
1447
1448 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1449 return;
1450
1451 /* The function operates in three stages. First, we prepare check_ref, r,
1452 arg_base and arg_offset based on what is actually passed as the
1453 argument. */
1454
1455 if (POINTER_TYPE_P (arg_type))
1456 {
1457 by_ref = true;
1458 if (TREE_CODE (arg) == SSA_NAME)
1459 {
1460 tree type_size;
1461 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1462 return;
1463 check_ref = true;
1464 arg_base = arg;
1465 arg_offset = 0;
1466 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1467 arg_size = tree_to_uhwi (type_size);
1468 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1469 }
1470 else if (TREE_CODE (arg) == ADDR_EXPR)
1471 {
1472 HOST_WIDE_INT arg_max_size;
1473 bool reverse;
1474
1475 arg = TREE_OPERAND (arg, 0);
1476 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1477 &arg_max_size, &reverse);
1478 if (arg_max_size == -1
1479 || arg_max_size != arg_size
1480 || arg_offset < 0)
1481 return;
1482 if (DECL_P (arg_base))
1483 {
1484 check_ref = false;
1485 ao_ref_init (&r, arg_base);
1486 }
1487 else
1488 return;
1489 }
1490 else
1491 return;
1492 }
1493 else
1494 {
1495 HOST_WIDE_INT arg_max_size;
1496 bool reverse;
1497
1498 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1499
1500 by_ref = false;
1501 check_ref = false;
1502 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1503 &arg_max_size, &reverse);
1504 if (arg_max_size == -1
1505 || arg_max_size != arg_size
1506 || arg_offset < 0)
1507 return;
1508
1509 ao_ref_init (&r, arg);
1510 }
1511
1512 /* Second stage walks back the BB, looks at individual statements and as long
1513 as it is confident of how the statements affect contents of the
1514 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1515 describing it. */
1516 gsi = gsi_for_stmt (call);
1517 gsi_prev (&gsi);
1518 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1519 {
1520 struct ipa_known_agg_contents_list *n, **p;
1521 gimple *stmt = gsi_stmt (gsi);
1522 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1523 tree lhs, rhs, lhs_base;
1524 bool reverse;
1525
1526 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1527 continue;
1528 if (!gimple_assign_single_p (stmt))
1529 break;
1530
1531 lhs = gimple_assign_lhs (stmt);
1532 rhs = gimple_assign_rhs1 (stmt);
1533 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1534 || TREE_CODE (lhs) == BIT_FIELD_REF
1535 || contains_bitfld_component_ref_p (lhs))
1536 break;
1537
1538 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1539 &lhs_max_size, &reverse);
1540 if (lhs_max_size == -1
1541 || lhs_max_size != lhs_size)
1542 break;
1543
1544 if (check_ref)
1545 {
1546 if (TREE_CODE (lhs_base) != MEM_REF
1547 || TREE_OPERAND (lhs_base, 0) != arg_base
1548 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1549 break;
1550 }
1551 else if (lhs_base != arg_base)
1552 {
1553 if (DECL_P (lhs_base))
1554 continue;
1555 else
1556 break;
1557 }
1558
1559 bool already_there = false;
1560 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1561 &already_there);
1562 if (!p)
1563 break;
1564 if (already_there)
1565 continue;
1566
1567 rhs = get_ssa_def_if_simple_copy (rhs);
1568 n = XALLOCA (struct ipa_known_agg_contents_list);
1569 n->size = lhs_size;
1570 n->offset = lhs_offset;
1571 if (is_gimple_ip_invariant (rhs))
1572 {
1573 n->constant = rhs;
1574 const_count++;
1575 }
1576 else
1577 n->constant = NULL_TREE;
1578 n->next = *p;
1579 *p = n;
1580
1581 item_count++;
1582 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1583 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1584 break;
1585 }
1586
1587 /* Third stage just goes over the list and creates an appropriate vector of
1588 ipa_agg_jf_item structures out of it, of course only if there are
1589 any known constants to begin with. */
1590
1591 if (const_count)
1592 {
1593 jfunc->agg.by_ref = by_ref;
1594 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1595 }
1596 }
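/* A hypothetical caller illustrating what the walk above can recover:

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   Walking backwards from the call finds the two stores, and the resulting
   aggregate jump function records the offsets of a and b together with the
   constants 1 and 2, passed by reference since the argument is &s.  */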
1597
1598 /* Return the Ith param type of callee associated with call graph
1599 edge E. */
1600
1601 tree
1602 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1603 {
1604 int n;
1605 tree type = (e->callee
1606 ? TREE_TYPE (e->callee->decl)
1607 : gimple_call_fntype (e->call_stmt));
1608 tree t = TYPE_ARG_TYPES (type);
1609
1610 for (n = 0; n < i; n++)
1611 {
1612 if (!t)
1613 break;
1614 t = TREE_CHAIN (t);
1615 }
1616 if (t)
1617 return TREE_VALUE (t);
1618 if (!e->callee)
1619 return NULL;
1620 t = DECL_ARGUMENTS (e->callee->decl);
1621 for (n = 0; n < i; n++)
1622 {
1623 if (!t)
1624 return NULL;
1625 t = TREE_CHAIN (t);
1626 }
1627 if (t)
1628 return TREE_TYPE (t);
1629 return NULL;
1630 }
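/* E.g. for a callee declared K&R style as  int foo ();  TYPE_ARG_TYPES is
   NULL, so the loop over it finds nothing and the type is recovered from
   DECL_ARGUMENTS of the callee instead, when a callee declaration is
   available (an illustrative case, not the only one).  */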
1631
1632 /* Compute jump function for all arguments of callsite CS and insert the
1633 information in the jump_functions array in the ipa_edge_args corresponding
1634 to this callsite. */
1635
1636 static void
1637 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1638 struct cgraph_edge *cs)
1639 {
1640 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1641 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1642 gcall *call = cs->call_stmt;
1643 int n, arg_num = gimple_call_num_args (call);
1644 bool useful_context = false;
1645
1646 if (arg_num == 0 || args->jump_functions)
1647 return;
1648 vec_safe_grow_cleared (args->jump_functions, arg_num);
1649 if (flag_devirtualize)
1650 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1651
1652 if (gimple_call_internal_p (call))
1653 return;
1654 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1655 return;
1656
1657 for (n = 0; n < arg_num; n++)
1658 {
1659 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1660 tree arg = gimple_call_arg (call, n);
1661 tree param_type = ipa_get_callee_param_type (cs, n);
1662 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1663 {
1664 tree instance;
1665 struct ipa_polymorphic_call_context context (cs->caller->decl,
1666 arg, cs->call_stmt,
1667 &instance);
1668 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1669 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1670 if (!context.useless_p ())
1671 useful_context = true;
1672 }
1673
1674 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1675 {
1676 bool addr_nonzero = false;
1677 bool strict_overflow = false;
1678
1679 if (TREE_CODE (arg) == SSA_NAME
1680 && param_type
1681 && get_ptr_nonnull (arg))
1682 addr_nonzero = true;
1683 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1684 addr_nonzero = true;
1685
1686 if (addr_nonzero)
1687 {
1688 jfunc->vr_known = true;
1689 jfunc->m_vr.type = VR_ANTI_RANGE;
1690 jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
1691 jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
1692 jfunc->m_vr.equiv = NULL;
1693 }
1694 else
1695 gcc_assert (!jfunc->vr_known);
1696 }
1697 else
1698 {
1699 wide_int min, max;
1700 value_range_type type;
1701 if (TREE_CODE (arg) == SSA_NAME
1702 && param_type
1703 && (type = get_range_info (arg, &min, &max))
1704 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1705 {
1706 value_range vr;
1707
1708 vr.type = type;
1709 vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1710 vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1711 vr.equiv = NULL;
1712 extract_range_from_unary_expr (&jfunc->m_vr,
1713 NOP_EXPR,
1714 param_type,
1715 &vr, TREE_TYPE (arg));
1716 if (jfunc->m_vr.type == VR_RANGE
1717 || jfunc->m_vr.type == VR_ANTI_RANGE)
1718 jfunc->vr_known = true;
1719 else
1720 jfunc->vr_known = false;
1721 }
1722 else
1723 gcc_assert (!jfunc->vr_known);
1724 }
1725
1726 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1727 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1728 {
1729 jfunc->bits.known = true;
1730
1731 if (TREE_CODE (arg) == SSA_NAME)
1732 {
1733 jfunc->bits.value = 0;
1734 jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
1735 TYPE_SIGN (TREE_TYPE (arg)));
1736 }
1737 else
1738 {
1739 jfunc->bits.value = wi::to_widest (arg);
1740 jfunc->bits.mask = 0;
1741 }
1742 }
1743 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1744 {
1745 unsigned HOST_WIDE_INT bitpos;
1746 unsigned align;
1747
1748 jfunc->bits.known = true;
1749 get_pointer_alignment_1 (arg, &align, &bitpos);
1750 jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1751 .and_not (align / BITS_PER_UNIT - 1);
1752 jfunc->bits.value = bitpos / BITS_PER_UNIT;
1753 }
1754 else
1755 gcc_assert (!jfunc->bits.known);
1756
1757 if (is_gimple_ip_invariant (arg)
1758 || (VAR_P (arg)
1759 && is_global_var (arg)
1760 && TREE_READONLY (arg)))
1761 ipa_set_jf_constant (jfunc, arg, cs);
1762 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1763 && TREE_CODE (arg) == PARM_DECL)
1764 {
1765 int index = ipa_get_param_decl_index (info, arg);
1766
1767 gcc_assert (index >= 0);
1768 /* Aggregate passed by value, check for pass-through, otherwise we
1769 will attempt to fill in aggregate contents later in this
1770 for cycle. */
1771 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1772 {
1773 ipa_set_jf_simple_pass_through (jfunc, index, false);
1774 continue;
1775 }
1776 }
1777 else if (TREE_CODE (arg) == SSA_NAME)
1778 {
1779 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1780 {
1781 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1782 if (index >= 0)
1783 {
1784 bool agg_p;
1785 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1786 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1787 }
1788 }
1789 else
1790 {
1791 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1792 if (is_gimple_assign (stmt))
1793 compute_complex_assign_jump_func (fbi, info, jfunc,
1794 call, stmt, arg, param_type);
1795 else if (gimple_code (stmt) == GIMPLE_PHI)
1796 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1797 call,
1798 as_a <gphi *> (stmt));
1799 }
1800 }
1801
1802 /* If ARG is a pointer, we cannot use its type to determine the type of the
1803 aggregate passed (because type conversions are ignored in gimple). Usually
1804 we can safely get the type from the function declaration, but for K&R
1805 prototypes or variadic functions we can try our luck with the type of the
1806 pointer passed. TODO: Since we look for actual initialization of the memory
1807 object, it might be better to work out the type based on the stores we find. */
1808 if (!param_type)
1809 param_type = TREE_TYPE (arg);
1810
1811 if ((jfunc->type != IPA_JF_PASS_THROUGH
1812 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1813 && (jfunc->type != IPA_JF_ANCESTOR
1814 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1815 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1816 || POINTER_TYPE_P (param_type)))
1817 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1818 }
1819 if (!useful_context)
1820 vec_free (args->polymorphic_call_contexts);
1821 }
1822
1823 /* Compute jump functions for all edges - both direct and indirect - outgoing
1824 from BB. */
1825
1826 static void
1827 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1828 {
1829 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1830 int i;
1831 struct cgraph_edge *cs;
1832
1833 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1834 {
1835 struct cgraph_node *callee = cs->callee;
1836
1837 if (callee)
1838 {
1839 callee = callee->ultimate_alias_target ();
1840 /* We do not need to bother analyzing calls to unknown functions
1841 unless they may become known during lto/whopr. */
1842 if (!callee->definition && !flag_lto)
1843 continue;
1844 }
1845 ipa_compute_jump_functions_for_edge (fbi, cs);
1846 }
1847 }
1848
1849 /* If STMT looks like a statement loading a value from a member pointer formal
1850 parameter, return that parameter and store the offset of the field in
1851 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1852 might be clobbered). If USE_DELTA, then we look for a use of the delta
1853 field rather than the pfn. */
1854
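/* For instance (mirroring the gimple dumps further below), loads such as

     f$__pfn_24 = MEM[(struct *)&f];          <- plain MEM_REF form
     f$__pfn_24 = MEM[(struct *)&f].__pfn;    <- COMPONENT_REF over MEM_REF

   match when F is a PARM_DECL whose type looks like a member pointer,
   returning F with *OFFSET_P set to the bit position of __pfn (or of
   __delta when USE_DELTA is set).  */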
1855 static tree
1856 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
1857 HOST_WIDE_INT *offset_p)
1858 {
1859 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1860
1861 if (!gimple_assign_single_p (stmt))
1862 return NULL_TREE;
1863
1864 rhs = gimple_assign_rhs1 (stmt);
1865 if (TREE_CODE (rhs) == COMPONENT_REF)
1866 {
1867 ref_field = TREE_OPERAND (rhs, 1);
1868 rhs = TREE_OPERAND (rhs, 0);
1869 }
1870 else
1871 ref_field = NULL_TREE;
1872 if (TREE_CODE (rhs) != MEM_REF)
1873 return NULL_TREE;
1874 rec = TREE_OPERAND (rhs, 0);
1875 if (TREE_CODE (rec) != ADDR_EXPR)
1876 return NULL_TREE;
1877 rec = TREE_OPERAND (rec, 0);
1878 if (TREE_CODE (rec) != PARM_DECL
1879 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1880 return NULL_TREE;
1881 ref_offset = TREE_OPERAND (rhs, 1);
1882
1883 if (use_delta)
1884 fld = delta_field;
1885 else
1886 fld = ptr_field;
1887 if (offset_p)
1888 *offset_p = int_bit_position (fld);
1889
1890 if (ref_field)
1891 {
1892 if (integer_nonzerop (ref_offset))
1893 return NULL_TREE;
1894 return ref_field == fld ? rec : NULL_TREE;
1895 }
1896 else
1897 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1898 : NULL_TREE;
1899 }
1900
1901 /* Returns true iff T is an SSA_NAME defined by a statement. */
1902
1903 static bool
1904 ipa_is_ssa_with_stmt_def (tree t)
1905 {
1906 if (TREE_CODE (t) == SSA_NAME
1907 && !SSA_NAME_IS_DEFAULT_DEF (t))
1908 return true;
1909 else
1910 return false;
1911 }
1912
1913 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1914 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1915 indirect call graph edge. */
1916
1917 static struct cgraph_edge *
1918 ipa_note_param_call (struct cgraph_node *node, int param_index,
1919 gcall *stmt)
1920 {
1921 struct cgraph_edge *cs;
1922
1923 cs = node->get_edge (stmt);
1924 cs->indirect_info->param_index = param_index;
1925 cs->indirect_info->agg_contents = 0;
1926 cs->indirect_info->member_ptr = 0;
1927 cs->indirect_info->guaranteed_unmodified = 0;
1928 return cs;
1929 }
1930
1931 /* Analyze CALL and examine uses of formal parameters of the caller
1932 FBI->node (described by FBI->info, which also holds intermediate
1933 information about each formal parameter). Currently it checks
1934 whether the call calls a pointer that is a formal parameter and if so, the
1935 parameter is marked with the called flag and an indirect call graph edge
1936 describing the call is created. This is very simple for ordinary pointers
1937 represented in SSA but not-so-nice when it comes to member pointers. The
1938 ugly part of this function does nothing more than trying to match the
1939 pattern of such a call. An example of such a pattern is the gimple dump
1940 below, the call is on the last line:
1941
1942 <bb 2>:
1943 f$__delta_5 = f.__delta;
1944 f$__pfn_24 = f.__pfn;
1945
1946 or
1947 <bb 2>:
1948 f$__delta_5 = MEM[(struct *)&f];
1949 f$__pfn_24 = MEM[(struct *)&f + 4B];
1950
1951 and a few lines below:
1952
1953 <bb 5>
1954 D.2496_3 = (int) f$__pfn_24;
1955 D.2497_4 = D.2496_3 & 1;
1956 if (D.2497_4 != 0)
1957 goto <bb 3>;
1958 else
1959 goto <bb 4>;
1960
1961 <bb 6>:
1962 D.2500_7 = (unsigned int) f$__delta_5;
1963 D.2501_8 = &S + D.2500_7;
1964 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1965 D.2503_10 = *D.2502_9;
1966 D.2504_12 = f$__pfn_24 + -1;
1967 D.2505_13 = (unsigned int) D.2504_12;
1968 D.2506_14 = D.2503_10 + D.2505_13;
1969 D.2507_15 = *D.2506_14;
1970 iftmp.11_16 = (String:: *) D.2507_15;
1971
1972 <bb 7>:
1973 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1974 D.2500_19 = (unsigned int) f$__delta_5;
1975 D.2508_20 = &S + D.2500_19;
1976 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1977
1978 Such patterns are results of simple calls to a member pointer:
1979
1980 int doprinting (int (MyString::* f)(int) const)
1981 {
1982 MyString S ("somestring");
1983
1984 return (S.*f)(4);
1985 }
1986
1987 Moreover, the function also looks for called pointers loaded from aggregates
1988 passed by value or reference. */
1989
1990 static void
1991 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1992 tree target)
1993 {
1994 struct ipa_node_params *info = fbi->info;
1995 HOST_WIDE_INT offset;
1996 bool by_ref;
1997
1998 if (SSA_NAME_IS_DEFAULT_DEF (target))
1999 {
2000 tree var = SSA_NAME_VAR (target);
2001 int index = ipa_get_param_decl_index (info, var);
2002 if (index >= 0)
2003 ipa_note_param_call (fbi->node, index, call);
2004 return;
2005 }
2006
2007 int index;
2008 gimple *def = SSA_NAME_DEF_STMT (target);
2009 bool guaranteed_unmodified;
2010 if (gimple_assign_single_p (def)
2011 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2012 gimple_assign_rhs1 (def), &index, &offset,
2013 NULL, &by_ref, &guaranteed_unmodified))
2014 {
2015 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2016 cs->indirect_info->offset = offset;
2017 cs->indirect_info->agg_contents = 1;
2018 cs->indirect_info->by_ref = by_ref;
2019 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2020 return;
2021 }
2022
2023 /* Now we need to try to match the complex pattern of calling a member
2024 pointer. */
2025 if (gimple_code (def) != GIMPLE_PHI
2026 || gimple_phi_num_args (def) != 2
2027 || !POINTER_TYPE_P (TREE_TYPE (target))
2028 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2029 return;
2030
2031 /* First, we need to check whether one of these is a load from a member
2032 pointer that is a parameter to this function. */
2033 tree n1 = PHI_ARG_DEF (def, 0);
2034 tree n2 = PHI_ARG_DEF (def, 1);
2035 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2036 return;
2037 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2038 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2039
2040 tree rec;
2041 basic_block bb, virt_bb;
2042 basic_block join = gimple_bb (def);
2043 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2044 {
2045 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2046 return;
2047
2048 bb = EDGE_PRED (join, 0)->src;
2049 virt_bb = gimple_bb (d2);
2050 }
2051 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2052 {
2053 bb = EDGE_PRED (join, 1)->src;
2054 virt_bb = gimple_bb (d1);
2055 }
2056 else
2057 return;
2058
2059 /* Second, we need to check that the basic blocks are laid out in the way
2060 corresponding to the pattern. */
2061
2062 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2063 || single_pred (virt_bb) != bb
2064 || single_succ (virt_bb) != join)
2065 return;
2066
2067 /* Third, let's see that the branching is done depending on the least
2068 significant bit of the pfn. */
2069
2070 gimple *branch = last_stmt (bb);
2071 if (!branch || gimple_code (branch) != GIMPLE_COND)
2072 return;
2073
2074 if ((gimple_cond_code (branch) != NE_EXPR
2075 && gimple_cond_code (branch) != EQ_EXPR)
2076 || !integer_zerop (gimple_cond_rhs (branch)))
2077 return;
2078
2079 tree cond = gimple_cond_lhs (branch);
2080 if (!ipa_is_ssa_with_stmt_def (cond))
2081 return;
2082
2083 def = SSA_NAME_DEF_STMT (cond);
2084 if (!is_gimple_assign (def)
2085 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2086 || !integer_onep (gimple_assign_rhs2 (def)))
2087 return;
2088
2089 cond = gimple_assign_rhs1 (def);
2090 if (!ipa_is_ssa_with_stmt_def (cond))
2091 return;
2092
2093 def = SSA_NAME_DEF_STMT (cond);
2094
2095 if (is_gimple_assign (def)
2096 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2097 {
2098 cond = gimple_assign_rhs1 (def);
2099 if (!ipa_is_ssa_with_stmt_def (cond))
2100 return;
2101 def = SSA_NAME_DEF_STMT (cond);
2102 }
2103
2104 tree rec2;
2105 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2106 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2107 == ptrmemfunc_vbit_in_delta),
2108 NULL);
2109 if (rec != rec2)
2110 return;
2111
2112 index = ipa_get_param_decl_index (info, rec);
2113 if (index >= 0
2114 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2115 {
2116 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2117 cs->indirect_info->offset = offset;
2118 cs->indirect_info->agg_contents = 1;
2119 cs->indirect_info->member_ptr = 1;
2120 cs->indirect_info->guaranteed_unmodified = 1;
2121 }
2122
2123 return;
2124 }
2125
2126 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2127 object referenced in the expression is a formal parameter of the caller
2128 FBI->node (described by FBI->info), create a call note for the
2129 statement. */
2130
2131 static void
2132 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2133 gcall *call, tree target)
2134 {
2135 tree obj = OBJ_TYPE_REF_OBJECT (target);
2136 int index;
2137 HOST_WIDE_INT anc_offset;
2138
2139 if (!flag_devirtualize)
2140 return;
2141
2142 if (TREE_CODE (obj) != SSA_NAME)
2143 return;
2144
2145 struct ipa_node_params *info = fbi->info;
2146 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2147 {
2148 struct ipa_jump_func jfunc;
2149 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2150 return;
2151
2152 anc_offset = 0;
2153 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2154 gcc_assert (index >= 0);
2155 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2156 call, &jfunc))
2157 return;
2158 }
2159 else
2160 {
2161 struct ipa_jump_func jfunc;
2162 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2163 tree expr;
2164
2165 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2166 if (!expr)
2167 return;
2168 index = ipa_get_param_decl_index (info,
2169 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2170 gcc_assert (index >= 0);
2171 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2172 call, &jfunc, anc_offset))
2173 return;
2174 }
2175
2176 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2177 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2178 ii->offset = anc_offset;
2179 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2180 ii->otr_type = obj_type_ref_class (target);
2181 ii->polymorphic = 1;
2182 }
2183
2184 /* Analyze call statement CALL to determine whether and how it utilizes formal
2185 parameters of the caller (described by FBI->info, which also carries
2186 intermediate information about each formal parameter). */
2187
2188 static void
2189 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2190 {
2191 tree target = gimple_call_fn (call);
2192
2193 if (!target
2194 || (TREE_CODE (target) != SSA_NAME
2195 && !virtual_method_call_p (target)))
2196 return;
2197
2198 struct cgraph_edge *cs = fbi->node->get_edge (call);
2199 /* If we previously turned the call into a direct call, there is
2200 no need to analyze. */
2201 if (cs && !cs->indirect_unknown_callee)
2202 return;
2203
2204 if (cs->indirect_info->polymorphic && flag_devirtualize)
2205 {
2206 tree instance;
2207 tree target = gimple_call_fn (call);
2208 ipa_polymorphic_call_context context (current_function_decl,
2209 target, call, &instance);
2210
2211 gcc_checking_assert (cs->indirect_info->otr_type
2212 == obj_type_ref_class (target));
2213 gcc_checking_assert (cs->indirect_info->otr_token
2214 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2215
2216 cs->indirect_info->vptr_changed
2217 = !context.get_dynamic_type (instance,
2218 OBJ_TYPE_REF_OBJECT (target),
2219 obj_type_ref_class (target), call);
2220 cs->indirect_info->context = context;
2221 }
2222
2223 if (TREE_CODE (target) == SSA_NAME)
2224 ipa_analyze_indirect_call_uses (fbi, call, target);
2225 else if (virtual_method_call_p (target))
2226 ipa_analyze_virtual_call_uses (fbi, call, target);
2227 }
2228
2229
2230 /* Analyze the call statement STMT with respect to formal parameters (described
2231 in FBI->info) of the caller given by FBI->node. Currently it only checks
2232 whether formal parameters are called. */
2233
2234 static void
2235 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2236 {
2237 if (is_gimple_call (stmt))
2238 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2239 }
2240
2241 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and address operands alike.
2242 If OP is a parameter declaration, mark it as used in the info structure
2243 passed in DATA. */
2244
2245 static bool
2246 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2247 {
2248 struct ipa_node_params *info = (struct ipa_node_params *) data;
2249
2250 op = get_base_address (op);
2251 if (op
2252 && TREE_CODE (op) == PARM_DECL)
2253 {
2254 int index = ipa_get_param_decl_index (info, op);
2255 gcc_assert (index >= 0);
2256 ipa_set_param_used (info, index, true);
2257 }
2258
2259 return false;
2260 }
2261
2262 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2263 the findings in various structures of the associated ipa_node_params
2264 structure, such as parameter flags, notes etc. FBI holds various data about
2265 the function being analyzed. */
2266
2267 static void
2268 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2269 {
2270 gimple_stmt_iterator gsi;
2271 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2272 {
2273 gimple *stmt = gsi_stmt (gsi);
2274
2275 if (is_gimple_debug (stmt))
2276 continue;
2277
2278 ipa_analyze_stmt_uses (fbi, stmt);
2279 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2280 visit_ref_for_mod_analysis,
2281 visit_ref_for_mod_analysis,
2282 visit_ref_for_mod_analysis);
2283 }
2284 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2285 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2286 visit_ref_for_mod_analysis,
2287 visit_ref_for_mod_analysis,
2288 visit_ref_for_mod_analysis);
2289 }
2290
2291 /* Calculate controlled uses of parameters of NODE. */
2292
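/* A use is counted as "controlled" when it appears in a call statement;
   any other non-debug use makes the count IPA_UNDESCRIBED_USE.  For
   example (illustrative): a parameter used only as an argument in two
   calls gets controlled_uses == 2, while one that is also copied by a
   plain assignment ends up with IPA_UNDESCRIBED_USE.  */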
2293 static void
2294 ipa_analyze_controlled_uses (struct cgraph_node *node)
2295 {
2296 struct ipa_node_params *info = IPA_NODE_REF (node);
2297
2298 for (int i = 0; i < ipa_get_param_count (info); i++)
2299 {
2300 tree parm = ipa_get_param (info, i);
2301 int controlled_uses = 0;
2302
2303 /* For SSA regs, see if the parameter is used; for non-SSA parameters we
2304 compute the flag during modification analysis. */
2305 if (is_gimple_reg (parm))
2306 {
2307 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2308 parm);
2309 if (ddef && !has_zero_uses (ddef))
2310 {
2311 imm_use_iterator imm_iter;
2312 use_operand_p use_p;
2313
2314 ipa_set_param_used (info, i, true);
2315 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2316 if (!is_gimple_call (USE_STMT (use_p)))
2317 {
2318 if (!is_gimple_debug (USE_STMT (use_p)))
2319 {
2320 controlled_uses = IPA_UNDESCRIBED_USE;
2321 break;
2322 }
2323 }
2324 else
2325 controlled_uses++;
2326 }
2327 else
2328 controlled_uses = 0;
2329 }
2330 else
2331 controlled_uses = IPA_UNDESCRIBED_USE;
2332 ipa_set_controlled_uses (info, i, controlled_uses);
2333 }
2334 }
2335
2336 /* Release the vectors held in BI. */
2337
2338 static void
2339 free_ipa_bb_info (struct ipa_bb_info *bi)
2340 {
2341 bi->cg_edges.release ();
2342 bi->param_aa_statuses.release ();
2343 }
2344
2345 /* Dominator walker driving the analysis. */
2346
2347 class analysis_dom_walker : public dom_walker
2348 {
2349 public:
2350 analysis_dom_walker (struct ipa_func_body_info *fbi)
2351 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2352
2353 virtual edge before_dom_children (basic_block);
2354
2355 private:
2356 struct ipa_func_body_info *m_fbi;
2357 };
2358
2359 edge
2360 analysis_dom_walker::before_dom_children (basic_block bb)
2361 {
2362 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2363 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2364 return NULL;
2365 }
2366
2367 /* Release body info FBI. */
2368
2369 void
2370 ipa_release_body_info (struct ipa_func_body_info *fbi)
2371 {
2372 int i;
2373 struct ipa_bb_info *bi;
2374
2375 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2376 free_ipa_bb_info (bi);
2377 fbi->bb_infos.release ();
2378 }
2379
2380 /* Initialize the array describing properties of formal parameters
2381 of NODE, analyze their uses and compute jump functions associated
2382 with actual arguments of calls from within NODE. */
2383
2384 void
2385 ipa_analyze_node (struct cgraph_node *node)
2386 {
2387 struct ipa_func_body_info fbi;
2388 struct ipa_node_params *info;
2389
2390 ipa_check_create_node_params ();
2391 ipa_check_create_edge_args ();
2392 info = IPA_NODE_REF (node);
2393
2394 if (info->analysis_done)
2395 return;
2396 info->analysis_done = 1;
2397
2398 if (ipa_func_spec_opts_forbid_analysis_p (node))
2399 {
2400 for (int i = 0; i < ipa_get_param_count (info); i++)
2401 {
2402 ipa_set_param_used (info, i, true);
2403 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2404 }
2405 return;
2406 }
2407
2408 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2409 push_cfun (func);
2410 calculate_dominance_info (CDI_DOMINATORS);
2411 ipa_initialize_node_params (node);
2412 ipa_analyze_controlled_uses (node);
2413
2414 fbi.node = node;
2415 fbi.info = IPA_NODE_REF (node);
2416 fbi.bb_infos = vNULL;
2417 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2418 fbi.param_count = ipa_get_param_count (info);
2419 fbi.aa_walked = 0;
2420
2421 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2422 {
2423 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2424 bi->cg_edges.safe_push (cs);
2425 }
2426
2427 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2428 {
2429 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2430 bi->cg_edges.safe_push (cs);
2431 }
2432
2433 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2434
2435 ipa_release_body_info (&fbi);
2436 free_dominance_info (CDI_DOMINATORS);
2437 pop_cfun ();
2438 }
2439
2440 /* Update the jump functions associated with call graph edge E when the call
2441 graph edge CS is being inlined, assuming that E->caller is already (possibly
2442 indirectly) inlined into CS->callee and that E has not been inlined. */
2443
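/* The loop below composes jump functions: for instance (an illustrative
   case), if E's i-th jump function is a simple NOP_EXPR pass-through of
   CS->callee's parameter 2 and CS's jump function for argument 2 says it
   is the constant 7, then after inlining CS the i-th jump function of E
   becomes the constant 7 (the IPA_JF_CONST case in the switch below).  */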
2444 static void
2445 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2446 struct cgraph_edge *e)
2447 {
2448 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2449 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2450 int count = ipa_get_cs_argument_count (args);
2451 int i;
2452
2453 for (i = 0; i < count; i++)
2454 {
2455 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2456 struct ipa_polymorphic_call_context *dst_ctx
2457 = ipa_get_ith_polymorhic_call_context (args, i);
2458
2459 if (dst->type == IPA_JF_ANCESTOR)
2460 {
2461 struct ipa_jump_func *src;
2462 int dst_fid = dst->value.ancestor.formal_id;
2463 struct ipa_polymorphic_call_context *src_ctx
2464 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2465
2466 /* A variable number of arguments can cause havoc if we try to access
2467 one that does not exist in the inlined edge. So make sure we
2468 don't. */
2469 if (dst_fid >= ipa_get_cs_argument_count (top))
2470 {
2471 ipa_set_jf_unknown (dst);
2472 continue;
2473 }
2474
2475 src = ipa_get_ith_jump_func (top, dst_fid);
2476
2477 if (src_ctx && !src_ctx->useless_p ())
2478 {
2479 struct ipa_polymorphic_call_context ctx = *src_ctx;
2480
2481 /* TODO: Make type preserved safe WRT contexts. */
2482 if (!ipa_get_jf_ancestor_type_preserved (dst))
2483 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2484 ctx.offset_by (dst->value.ancestor.offset);
2485 if (!ctx.useless_p ())
2486 {
2487 if (!dst_ctx)
2488 {
2489 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2490 count);
2491 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2492 }
2493
2494 dst_ctx->combine_with (ctx);
2495 }
2496 }
2497
2498 if (src->agg.items
2499 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2500 {
2501 struct ipa_agg_jf_item *item;
2502 int j;
2503
2504 /* Currently we do not produce clobber aggregate jump functions;
2505 replace this with merging when we do. */
2506 gcc_assert (!dst->agg.items);
2507
2508 dst->agg.items = vec_safe_copy (src->agg.items);
2509 dst->agg.by_ref = src->agg.by_ref;
2510 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2511 item->offset -= dst->value.ancestor.offset;
2512 }
2513
2514 if (src->type == IPA_JF_PASS_THROUGH
2515 && src->value.pass_through.operation == NOP_EXPR)
2516 {
2517 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2518 dst->value.ancestor.agg_preserved &=
2519 src->value.pass_through.agg_preserved;
2520 }
2521 else if (src->type == IPA_JF_ANCESTOR)
2522 {
2523 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2524 dst->value.ancestor.offset += src->value.ancestor.offset;
2525 dst->value.ancestor.agg_preserved &=
2526 src->value.ancestor.agg_preserved;
2527 }
2528 else
2529 ipa_set_jf_unknown (dst);
2530 }
2531 else if (dst->type == IPA_JF_PASS_THROUGH)
2532 {
2533 struct ipa_jump_func *src;
2534 /* We must check the range because of calls with a variable number of
2535 arguments, and we cannot combine jump functions with operations. */
2536 if (dst->value.pass_through.operation == NOP_EXPR
2537 && (dst->value.pass_through.formal_id
2538 < ipa_get_cs_argument_count (top)))
2539 {
2540 int dst_fid = dst->value.pass_through.formal_id;
2541 src = ipa_get_ith_jump_func (top, dst_fid);
2542 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2543 struct ipa_polymorphic_call_context *src_ctx
2544 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2545
2546 if (src_ctx && !src_ctx->useless_p ())
2547 {
2548 struct ipa_polymorphic_call_context ctx = *src_ctx;
2549
2550 /* TODO: Make type preserved safe WRT contexts. */
2551 if (!ipa_get_jf_pass_through_type_preserved (dst))
2552 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2553 if (!ctx.useless_p ())
2554 {
2555 if (!dst_ctx)
2556 {
2557 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2558 count);
2559 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2560 }
2561 dst_ctx->combine_with (ctx);
2562 }
2563 }
2564 switch (src->type)
2565 {
2566 case IPA_JF_UNKNOWN:
2567 ipa_set_jf_unknown (dst);
2568 break;
2569 case IPA_JF_CONST:
2570 ipa_set_jf_cst_copy (dst, src);
2571 break;
2572
2573 case IPA_JF_PASS_THROUGH:
2574 {
2575 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2576 enum tree_code operation;
2577 operation = ipa_get_jf_pass_through_operation (src);
2578
2579 if (operation == NOP_EXPR)
2580 {
2581 bool agg_p;
2582 agg_p = dst_agg_p
2583 && ipa_get_jf_pass_through_agg_preserved (src);
2584 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2585 }
2586 else
2587 {
2588 tree operand = ipa_get_jf_pass_through_operand (src);
2589 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2590 operation);
2591 }
2592 break;
2593 }
2594 case IPA_JF_ANCESTOR:
2595 {
2596 bool agg_p;
2597 agg_p = dst_agg_p
2598 && ipa_get_jf_ancestor_agg_preserved (src);
2599 ipa_set_ancestor_jf (dst,
2600 ipa_get_jf_ancestor_offset (src),
2601 ipa_get_jf_ancestor_formal_id (src),
2602 agg_p);
2603 break;
2604 }
2605 default:
2606 gcc_unreachable ();
2607 }
2608
2609 if (src->agg.items
2610 && (dst_agg_p || !src->agg.by_ref))
2611 {
2612 /* Currently we do not produce clobber aggregate jump
2613 functions; replace this with merging when we do. */
2614 gcc_assert (!dst->agg.items);
2615
2616 dst->agg.by_ref = src->agg.by_ref;
2617 dst->agg.items = vec_safe_copy (src->agg.items);
2618 }
2619 }
2620 else
2621 ipa_set_jf_unknown (dst);
2622 }
2623 }
2624 }
2625
2626 /* If TARGET is an addr_expr of a function declaration, make it the
2627 destination (speculative, if SPECULATIVE is set) of indirect edge IE and
2628 return the edge. Otherwise, return NULL. */
2629
2630 struct cgraph_edge *
2631 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2632 bool speculative)
2633 {
2634 struct cgraph_node *callee;
2635 struct inline_edge_summary *es = inline_edge_summary (ie);
2636 bool unreachable = false;
2637
2638 if (TREE_CODE (target) == ADDR_EXPR)
2639 target = TREE_OPERAND (target, 0);
2640 if (TREE_CODE (target) != FUNCTION_DECL)
2641 {
2642 target = canonicalize_constructor_val (target, NULL);
2643 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2644 {
2645 /* Member pointer call that goes through a VMT lookup. */
2646 if (ie->indirect_info->member_ptr
2647 /* Or if the target is not an invariant expression and we do not
2648 know whether it will evaluate to a function at runtime.
2649 This can happen when folding through &VAR, where &VAR
2650 is IP invariant, but VAR itself is not.
2651
2652 TODO: Revisit this when GCC 5 is branched. It seems that
2653 member_ptr check is not needed and that we may try to fold
2654 the expression and see if VAR is readonly. */
2655 || !is_gimple_ip_invariant (target))
2656 {
2657 if (dump_enabled_p ())
2658 {
2659 location_t loc = gimple_location_safe (ie->call_stmt);
2660 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2661 "discovered direct call non-invariant "
2662 "%s/%i\n",
2663 ie->caller->name (), ie->caller->order);
2664 }
2665 return NULL;
2666 }
2667
2668
2669 if (dump_enabled_p ())
2670 {
2671 location_t loc = gimple_location_safe (ie->call_stmt);
2672 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2673 "discovered direct call to non-function in %s/%i, "
2674 "making it __builtin_unreachable\n",
2675 ie->caller->name (), ie->caller->order);
2676 }
2677
2678 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2679 callee = cgraph_node::get_create (target);
2680 unreachable = true;
2681 }
2682 else
2683 callee = cgraph_node::get (target);
2684 }
2685 else
2686 callee = cgraph_node::get (target);
2687
2688 /* Because may-edges are not explicitly represented and the vtable may be
2689 external, we may create the first reference to the object in the unit. */
2690 if (!callee || callee->global.inlined_to)
2691 {
2692
2693 /* We had better ensure we can refer to it.
2694 In the case of static functions we are out of luck, since we have
2695 already removed their bodies. In the case of public functions we
2696 may or may not introduce the reference. */
2697 if (!canonicalize_constructor_val (target, NULL)
2698 || !TREE_PUBLIC (target))
2699 {
2700 if (dump_file)
2701 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2702 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2703 xstrdup_for_dump (ie->caller->name ()),
2704 ie->caller->order,
2705 xstrdup_for_dump (ie->callee->name ()),
2706 ie->callee->order);
2707 return NULL;
2708 }
2709 callee = cgraph_node::get_create (target);
2710 }
2711
2712 /* If the edge is already speculative, check that the speculation agrees. */
2713 if (speculative && ie->speculative)
2714 {
2715 struct cgraph_edge *e2;
2716 struct ipa_ref *ref;
2717 ie->speculative_call_info (e2, ie, ref);
2718 if (e2->callee->ultimate_alias_target ()
2719 != callee->ultimate_alias_target ())
2720 {
2721 if (dump_file)
2722 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2723 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2724 xstrdup_for_dump (ie->caller->name ()),
2725 ie->caller->order,
2726 xstrdup_for_dump (callee->name ()),
2727 callee->order,
2728 xstrdup_for_dump (e2->callee->name ()),
2729 e2->callee->order);
2730 }
2731 else
2732 {
2733 if (dump_file)
2734 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2735 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2736 xstrdup_for_dump (ie->caller->name ()),
2737 ie->caller->order,
2738 xstrdup_for_dump (callee->name ()),
2739 callee->order);
2740 }
2741 return NULL;
2742 }
2743
2744 if (!dbg_cnt (devirt))
2745 return NULL;
2746
2747 ipa_check_create_node_params ();
2748
2749 /* We cannot make edges to inline clones. It is a bug if someone removed
2750 the cgraph node too early. */
2751 gcc_assert (!callee->global.inlined_to);
2752
2753 if (dump_file && !unreachable)
2754 {
2755 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2756 "(%s/%i -> %s/%i), for stmt ",
2757 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2758 speculative ? "speculative" : "known",
2759 xstrdup_for_dump (ie->caller->name ()),
2760 ie->caller->order,
2761 xstrdup_for_dump (callee->name ()),
2762 callee->order);
2763 if (ie->call_stmt)
2764 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2765 else
2766 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2767 }
2768 if (dump_enabled_p ())
2769 {
2770 location_t loc = gimple_location_safe (ie->call_stmt);
2771
2772 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2773 "converting indirect call in %s to direct call to %s\n",
2774 ie->caller->name (), callee->name ());
2775 }
2776 if (!speculative)
2777 {
2778 struct cgraph_edge *orig = ie;
2779 ie = ie->make_direct (callee);
2780 /* If we resolved a speculative edge, the cost is already up to date
2781 for the direct call (adjusted by inline_edge_duplication_hook). */
2782 if (ie == orig)
2783 {
2784 es = inline_edge_summary (ie);
2785 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2786 - eni_size_weights.call_cost);
2787 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2788 - eni_time_weights.call_cost);
2789 }
2790 }
2791 else
2792 {
2793 if (!callee->can_be_discarded_p ())
2794 {
2795 cgraph_node *alias;
2796 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2797 if (alias)
2798 callee = alias;
2799 }
2800 /* make_speculative will update ie's cost to direct call cost. */
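/* The 8/10 scaling below gives the speculative direct edge roughly 80%
   of the original count and frequency; the rest stays with the indirect
   edge (a heuristic, not a measured probability).  */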
2801 ie = ie->make_speculative
2802 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2803 }
2804
2805 return ie;
2806 }
2807
2808 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2809 CONSTRUCTOR and return it. Return NULL if the search is unsuccessful. */
2811
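/* For instance (illustrative only), with

     static const int a[4] = {10, 20, 30, 40};

   and REQ_OFFSET == 64 on a target with 32-bit int, the array branch
   below computes elt_offset == 2 * 32 == 64 for the third initializer
   and the function returns the constant 30.  Nested CONSTRUCTORs are
   handled by recursing with the remaining offset.  */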
2812 static tree
2813 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2814 {
2815 tree type = TREE_TYPE (constructor);
2816 if (TREE_CODE (type) != ARRAY_TYPE
2817 && TREE_CODE (type) != RECORD_TYPE)
2818 return NULL;
2819
2820 unsigned ix;
2821 tree index, val;
2822 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2823 {
2824 HOST_WIDE_INT elt_offset;
2825 if (TREE_CODE (type) == ARRAY_TYPE)
2826 {
2827 offset_int off;
2828 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2829 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2830
2831 if (index)
2832 {
2833 off = wi::to_offset (index);
2834 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2835 {
2836 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2837 gcc_assert (TREE_CODE (low_bound) == INTEGER_CST);
2838 off = wi::sext (off - wi::to_offset (low_bound),
2839 TYPE_PRECISION (TREE_TYPE (index)));
2840 }
2841 off *= wi::to_offset (unit_size);
2842 }
2843 else
2844 off = wi::to_offset (unit_size) * ix;
2845
2846 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
2847 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
2848 continue;
2849 elt_offset = off.to_shwi ();
2850 }
2851 else if (TREE_CODE (type) == RECORD_TYPE)
2852 {
2853 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
2854 if (DECL_BIT_FIELD (index))
2855 continue;
2856 elt_offset = int_bit_position (index);
2857 }
2858 else
2859 gcc_unreachable ();
2860
2861 if (elt_offset > req_offset)
2862 return NULL;
2863
2864 if (TREE_CODE (val) == CONSTRUCTOR)
2865 return find_constructor_constant_at_offset (val,
2866 req_offset - elt_offset);
2867
2868 if (elt_offset == req_offset
2869 && is_gimple_reg_type (TREE_TYPE (val))
2870 && is_gimple_ip_invariant (val))
2871 return val;
2872 }
2873 return NULL;
2874 }
2875
2876 /* Check whether SCALAR could be used to look up an aggregate interprocedural
2877 invariant from a static constructor and if so, return it. Otherwise return
2878 NULL. */
2879
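/* For example (illustrative only), with

     static const struct { int x; int y; } s = { 3, 4 };

   a by-reference lookup with SCALAR == &s and OFFSET == 32 (the bit
   position of Y with 32-bit int) strips the ADDR_EXPR, checks that S is
   a read-only global with a CONSTRUCTOR initializer and returns 4.  */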
2880 static tree
2881 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2882 {
2883 if (by_ref)
2884 {
2885 if (TREE_CODE (scalar) != ADDR_EXPR)
2886 return NULL;
2887 scalar = TREE_OPERAND (scalar, 0);
2888 }
2889
2890 if (!VAR_P (scalar)
2891 || !is_global_var (scalar)
2892 || !TREE_READONLY (scalar)
2893 || !DECL_INITIAL (scalar)
2894 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2895 return NULL;
2896
2897 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2898 }
2899
2900 /* Retrieve value from aggregate jump function AGG or static initializer of
2901 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
2902 none. BY_REF specifies whether the value has to be passed by reference or
2903 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
2904 to is set to true if the value comes from an initializer of a constant. */
2905
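/* A minimal usage sketch (hypothetical values): if AGG->items contains
   {offset: 0, value: &foo} and AGG->by_ref matches BY_REF, a query with
   OFFSET == 0 returns &foo and sets *FROM_GLOBAL_CONSTANT to false; had
   the value instead been found through SCALAR's initializer, the flag
   would have been set to true.  */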
2906 tree
2907 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
2908 HOST_WIDE_INT offset, bool by_ref,
2909 bool *from_global_constant)
2910 {
2911 struct ipa_agg_jf_item *item;
2912 int i;
2913
2914 if (scalar)
2915 {
2916 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
2917 if (res)
2918 {
2919 if (from_global_constant)
2920 *from_global_constant = true;
2921 return res;
2922 }
2923 }
2924
2925 if (!agg
2926 || by_ref != agg->by_ref)
2927 return NULL;
2928
2929 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2930 if (item->offset == offset)
2931 {
2932 /* Currently we do not have clobber values; return NULL for them once
2933 we do. */
2934 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2935 if (from_global_constant)
2936 *from_global_constant = false;
2937 return item->value;
2938 }
2939 return NULL;
2940 }
2941
2942 /* Remove a reference to SYMBOL from the list of references of a node given by
2943 reference description RDESC. Return true if the reference has been
2944 successfully found and removed. */
2945
2946 static bool
2947 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2948 {
2949 struct ipa_ref *to_del;
2950 struct cgraph_edge *origin;
2951
2952 origin = rdesc->cs;
2953 if (!origin)
2954 return false;
2955 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2956 origin->lto_stmt_uid);
2957 if (!to_del)
2958 return false;
2959
2960 to_del->remove_reference ();
2961 if (dump_file)
2962 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2963 xstrdup_for_dump (origin->caller->name ()),
2964 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2965 return true;
2966 }
2967
2968 /* If JFUNC has a reference description with refcount different from
2969 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2970 NULL. JFUNC must be a constant jump function. */
2971
2972 static struct ipa_cst_ref_desc *
2973 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2974 {
2975 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2976 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2977 return rdesc;
2978 else
2979 return NULL;
2980 }
2981
2982 /* If the value of constant jump function JFUNC is an address of a function
2983 declaration, return the associated call graph node. Otherwise return
2984 NULL. */
2985
2986 static cgraph_node *
2987 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2988 {
2989 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2990 tree cst = ipa_get_jf_constant (jfunc);
2991 if (TREE_CODE (cst) != ADDR_EXPR
2992 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2993 return NULL;
2994
2995 return cgraph_node::get (TREE_OPERAND (cst, 0));
2996 }
2997
2998
2999 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3000 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3001 the edge specified in the rdesc. Return false if either the symbol or the
3002 reference could not be found, otherwise return true. */
3003
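/* In other words, the refcount tracks how many IPA_JF_CONST jump
   functions still refer to the IPA reference created for the constant;
   e.g. once the last edge passing &f as a known constant is made direct
   or removed, the cloning-created reference from the caller to f can be
   dropped via remove_described_reference.  */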
3004 static bool
3005 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3006 {
3007 struct ipa_cst_ref_desc *rdesc;
3008 if (jfunc->type == IPA_JF_CONST
3009 && (rdesc = jfunc_rdesc_usable (jfunc))
3010 && --rdesc->refcount == 0)
3011 {
3012 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3013 if (!symbol)
3014 return false;
3015
3016 return remove_described_reference (symbol, rdesc);
3017 }
3018 return true;
3019 }
3020
3021 /* Try to find a destination for indirect edge IE that corresponds to a simple
3022 call or a call of a member function pointer and where the destination is a
3023 pointer formal parameter described by jump function JFUNC. If it can be
3024 determined, return the newly direct edge, otherwise return NULL.
3025 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3026
3027 static struct cgraph_edge *
3028 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3029 struct ipa_jump_func *jfunc,
3030 struct ipa_node_params *new_root_info)
3031 {
3032 struct cgraph_edge *cs;
3033 tree target;
3034 bool agg_contents = ie->indirect_info->agg_contents;
3035 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3036 if (agg_contents)
3037 {
3038 bool from_global_constant;
3039 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3040 ie->indirect_info->offset,
3041 ie->indirect_info->by_ref,
3042 &from_global_constant);
3043 if (target
3044 && !from_global_constant
3045 && !ie->indirect_info->guaranteed_unmodified)
3046 return NULL;
3047 }
3048 else
3049 target = scalar;
3050 if (!target)
3051 return NULL;
3052 cs = ipa_make_edge_direct_to_target (ie, target);
3053
3054 if (cs && !agg_contents)
3055 {
3056 bool ok;
3057 gcc_checking_assert (cs->callee
3058 && (cs != ie
3059 || jfunc->type != IPA_JF_CONST
3060 || !cgraph_node_for_jfunc (jfunc)
3061 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3062 ok = try_decrement_rdesc_refcount (jfunc);
3063 gcc_checking_assert (ok);
3064 }
3065
3066 return cs;
3067 }
3068
3069 /* Return the target to be used in cases of impossible devirtualization. IE
3070 and target (the latter can be NULL) are dumped when dumping is enabled. */
3071
3072 tree
3073 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3074 {
3075 if (dump_file)
3076 {
3077 if (target)
3078 fprintf (dump_file,
3079 "Type inconsistent devirtualization: %s/%i->%s\n",
3080 ie->caller->name (), ie->caller->order,
3081 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3082 else
3083 fprintf (dump_file,
3084 "No devirtualization target in %s/%i\n",
3085 ie->caller->name (), ie->caller->order);
3086 }
3087 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3088 cgraph_node::get_create (new_target);
3089 return new_target;
3090 }
3091
3092 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3093 call based on a formal parameter which is described by jump function JFUNC
3094 and if it can be determined, make it direct and return the direct edge.
3095 Otherwise, return NULL. CTX describes the polymorphic context that the
3096 parameter the call is based on brings along with it. */
3097
3098 static struct cgraph_edge *
3099 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3100 struct ipa_jump_func *jfunc,
3101 struct ipa_polymorphic_call_context ctx)
3102 {
3103 tree target = NULL;
3104 bool speculative = false;
3105
3106 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3107 return NULL;
3108
3109 gcc_assert (!ie->indirect_info->by_ref);
3110
3111 /* Try to do lookup via known virtual table pointer value. */
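/* For instance (hypothetical symbols): if the aggregate jump function
   says the relevant memory holds &_ZTV3Foo + 16,
   vtable_pointer_value_to_vtable recovers Foo's vtable and the offset
   within it, and gimple_get_virt_method_for_vtable then yields the
   method that OTR_TOKEN selects.  */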
3112 if (!ie->indirect_info->vptr_changed
3113 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3114 {
3115 tree vtable;
3116 unsigned HOST_WIDE_INT offset;
3117 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3118 : NULL;
3119 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3120 ie->indirect_info->offset,
3121 true);
3122 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3123 {
3124 bool can_refer;
3125 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3126 vtable, offset, &can_refer);
3127 if (can_refer)
3128 {
3129 if (!t
3130 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3131 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3132 || !possible_polymorphic_call_target_p
3133 (ie, cgraph_node::get (t)))
3134 {
3135 /* Do not speculate builtin_unreachable; that would be pointless. */
3136 if (!ie->indirect_info->vptr_changed)
3137 target = ipa_impossible_devirt_target (ie, target);
3138 else
3139 target = NULL;
3140 }
3141 else
3142 {
3143 target = t;
3144 speculative = ie->indirect_info->vptr_changed;
3145 }
3146 }
3147 }
3148 }
3149
3150 ipa_polymorphic_call_context ie_context (ie);
3151 vec <cgraph_node *>targets;
3152 bool final;
3153
3154 ctx.offset_by (ie->indirect_info->offset);
3155 if (ie->indirect_info->vptr_changed)
3156 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3157 ie->indirect_info->otr_type);
3158 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3159 targets = possible_polymorphic_call_targets
3160 (ie->indirect_info->otr_type,
3161 ie->indirect_info->otr_token,
3162 ctx, &final);
3163 if (final && targets.length () <= 1)
3164 {
3165 speculative = false;
3166 if (targets.length () == 1)
3167 target = targets[0]->decl;
3168 else
3169 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3170 }
3171 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3172 && !ie->speculative && ie->maybe_hot_p ())
3173 {
3174 cgraph_node *n;
3175 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3176 ie->indirect_info->otr_token,
3177 ie->indirect_info->context);
3178 if (n)
3179 {
3180 target = n->decl;
3181 speculative = true;
3182 }
3183 }
3184
3185 if (target)
3186 {
3187 if (!possible_polymorphic_call_target_p
3188 (ie, cgraph_node::get_create (target)))
3189 {
3190 if (speculative)
3191 return NULL;
3192 target = ipa_impossible_devirt_target (ie, target);
3193 }
3194 return ipa_make_edge_direct_to_target (ie, target, speculative);
3195 }
3196 else
3197 return NULL;
3198 }
3199
3200 /* Update the param called notes associated with NODE when CS is being inlined,
3201 assuming NODE is (potentially indirectly) inlined into CS->callee.
3202 Moreover, if the callee is discovered to be constant, create a new cgraph
3203 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3204 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3205
3206 static bool
3207 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3208 struct cgraph_node *node,
3209 vec<cgraph_edge *> *new_edges)
3210 {
3211 struct ipa_edge_args *top;
3212 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3213 struct ipa_node_params *new_root_info;
3214 bool res = false;
3215
3216 ipa_check_create_edge_args ();
3217 top = IPA_EDGE_REF (cs);
3218 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3219 ? cs->caller->global.inlined_to
3220 : cs->caller);
3221
3222 for (ie = node->indirect_calls; ie; ie = next_ie)
3223 {
3224 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3225 struct ipa_jump_func *jfunc;
3226 int param_index;
3227 cgraph_node *spec_target = NULL;
3228
3229 next_ie = ie->next_callee;
3230
3231 if (ici->param_index == -1)
3232 continue;
3233
3234 /* We must check the range because of calls with a variable number of arguments. */
3235 if (ici->param_index >= ipa_get_cs_argument_count (top))
3236 {
3237 ici->param_index = -1;
3238 continue;
3239 }
3240
3241 param_index = ici->param_index;
3242 jfunc = ipa_get_ith_jump_func (top, param_index);
3243
3244 if (ie->speculative)
3245 {
3246 struct cgraph_edge *de;
3247 struct ipa_ref *ref;
3248 ie->speculative_call_info (de, ie, ref);
3249 spec_target = de->callee;
3250 }
3251
3252 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3253 new_direct_edge = NULL;
3254 else if (ici->polymorphic)
3255 {
3256 ipa_polymorphic_call_context ctx;
3257 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3258 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3259 }
3260 else
3261 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3262 new_root_info);
3263 /* If speculation was removed, then we need to do nothing. */
3264 if (new_direct_edge && new_direct_edge != ie
3265 && new_direct_edge->callee == spec_target)
3266 {
3267 new_direct_edge->indirect_inlining_edge = 1;
3268 top = IPA_EDGE_REF (cs);
3269 res = true;
3270 if (!new_direct_edge->speculative)
3271 continue;
3272 }
3273 else if (new_direct_edge)
3274 {
3275 new_direct_edge->indirect_inlining_edge = 1;
3276 if (new_direct_edge->call_stmt)
3277 new_direct_edge->call_stmt_cannot_inline_p
3278 = !gimple_check_call_matching_types (
3279 new_direct_edge->call_stmt,
3280 new_direct_edge->callee->decl, false);
3281 if (new_edges)
3282 {
3283 new_edges->safe_push (new_direct_edge);
3284 res = true;
3285 }
3286 top = IPA_EDGE_REF (cs);
3287 /* If a speculative edge was introduced, we still need to update
3288 the call info of the indirect edge. */
3289 if (!new_direct_edge->speculative)
3290 continue;
3291 }
3292 if (jfunc->type == IPA_JF_PASS_THROUGH
3293 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3294 {
3295 if (ici->agg_contents
3296 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3297 && !ici->polymorphic)
3298 ici->param_index = -1;
3299 else
3300 {
3301 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3302 if (ici->polymorphic
3303 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3304 ici->vptr_changed = true;
3305 }
3306 }
3307 else if (jfunc->type == IPA_JF_ANCESTOR)
3308 {
3309 if (ici->agg_contents
3310 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3311 && !ici->polymorphic)
3312 ici->param_index = -1;
3313 else
3314 {
3315 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3316 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3317 if (ici->polymorphic
3318 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3319 ici->vptr_changed = true;
3320 }
3321 }
3322 else
3323 /* Either we can find a destination for this edge now or never. */
3324 ici->param_index = -1;
3325 }
3326
3327 return res;
3328 }
3329
3330 /* Recursively traverse subtree of NODE (including node) made of inlined
3331 cgraph_edges when CS has been inlined and invoke
3332 update_indirect_edges_after_inlining on all nodes and
3333 update_jump_functions_after_inlining on all non-inlined edges that lead out
3334 of this subtree. Newly discovered indirect edges will be added to
3335 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3336 created. */
3337
3338 static bool
3339 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3340 struct cgraph_node *node,
3341 vec<cgraph_edge *> *new_edges)
3342 {
3343 struct cgraph_edge *e;
3344 bool res;
3345
3346 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3347
3348 for (e = node->callees; e; e = e->next_callee)
3349 if (!e->inline_failed)
3350 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3351 else
3352 update_jump_functions_after_inlining (cs, e);
3353 for (e = node->indirect_calls; e; e = e->next_callee)
3354 update_jump_functions_after_inlining (cs, e);
3355
3356 return res;
3357 }
3358
3359 /* Combine two controlled uses counts as done during inlining. */
3360
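/* E.g. combine_controlled_uses_counters (2, 3) == 2 + 3 - 1 == 4: the
   inlined call site itself accounted for one of the counted uses and
   stops being a separate use, hence the -1.  If either count is
   IPA_UNDESCRIBED_USE, so is the result.  */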
3361 static int
3362 combine_controlled_uses_counters (int c, int d)
3363 {
3364 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3365 return IPA_UNDESCRIBED_USE;
3366 else
3367 return c + d - 1;
3368 }
3369
3370 /* Propagate the number of controlled uses from CS->callee to the new root of the
3371 tree of inlined nodes. */
3372
3373 static void
3374 propagate_controlled_uses (struct cgraph_edge *cs)
3375 {
3376 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3377 struct cgraph_node *new_root = cs->caller->global.inlined_to
3378 ? cs->caller->global.inlined_to : cs->caller;
3379 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3380 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3381 int count, i;
3382
3383 count = MIN (ipa_get_cs_argument_count (args),
3384 ipa_get_param_count (old_root_info));
3385 for (i = 0; i < count; i++)
3386 {
3387 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3388 struct ipa_cst_ref_desc *rdesc;
3389
3390 if (jf->type == IPA_JF_PASS_THROUGH)
3391 {
3392 int src_idx, c, d;
3393 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3394 c = ipa_get_controlled_uses (new_root_info, src_idx);
3395 d = ipa_get_controlled_uses (old_root_info, i);
3396
3397 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3398 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3399 c = combine_controlled_uses_counters (c, d);
3400 ipa_set_controlled_uses (new_root_info, src_idx, c);
3401 if (c == 0 && new_root_info->ipcp_orig_node)
3402 {
3403 struct cgraph_node *n;
3404 struct ipa_ref *ref;
3405 tree t = new_root_info->known_csts[src_idx];
3406
3407 if (t && TREE_CODE (t) == ADDR_EXPR
3408 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3409 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3410 && (ref = new_root->find_reference (n, NULL, 0)))
3411 {
3412 if (dump_file)
3413 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3414 "reference from %s/%i to %s/%i.\n",
3415 xstrdup_for_dump (new_root->name ()),
3416 new_root->order,
3417 xstrdup_for_dump (n->name ()), n->order);
3418 ref->remove_reference ();
3419 }
3420 }
3421 }
3422 else if (jf->type == IPA_JF_CONST
3423 && (rdesc = jfunc_rdesc_usable (jf)))
3424 {
3425 int d = ipa_get_controlled_uses (old_root_info, i);
3426 int c = rdesc->refcount;
3427 rdesc->refcount = combine_controlled_uses_counters (c, d);
3428 if (rdesc->refcount == 0)
3429 {
3430 tree cst = ipa_get_jf_constant (jf);
3431 struct cgraph_node *n;
3432 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3433 && TREE_CODE (TREE_OPERAND (cst, 0))
3434 == FUNCTION_DECL);
3435 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3436 if (n)
3437 {
3438 struct cgraph_node *clone;
3439 bool ok;
3440 ok = remove_described_reference (n, rdesc);
3441 gcc_checking_assert (ok);
3442
3443 clone = cs->caller;
3444 while (clone->global.inlined_to
3445 && clone != rdesc->cs->caller
3446 && IPA_NODE_REF (clone)->ipcp_orig_node)
3447 {
3448 struct ipa_ref *ref;
3449 ref = clone->find_reference (n, NULL, 0);
3450 if (ref)
3451 {
3452 if (dump_file)
3453 fprintf (dump_file, "ipa-prop: Removing "
3454 "cloning-created reference "
3455 "from %s/%i to %s/%i.\n",
3456 xstrdup_for_dump (clone->name ()),
3457 clone->order,
3458 xstrdup_for_dump (n->name ()),
3459 n->order);
3460 ref->remove_reference ();
3461 }
3462 clone = clone->callers->caller;
3463 }
3464 }
3465 }
3466 }
3467 }
3468
3469 for (i = ipa_get_param_count (old_root_info);
3470 i < ipa_get_cs_argument_count (args);
3471 i++)
3472 {
3473 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3474
3475 if (jf->type == IPA_JF_CONST)
3476 {
3477 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3478 if (rdesc)
3479 rdesc->refcount = IPA_UNDESCRIBED_USE;
3480 }
3481 else if (jf->type == IPA_JF_PASS_THROUGH)
3482 ipa_set_controlled_uses (new_root_info,
3483 jf->value.pass_through.formal_id,
3484 IPA_UNDESCRIBED_USE);
3485 }
3486 }
3487
3488 /* Update jump functions and call note functions on inlining the call site CS.
3489 CS is expected to lead to a node already cloned by
3490 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3491 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3492 created. */
3493
3494 bool
3495 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3496 vec<cgraph_edge *> *new_edges)
3497 {
3498 bool changed;
3499 /* Do nothing if the preparation phase has not been carried out yet
3500 (i.e. during early inlining). */
3501 if (!ipa_node_params_sum)
3502 return false;
3503 gcc_assert (ipa_edge_args_vector);
3504
3505 propagate_controlled_uses (cs);
3506 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3507
3508 return changed;
3509 }
3510
3511 /* Frees all dynamically allocated structures that the argument info points
3512 to. */
3513
3514 void
3515 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3516 {
3517 vec_free (args->jump_functions);
3518 memset (args, 0, sizeof (*args));
3519 }
3520
3521 /* Free all ipa_edge structures. */
3522
3523 void
3524 ipa_free_all_edge_args (void)
3525 {
3526 int i;
3527 struct ipa_edge_args *args;
3528
3529 if (!ipa_edge_args_vector)
3530 return;
3531
3532 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3533 ipa_free_edge_args_substructures (args);
3534
3535 vec_free (ipa_edge_args_vector);
3536 }
3537
3538 /* Frees all dynamically allocated structures that the param info points
3539 to. */
3540
3541 ipa_node_params::~ipa_node_params ()
3542 {
3543 descriptors.release ();
3544 free (lattices);
3545 /* Lattice values and their sources are deallocated with their allocation
3546 pool. */
3547 known_csts.release ();
3548 known_contexts.release ();
3549
3550 lattices = NULL;
3551 ipcp_orig_node = NULL;
3552 analysis_done = 0;
3553 node_enqueued = 0;
3554 do_clone_for_all_contexts = 0;
3555 is_all_contexts_clone = 0;
3556 node_dead = 0;
3557 }
3558
3559 /* Free all ipa_node_params structures. */
3560
3561 void
3562 ipa_free_all_node_params (void)
3563 {
3564 delete ipa_node_params_sum;
3565 ipa_node_params_sum = NULL;
3566 }
3567
3568 /* Grow ipcp_transformations if necessary. */
3569
3570 void
3571 ipcp_grow_transformations_if_necessary (void)
3572 {
3573 if (vec_safe_length (ipcp_transformations)
3574 <= (unsigned) symtab->cgraph_max_uid)
3575 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3576 }
3577
3578 /* Set the aggregate replacements of NODE to be AGGVALS. */
3579
3580 void
3581 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3582 struct ipa_agg_replacement_value *aggvals)
3583 {
3584 ipcp_grow_transformations_if_necessary ();
3585 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3586 }
3587
3588 /* Hook that is called by cgraph.c when an edge is removed. */
3589
3590 static void
3591 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3592 {
3593 struct ipa_edge_args *args;
3594
3595 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3596 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3597 return;
3598
3599 args = IPA_EDGE_REF (cs);
3600 if (args->jump_functions)
3601 {
3602 struct ipa_jump_func *jf;
3603 int i;
3604 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3605 {
3606 struct ipa_cst_ref_desc *rdesc;
3607 try_decrement_rdesc_refcount (jf);
3608 if (jf->type == IPA_JF_CONST
3609 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3610 && rdesc->cs == cs)
3611 rdesc->cs = NULL;
3612 }
3613 }
3614
3615 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3616 }
3617
3618 /* Hook that is called by cgraph.c when an edge is duplicated. */
3619
3620 static void
3621 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3622 void *)
3623 {
3624 struct ipa_edge_args *old_args, *new_args;
3625 unsigned int i;
3626
3627 ipa_check_create_edge_args ();
3628
3629 old_args = IPA_EDGE_REF (src);
3630 new_args = IPA_EDGE_REF (dst);
3631
3632 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3633 if (old_args->polymorphic_call_contexts)
3634 new_args->polymorphic_call_contexts
3635 = vec_safe_copy (old_args->polymorphic_call_contexts);
3636
3637 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3638 {
3639 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3640 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3641
3642 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3643
3644 if (src_jf->type == IPA_JF_CONST)
3645 {
3646 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3647
3648 if (!src_rdesc)
3649 dst_jf->value.constant.rdesc = NULL;
3650 else if (src->caller == dst->caller)
3651 {
3652 struct ipa_ref *ref;
3653 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3654 gcc_checking_assert (n);
3655 ref = src->caller->find_reference (n, src->call_stmt,
3656 src->lto_stmt_uid);
3657 gcc_checking_assert (ref);
3658 dst->caller->clone_reference (ref, ref->stmt);
3659
3660 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3661 dst_rdesc->cs = dst;
3662 dst_rdesc->refcount = src_rdesc->refcount;
3663 dst_rdesc->next_duplicate = NULL;
3664 dst_jf->value.constant.rdesc = dst_rdesc;
3665 }
3666 else if (src_rdesc->cs == src)
3667 {
3668 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3669 dst_rdesc->cs = dst;
3670 dst_rdesc->refcount = src_rdesc->refcount;
3671 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3672 src_rdesc->next_duplicate = dst_rdesc;
3673 dst_jf->value.constant.rdesc = dst_rdesc;
3674 }
3675 else
3676 {
3677 struct ipa_cst_ref_desc *dst_rdesc;
3678 /* This can happen during inlining, when a JFUNC can refer to a
3679 reference taken in a function up in the tree of inline clones.
3680 We need to find the duplicate that refers to our tree of
3681 inline clones. */
3682
3683 gcc_assert (dst->caller->global.inlined_to);
3684 for (dst_rdesc = src_rdesc->next_duplicate;
3685 dst_rdesc;
3686 dst_rdesc = dst_rdesc->next_duplicate)
3687 {
3688 struct cgraph_node *top;
3689 top = dst_rdesc->cs->caller->global.inlined_to
3690 ? dst_rdesc->cs->caller->global.inlined_to
3691 : dst_rdesc->cs->caller;
3692 if (dst->caller->global.inlined_to == top)
3693 break;
3694 }
3695 gcc_assert (dst_rdesc);
3696 dst_jf->value.constant.rdesc = dst_rdesc;
3697 }
3698 }
3699 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3700 && src->caller == dst->caller)
3701 {
3702 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3703 ? dst->caller->global.inlined_to : dst->caller;
3704 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3705 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3706
3707 int c = ipa_get_controlled_uses (root_info, idx);
3708 if (c != IPA_UNDESCRIBED_USE)
3709 {
3710 c++;
3711 ipa_set_controlled_uses (root_info, idx, c);
3712 }
3713 }
3714 }
3715 }
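
/* To illustrate the chain walk above with a hypothetical example: assume
   an edge in function bar carries a constant jump function referring to
   &global, and bar has been inlined both into baz1 and into baz2.  Each
   act of inlining duplicated the edge and linked a fresh rdesc into the
   next_duplicate chain of the original one, one per tree of inline
   clones.  When the copy of the edge living in baz1's inline tree is
   duplicated again, src_rdesc still belongs to the original edge, so we
   walk next_duplicate until we reach the rdesc whose edge has baz1 as
   its inlined_to root -- the duplicate belonging to our own tree.  */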
3716
3717 /* Analyze a function that has been newly added into the callgraph. */
3718
3719 static void
3720 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3721 {
3722 if (node->has_gimple_body_p ())
3723 ipa_analyze_node (node);
3724 }
3725
3726 /* Hook that is called by the summary mechanism when a node is duplicated. */
3727
3728 void
3729 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3730 ipa_node_params *old_info,
3731 ipa_node_params *new_info)
3732 {
3733 ipa_agg_replacement_value *old_av, *new_av;
3734
3735 new_info->descriptors = old_info->descriptors.copy ();
3736 new_info->lattices = NULL;
3737 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3738
3739 new_info->analysis_done = old_info->analysis_done;
3740 new_info->node_enqueued = old_info->node_enqueued;
3741 new_info->versionable = old_info->versionable;
3742
3743 old_av = ipa_get_agg_replacements_for_node (src);
3744 if (old_av)
3745 {
3746 new_av = NULL;
3747 while (old_av)
3748 {
3749 struct ipa_agg_replacement_value *v;
3750
3751 v = ggc_alloc<ipa_agg_replacement_value> ();
3752 memcpy (v, old_av, sizeof (*v));
3753 v->next = new_av;
3754 new_av = v;
3755 old_av = old_av->next;
3756 }
3757 ipa_set_node_agg_value_chain (dst, new_av);
3758 }
3759
3760 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3761
3762 if (src_trans)
3763 {
3764 ipcp_grow_transformations_if_necessary ();
3765 src_trans = ipcp_get_transformation_summary (src);
3766 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3767 vec<ipa_vr, va_gc> *&dst_vr
3768 = ipcp_get_transformation_summary (dst)->m_vr;
3769 if (vec_safe_length (src_trans->m_vr) > 0)
3770 {
3771 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3772 for (unsigned i = 0; i < src_vr->length (); ++i)
3773 dst_vr->quick_push ((*src_vr)[i]);
3774 }
3775 }
3776
3777 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3778 {
3779 ipcp_grow_transformations_if_necessary ();
3780 src_trans = ipcp_get_transformation_summary (src);
3781 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3782 vec<ipa_bits, va_gc> *&dst_bits
3783 = ipcp_get_transformation_summary (dst)->bits;
3784 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3785 for (unsigned i = 0; i < src_bits->length (); ++i)
3786 dst_bits->quick_push ((*src_bits)[i]);
3787 }
3788 }
3789
3790 /* Register our cgraph hooks if they are not already there. */
3791
3792 void
3793 ipa_register_cgraph_hooks (void)
3794 {
3795 ipa_check_create_node_params ();
3796
3797 if (!edge_removal_hook_holder)
3798 edge_removal_hook_holder =
3799 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3800 if (!edge_duplication_hook_holder)
3801 edge_duplication_hook_holder =
3802 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3803 function_insertion_hook_holder =
3804 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3805 }
3806
3807 /* Unregister our cgraph hooks. */
3808
3809 static void
3810 ipa_unregister_cgraph_hooks (void)
3811 {
3812 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3813 edge_removal_hook_holder = NULL;
3814 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3815 edge_duplication_hook_holder = NULL;
3816 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3817 function_insertion_hook_holder = NULL;
3818 }
3819
3820 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3821 longer needed after ipa-cp. */
3822
3823 void
3824 ipa_free_all_structures_after_ipa_cp (void)
3825 {
3826 if (!optimize && !in_lto_p)
3827 {
3828 ipa_free_all_edge_args ();
3829 ipa_free_all_node_params ();
3830 ipcp_sources_pool.release ();
3831 ipcp_cst_values_pool.release ();
3832 ipcp_poly_ctx_values_pool.release ();
3833 ipcp_agg_lattice_pool.release ();
3834 ipa_unregister_cgraph_hooks ();
3835 ipa_refdesc_pool.release ();
3836 }
3837 }
3838
3839 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3840 longer needed after indirect inlining. */
3841
3842 void
3843 ipa_free_all_structures_after_iinln (void)
3844 {
3845 ipa_free_all_edge_args ();
3846 ipa_free_all_node_params ();
3847 ipa_unregister_cgraph_hooks ();
3848 ipcp_sources_pool.release ();
3849 ipcp_cst_values_pool.release ();
3850 ipcp_poly_ctx_values_pool.release ();
3851 ipcp_agg_lattice_pool.release ();
3852 ipa_refdesc_pool.release ();
3853 }
3854
3855 /* Print the ipa_tree_map data structures (parameter descriptors) of
3856 function NODE to F. */
3857
3858 void
3859 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3860 {
3861 int i, count;
3862 struct ipa_node_params *info;
3863
3864 if (!node->definition)
3865 return;
3866 info = IPA_NODE_REF (node);
3867 fprintf (f, " function %s/%i parameter descriptors:\n",
3868 node->name (), node->order);
3869 count = ipa_get_param_count (info);
3870 for (i = 0; i < count; i++)
3871 {
3872 int c;
3873
3874 fprintf (f, " ");
3875 ipa_dump_param (f, info, i);
3876 if (ipa_is_param_used (info, i))
3877 fprintf (f, " used");
3878 c = ipa_get_controlled_uses (info, i);
3879 if (c == IPA_UNDESCRIBED_USE)
3880 fprintf (f, " undescribed_use");
3881 else
3882 fprintf (f, " controlled_uses=%i", c);
3883 fprintf (f, "\n");
3884 }
3885 }
3886
3887 /* Print ipa_tree_map data structures of all functions in the
3888 callgraph to F. */
3889
3890 void
3891 ipa_print_all_params (FILE * f)
3892 {
3893 struct cgraph_node *node;
3894
3895 fprintf (f, "\nFunction parameters:\n");
3896 FOR_EACH_FUNCTION (node)
3897 ipa_print_node_params (f, node);
3898 }
3899
3900 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3901
3902 vec<tree>
3903 ipa_get_vector_of_formal_parms (tree fndecl)
3904 {
3905 vec<tree> args;
3906 int count;
3907 tree parm;
3908
3909 gcc_assert (!flag_wpa);
3910 count = count_formal_params (fndecl);
3911 args.create (count);
3912 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3913 args.quick_push (parm);
3914
3915 return args;
3916 }
3917
3918 /* Return a heap allocated vector containing types of formal parameters of
3919 function type FNTYPE. */
3920
3921 vec<tree>
3922 ipa_get_vector_of_formal_parm_types (tree fntype)
3923 {
3924 vec<tree> types;
3925 int count = 0;
3926 tree t;
3927
3928 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3929 count++;
3930
3931 types.create (count);
3932 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3933 types.quick_push (TREE_VALUE (t));
3934
3935 return types;
3936 }
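
/* Both functions above return heap-allocated vectors which the caller
   must release.  A minimal hypothetical use, where examine_parm stands
   for whatever processing the caller does:

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     for (unsigned i = 0; i < parms.length (); i++)
       examine_parm (parms[i]);
     parms.release ();  */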
3937
3938 /* Modify the function declaration FNDECL and its type according to the plan in
3939 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3940 to reflect the actual parameters being modified which are determined by the
3941 base_index field. */
3942
3943 void
3944 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3945 {
3946 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3947 tree orig_type = TREE_TYPE (fndecl);
3948 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3949
3950 /* The following test is an ugly hack; some functions simply don't have any
3951 arguments in their type. This is probably a bug but well... */
3952 bool care_for_types = (old_arg_types != NULL_TREE);
3953 bool last_parm_void;
3954 vec<tree> otypes;
3955 if (care_for_types)
3956 {
3957 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3958 == void_type_node);
3959 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3960 if (last_parm_void)
3961 gcc_assert (oparms.length () + 1 == otypes.length ());
3962 else
3963 gcc_assert (oparms.length () == otypes.length ());
3964 }
3965 else
3966 {
3967 last_parm_void = false;
3968 otypes.create (0);
3969 }
3970
3971 int len = adjustments.length ();
3972 tree *link = &DECL_ARGUMENTS (fndecl);
3973 tree new_arg_types = NULL;
3974 for (int i = 0; i < len; i++)
3975 {
3976 struct ipa_parm_adjustment *adj;
3977 gcc_assert (link);
3978
3979 adj = &adjustments[i];
3980 tree parm;
3981 if (adj->op == IPA_PARM_OP_NEW)
3982 parm = NULL;
3983 else
3984 parm = oparms[adj->base_index];
3985 adj->base = parm;
3986
3987 if (adj->op == IPA_PARM_OP_COPY)
3988 {
3989 if (care_for_types)
3990 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3991 new_arg_types);
3992 *link = parm;
3993 link = &DECL_CHAIN (parm);
3994 }
3995 else if (adj->op != IPA_PARM_OP_REMOVE)
3996 {
3997 tree new_parm;
3998 tree ptype;
3999
4000 if (adj->by_ref)
4001 ptype = build_pointer_type (adj->type);
4002 else
4003 {
4004 ptype = adj->type;
4005 if (is_gimple_reg_type (ptype))
4006 {
4007 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4008 if (TYPE_ALIGN (ptype) != malign)
4009 ptype = build_aligned_type (ptype, malign);
4010 }
4011 }
4012
4013 if (care_for_types)
4014 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4015
4016 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4017 ptype);
4018 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4019 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4020 DECL_ARTIFICIAL (new_parm) = 1;
4021 DECL_ARG_TYPE (new_parm) = ptype;
4022 DECL_CONTEXT (new_parm) = fndecl;
4023 TREE_USED (new_parm) = 1;
4024 DECL_IGNORED_P (new_parm) = 1;
4025 layout_decl (new_parm, 0);
4026
4027 if (adj->op == IPA_PARM_OP_NEW)
4028 adj->base = NULL;
4029 else
4030 adj->base = parm;
4031 adj->new_decl = new_parm;
4032
4033 *link = new_parm;
4034 link = &DECL_CHAIN (new_parm);
4035 }
4036 }
4037
4038 *link = NULL_TREE;
4039
4040 tree new_reversed = NULL;
4041 if (care_for_types)
4042 {
4043 new_reversed = nreverse (new_arg_types);
4044 if (last_parm_void)
4045 {
4046 if (new_reversed)
4047 TREE_CHAIN (new_arg_types) = void_list_node;
4048 else
4049 new_reversed = void_list_node;
4050 }
4051 }
4052
4053 /* Use copy_node to preserve as much as possible from the original type
4054 (debug info, attribute lists etc.).
4055 The exception is METHOD_TYPEs, which must have a THIS argument.
4056 When we are asked to remove it, we need to build a new FUNCTION_TYPE
4057 instead. */
4058 tree new_type = NULL;
4059 if (TREE_CODE (orig_type) != METHOD_TYPE
4060 || (adjustments[0].op == IPA_PARM_OP_COPY
4061 && adjustments[0].base_index == 0))
4062 {
4063 new_type = build_distinct_type_copy (orig_type);
4064 TYPE_ARG_TYPES (new_type) = new_reversed;
4065 }
4066 else
4067 {
4068 new_type
4069 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4070 new_reversed));
4071 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4072 DECL_VINDEX (fndecl) = NULL_TREE;
4073 }
4074
4075 /* When the signature changes, we need to clear builtin info. */
4076 if (DECL_BUILT_IN (fndecl))
4077 {
4078 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4079 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4080 }
4081
4082 TREE_TYPE (fndecl) = new_type;
4083 DECL_VIRTUAL_P (fndecl) = 0;
4084 DECL_LANG_SPECIFIC (fndecl) = NULL;
4085 otypes.release ();
4086 oparms.release ();
4087 }
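
/* As an illustration of the rewriting above, assume a hypothetical
   original declaration

     int f (int a, struct S *p);

   and an ADJUSTMENTS vector of two entries: one with op IPA_PARM_OP_COPY
   and base_index 0, and one with base_index 1, offset 32, type int and
   by_ref false.  The first parameter is kept as-is while the second is
   replaced by a synthesized scalar PARM_DECL (named via
   create_tmp_var_name using adj->arg_prefix, or "SYNTH" by default),
   yielding a declaration equivalent to

     int f (int a, int SYNTH);

   where the new parameter stands for the component of *p at offset 32
   bits.  */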
4088
4089 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4090 If this is a directly recursive call, CS must be NULL. Otherwise it must
4091 contain the corresponding call graph edge. */
4092
4093 void
4094 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4095 ipa_parm_adjustment_vec adjustments)
4096 {
4097 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4098 vec<tree> vargs;
4099 vec<tree, va_gc> **debug_args = NULL;
4100 gcall *new_stmt;
4101 gimple_stmt_iterator gsi, prev_gsi;
4102 tree callee_decl;
4103 int i, len;
4104
4105 len = adjustments.length ();
4106 vargs.create (len);
4107 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4108 current_node->remove_stmt_references (stmt);
4109
4110 gsi = gsi_for_stmt (stmt);
4111 prev_gsi = gsi;
4112 gsi_prev (&prev_gsi);
4113 for (i = 0; i < len; i++)
4114 {
4115 struct ipa_parm_adjustment *adj;
4116
4117 adj = &adjustments[i];
4118
4119 if (adj->op == IPA_PARM_OP_COPY)
4120 {
4121 tree arg = gimple_call_arg (stmt, adj->base_index);
4122
4123 vargs.quick_push (arg);
4124 }
4125 else if (adj->op != IPA_PARM_OP_REMOVE)
4126 {
4127 tree expr, base, off;
4128 location_t loc;
4129 unsigned int deref_align = 0;
4130 bool deref_base = false;
4131
4132 /* When we create a new parameter out of the value of the old one, we can
4133 do the following kinds of transformations:
4134
4135 - A scalar passed by reference is converted to a scalar passed by
4136 value. (adj->by_ref is false and the type of the original
4137 actual argument is a pointer to a scalar).
4138
4139 - A part of an aggregate is passed instead of the whole aggregate.
4140 The part can be passed either by value or by reference, this is
4141 determined by value of adj->by_ref. Moreover, the code below
4142 handles both situations when the original aggregate is passed by
4143 value (its type is not a pointer) and when it is passed by
4144 reference (it is a pointer to an aggregate).
4145
4146 When the new argument is passed by reference (adj->by_ref is true)
4147 it must be a part of an aggregate and therefore we form it by
4148 simply taking the address of a reference inside the original
4149 aggregate. */
4150
4151 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4152 base = gimple_call_arg (stmt, adj->base_index);
4153 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4154 : EXPR_LOCATION (base);
4155
4156 if (TREE_CODE (base) != ADDR_EXPR
4157 && POINTER_TYPE_P (TREE_TYPE (base)))
4158 off = build_int_cst (adj->alias_ptr_type,
4159 adj->offset / BITS_PER_UNIT);
4160 else
4161 {
4162 HOST_WIDE_INT base_offset;
4163 tree prev_base;
4164 bool addrof;
4165
4166 if (TREE_CODE (base) == ADDR_EXPR)
4167 {
4168 base = TREE_OPERAND (base, 0);
4169 addrof = true;
4170 }
4171 else
4172 addrof = false;
4173 prev_base = base;
4174 base = get_addr_base_and_unit_offset (base, &base_offset);
4175 /* Aggregate arguments can have non-invariant addresses. */
4176 if (!base)
4177 {
4178 base = build_fold_addr_expr (prev_base);
4179 off = build_int_cst (adj->alias_ptr_type,
4180 adj->offset / BITS_PER_UNIT);
4181 }
4182 else if (TREE_CODE (base) == MEM_REF)
4183 {
4184 if (!addrof)
4185 {
4186 deref_base = true;
4187 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4188 }
4189 off = build_int_cst (adj->alias_ptr_type,
4190 base_offset
4191 + adj->offset / BITS_PER_UNIT);
4192 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4193 off);
4194 base = TREE_OPERAND (base, 0);
4195 }
4196 else
4197 {
4198 off = build_int_cst (adj->alias_ptr_type,
4199 base_offset
4200 + adj->offset / BITS_PER_UNIT);
4201 base = build_fold_addr_expr (base);
4202 }
4203 }
4204
4205 if (!adj->by_ref)
4206 {
4207 tree type = adj->type;
4208 unsigned int align;
4209 unsigned HOST_WIDE_INT misalign;
4210
4211 if (deref_base)
4212 {
4213 align = deref_align;
4214 misalign = 0;
4215 }
4216 else
4217 {
4218 get_pointer_alignment_1 (base, &align, &misalign);
4219 if (TYPE_ALIGN (type) > align)
4220 align = TYPE_ALIGN (type);
4221 }
4222 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4223 * BITS_PER_UNIT);
4224 misalign = misalign & (align - 1);
4225 if (misalign != 0)
4226 align = least_bit_hwi (misalign);
4227 if (align < TYPE_ALIGN (type))
4228 type = build_aligned_type (type, align);
4229 base = force_gimple_operand_gsi (&gsi, base,
4230 true, NULL, true, GSI_SAME_STMT);
4231 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4232 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4233 /* If expr is not a valid gimple call argument emit
4234 a load into a temporary. */
4235 if (is_gimple_reg_type (TREE_TYPE (expr)))
4236 {
4237 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4238 if (gimple_in_ssa_p (cfun))
4239 {
4240 gimple_set_vuse (tem, gimple_vuse (stmt));
4241 expr = make_ssa_name (TREE_TYPE (expr), tem);
4242 }
4243 else
4244 expr = create_tmp_reg (TREE_TYPE (expr));
4245 gimple_assign_set_lhs (tem, expr);
4246 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4247 }
4248 }
4249 else
4250 {
4251 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4252 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4253 expr = build_fold_addr_expr (expr);
4254 expr = force_gimple_operand_gsi (&gsi, expr,
4255 true, NULL, true, GSI_SAME_STMT);
4256 }
4257 vargs.quick_push (expr);
4258 }
4259 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4260 {
4261 unsigned int ix;
4262 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4263 gimple *def_temp;
4264
4265 arg = gimple_call_arg (stmt, adj->base_index);
4266 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4267 {
4268 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4269 continue;
4270 arg = fold_convert_loc (gimple_location (stmt),
4271 TREE_TYPE (origin), arg);
4272 }
4273 if (debug_args == NULL)
4274 debug_args = decl_debug_args_insert (callee_decl);
4275 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4276 if (ddecl == origin)
4277 {
4278 ddecl = (**debug_args)[ix + 1];
4279 break;
4280 }
4281 if (ddecl == NULL)
4282 {
4283 ddecl = make_node (DEBUG_EXPR_DECL);
4284 DECL_ARTIFICIAL (ddecl) = 1;
4285 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4286 DECL_MODE (ddecl) = DECL_MODE (origin);
4287
4288 vec_safe_push (*debug_args, origin);
4289 vec_safe_push (*debug_args, ddecl);
4290 }
4291 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4292 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4293 }
4294 }
4295
4296 if (dump_file && (dump_flags & TDF_DETAILS))
4297 {
4298 fprintf (dump_file, "replacing stmt:");
4299 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4300 }
4301
4302 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4303 vargs.release ();
4304 if (gimple_call_lhs (stmt))
4305 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4306
4307 gimple_set_block (new_stmt, gimple_block (stmt));
4308 if (gimple_has_location (stmt))
4309 gimple_set_location (new_stmt, gimple_location (stmt));
4310 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4311 gimple_call_copy_flags (new_stmt, stmt);
4312 if (gimple_in_ssa_p (cfun))
4313 {
4314 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4315 if (gimple_vdef (stmt))
4316 {
4317 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4318 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4319 }
4320 }
4321
4322 if (dump_file && (dump_flags & TDF_DETAILS))
4323 {
4324 fprintf (dump_file, "with stmt:");
4325 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4326 fprintf (dump_file, "\n");
4327 }
4328 gsi_replace (&gsi, new_stmt, true);
4329 if (cs)
4330 cs->set_call_stmt (new_stmt);
4331 do
4332 {
4333 current_node->record_stmt_references (gsi_stmt (gsi));
4334 gsi_prev (&gsi);
4335 }
4336 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4337 }
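
/* Continuing the hypothetical example given after
   ipa_modify_formal_parameters: a call

     r = f (i, &s);

   whose second argument was replaced by the component at offset 32 bits
   with by_ref false is rewritten into something like

     tmp_1 = MEM[(int *)&s + 4B];
     r = f (i, tmp_1);

   whereas with by_ref true the new actual argument would instead be the
   address &MEM[(int *)&s + 4B].  */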
4338
4339 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4340 so. ADJUSTMENTS is the vector of adjustments to consider. CONVERT
4341 specifies whether the function should care about type incompatibility between
4342 the current and new expressions. If it is false, the function will leave
4343 incompatibility issues to the caller. Return true iff the expression
4344 was modified. */
4345
4346 bool
4347 ipa_modify_expr (tree *expr, bool convert,
4348 ipa_parm_adjustment_vec adjustments)
4349 {
4350 struct ipa_parm_adjustment *cand
4351 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4352 if (!cand)
4353 return false;
4354
4355 tree src;
4356 if (cand->by_ref)
4357 {
4358 src = build_simple_mem_ref (cand->new_decl);
4359 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4360 }
4361 else
4362 src = cand->new_decl;
4363
4364 if (dump_file && (dump_flags & TDF_DETAILS))
4365 {
4366 fprintf (dump_file, "About to replace expr ");
4367 print_generic_expr (dump_file, *expr, 0);
4368 fprintf (dump_file, " with ");
4369 print_generic_expr (dump_file, src, 0);
4370 fprintf (dump_file, "\n");
4371 }
4372
4373 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4374 {
4375 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4376 *expr = vce;
4377 }
4378 else
4379 *expr = src;
4380 return true;
4381 }
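
/* In the callee, this is the counterpart of the call-site rewriting: a
   hypothetical load such as

     x = p->field;

   becomes a plain use of the new scalar parameter, and a
   VIEW_CONVERT_EXPR is wrapped around it only when the types disagree
   and CONVERT is set.  */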
4382
4383 /* If T is an SSA_NAME, return NULL if it is not a default def or
4384 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4385 the base variable is always returned, regardless of whether it is a
4386 default def. Return T if it is not an SSA_NAME. */
4387
4388 static tree
4389 get_ssa_base_param (tree t, bool ignore_default_def)
4390 {
4391 if (TREE_CODE (t) == SSA_NAME)
4392 {
4393 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4394 return SSA_NAME_VAR (t);
4395 else
4396 return NULL_TREE;
4397 }
4398 return t;
4399 }
4400
4401 /* Given an expression, return an adjustment entry specifying the
4402 transformation to be done on EXPR. If no suitable adjustment entry
4403 was found, returns NULL.
4404
4405 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4406 default def, otherwise bail on them.
4407
4408 If CONVERT is non-NULL, this function will set *CONVERT if the
4409 expression provided is a component reference. ADJUSTMENTS is the
4410 adjustments vector. */
4411
4412 ipa_parm_adjustment *
4413 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4414 ipa_parm_adjustment_vec adjustments,
4415 bool ignore_default_def)
4416 {
4417 if (TREE_CODE (**expr) == BIT_FIELD_REF
4418 || TREE_CODE (**expr) == IMAGPART_EXPR
4419 || TREE_CODE (**expr) == REALPART_EXPR)
4420 {
4421 *expr = &TREE_OPERAND (**expr, 0);
4422 if (convert)
4423 *convert = true;
4424 }
4425
4426 HOST_WIDE_INT offset, size, max_size;
4427 bool reverse;
4428 tree base
4429 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4430 if (!base || size == -1 || max_size == -1)
4431 return NULL;
4432
4433 if (TREE_CODE (base) == MEM_REF)
4434 {
4435 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4436 base = TREE_OPERAND (base, 0);
4437 }
4438
4439 base = get_ssa_base_param (base, ignore_default_def);
4440 if (!base || TREE_CODE (base) != PARM_DECL)
4441 return NULL;
4442
4443 struct ipa_parm_adjustment *cand = NULL;
4444 unsigned int len = adjustments.length ();
4445 for (unsigned i = 0; i < len; i++)
4446 {
4447 struct ipa_parm_adjustment *adj = &adjustments[i];
4448
4449 if (adj->base == base
4450 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4451 {
4452 cand = adj;
4453 break;
4454 }
4455 }
4456
4457 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4458 return NULL;
4459 return cand;
4460 }
4461
4462 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4463
4464 static bool
4465 index_in_adjustments_multiple_times_p (int base_index,
4466 ipa_parm_adjustment_vec adjustments)
4467 {
4468 int i, len = adjustments.length ();
4469 bool one = false;
4470
4471 for (i = 0; i < len; i++)
4472 {
4473 struct ipa_parm_adjustment *adj;
4474 adj = &adjustments[i];
4475
4476 if (adj->base_index == base_index)
4477 {
4478 if (one)
4479 return true;
4480 else
4481 one = true;
4482 }
4483 }
4484 return false;
4485 }
4486
4487
4488 /* Return adjustments that should have the same effect on function parameters
4489 and call arguments as if they were first changed according to adjustments in
4490 INNER and then by adjustments in OUTER. */
4491
4492 ipa_parm_adjustment_vec
4493 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4494 ipa_parm_adjustment_vec outer)
4495 {
4496 int i, outlen = outer.length ();
4497 int inlen = inner.length ();
4498 int removals = 0;
4499 ipa_parm_adjustment_vec adjustments, tmp;
4500
4501 tmp.create (inlen);
4502 for (i = 0; i < inlen; i++)
4503 {
4504 struct ipa_parm_adjustment *n;
4505 n = &inner[i];
4506
4507 if (n->op == IPA_PARM_OP_REMOVE)
4508 removals++;
4509 else
4510 {
4511 /* FIXME: Handling of new arguments is not implemented yet. */
4512 gcc_assert (n->op != IPA_PARM_OP_NEW);
4513 tmp.quick_push (*n);
4514 }
4515 }
4516
4517 adjustments.create (outlen + removals);
4518 for (i = 0; i < outlen; i++)
4519 {
4520 struct ipa_parm_adjustment r;
4521 struct ipa_parm_adjustment *out = &outer[i];
4522 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4523
4524 memset (&r, 0, sizeof (r));
4525 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4526 if (out->op == IPA_PARM_OP_REMOVE)
4527 {
4528 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4529 {
4530 r.op = IPA_PARM_OP_REMOVE;
4531 adjustments.quick_push (r);
4532 }
4533 continue;
4534 }
4535 else
4536 {
4537 /* FIXME: Handling of new arguments is not implemented yet. */
4538 gcc_assert (out->op != IPA_PARM_OP_NEW);
4539 }
4540
4541 r.base_index = in->base_index;
4542 r.type = out->type;
4543
4544 /* FIXME: Create nonlocal value too. */
4545
4546 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4547 r.op = IPA_PARM_OP_COPY;
4548 else if (in->op == IPA_PARM_OP_COPY)
4549 r.offset = out->offset;
4550 else if (out->op == IPA_PARM_OP_COPY)
4551 r.offset = in->offset;
4552 else
4553 r.offset = in->offset + out->offset;
4554 adjustments.quick_push (r);
4555 }
4556
4557 for (i = 0; i < inlen; i++)
4558 {
4559 struct ipa_parm_adjustment *n = &inner[i];
4560
4561 if (n->op == IPA_PARM_OP_REMOVE)
4562 adjustments.quick_push (*n);
4563 }
4564
4565 tmp.release ();
4566 return adjustments;
4567 }
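
/* The offset arithmetic above composes as expected.  For a hypothetical
   example, if INNER replaced parameter 0 by its component at offset 64
   and OUTER further replaces the resulting parameter by its component at
   offset 32, the combined adjustment describes the component of the
   original parameter 0 at offset 64 + 32 = 96; a copy on either side
   simply passes the other side's offset through.  */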
4568
4569 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4570 way, assuming they are meant to be applied to FNDECL. */
4571
4572 void
4573 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4574 tree fndecl)
4575 {
4576 int i, len = adjustments.length ();
4577 bool first = true;
4578 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4579
4580 fprintf (file, "IPA param adjustments: ");
4581 for (i = 0; i < len; i++)
4582 {
4583 struct ipa_parm_adjustment *adj;
4584 adj = &adjustments[i];
4585
4586 if (!first)
4587 fprintf (file, " ");
4588 else
4589 first = false;
4590
4591 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4592 print_generic_expr (file, parms[adj->base_index], 0);
4593 if (adj->base)
4594 {
4595 fprintf (file, ", base: ");
4596 print_generic_expr (file, adj->base, 0);
4597 }
4598 if (adj->new_decl)
4599 {
4600 fprintf (file, ", new_decl: ");
4601 print_generic_expr (file, adj->new_decl, 0);
4602 }
4603 if (adj->new_ssa_base)
4604 {
4605 fprintf (file, ", new_ssa_base: ");
4606 print_generic_expr (file, adj->new_ssa_base, 0);
4607 }
4608
4609 if (adj->op == IPA_PARM_OP_COPY)
4610 fprintf (file, ", copy_param");
4611 else if (adj->op == IPA_PARM_OP_REMOVE)
4612 fprintf (file, ", remove_param");
4613 else
4614 fprintf (file, ", offset %li", (long) adj->offset);
4615 if (adj->by_ref)
4616 fprintf (file, ", by_ref");
4617 print_node_brief (file, ", type: ", adj->type, 0);
4618 fprintf (file, "\n");
4619 }
4620 parms.release ();
4621 }
4622
4623 /* Dump the AV linked list. */
4624
4625 void
4626 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4627 {
4628 bool comma = false;
4629 fprintf (f, " Aggregate replacements:");
4630 for (; av; av = av->next)
4631 {
4632 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4633 av->index, av->offset);
4634 print_generic_expr (f, av->value, 0);
4635 comma = true;
4636 }
4637 fprintf (f, "\n");
4638 }
4639
4640 /* Stream out jump function JUMP_FUNC to OB. */
4641
4642 static void
4643 ipa_write_jump_function (struct output_block *ob,
4644 struct ipa_jump_func *jump_func)
4645 {
4646 struct ipa_agg_jf_item *item;
4647 struct bitpack_d bp;
4648 int i, count;
4649
4650 streamer_write_uhwi (ob, jump_func->type);
4651 switch (jump_func->type)
4652 {
4653 case IPA_JF_UNKNOWN:
4654 break;
4655 case IPA_JF_CONST:
4656 gcc_assert (
4657 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4658 stream_write_tree (ob, jump_func->value.constant.value, true);
4659 break;
4660 case IPA_JF_PASS_THROUGH:
4661 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4662 if (jump_func->value.pass_through.operation == NOP_EXPR)
4663 {
4664 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4665 bp = bitpack_create (ob->main_stream);
4666 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4667 streamer_write_bitpack (&bp);
4668 }
4669 else
4670 {
4671 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4672 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4673 }
4674 break;
4675 case IPA_JF_ANCESTOR:
4676 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4677 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4678 bp = bitpack_create (ob->main_stream);
4679 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4680 streamer_write_bitpack (&bp);
4681 break;
4682 }
4683
4684 count = vec_safe_length (jump_func->agg.items);
4685 streamer_write_uhwi (ob, count);
4686 if (count)
4687 {
4688 bp = bitpack_create (ob->main_stream);
4689 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4690 streamer_write_bitpack (&bp);
4691 }
4692
4693 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4694 {
4695 streamer_write_uhwi (ob, item->offset);
4696 stream_write_tree (ob, item->value, true);
4697 }
4698
4699 bp = bitpack_create (ob->main_stream);
4700 bp_pack_value (&bp, jump_func->bits.known, 1);
4701 streamer_write_bitpack (&bp);
4702 if (jump_func->bits.known)
4703 {
4704 streamer_write_widest_int (ob, jump_func->bits.value);
4705 streamer_write_widest_int (ob, jump_func->bits.mask);
4706 }
4707 bp_pack_value (&bp, jump_func->vr_known, 1);
4708 streamer_write_bitpack (&bp);
4709 if (jump_func->vr_known)
4710 {
4711 streamer_write_enum (ob->main_stream, value_range_type,
4712 VR_LAST, jump_func->m_vr.type);
4713 stream_write_tree (ob, jump_func->m_vr.min, true);
4714 stream_write_tree (ob, jump_func->m_vr.max, true);
4715 }
4716 }
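
/* For reference, the record emitted above consists of: the jump function
   type; a type-specific payload; the number of aggregate items, followed
   (if nonzero) by a by_ref bitpack and the items themselves; a bitpack
   with the known-bits flag plus the optional value/mask pair; and a
   bitpack with the value-range flag plus the optional type/min/max
   triple.  ipa_read_jump_function below must consume exactly the same
   sequence.  */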
4717
4718 /* Read in jump function JUMP_FUNC from IB. */
4719
4720 static void
4721 ipa_read_jump_function (struct lto_input_block *ib,
4722 struct ipa_jump_func *jump_func,
4723 struct cgraph_edge *cs,
4724 struct data_in *data_in)
4725 {
4726 enum jump_func_type jftype;
4727 enum tree_code operation;
4728 int i, count;
4729
4730 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4731 switch (jftype)
4732 {
4733 case IPA_JF_UNKNOWN:
4734 ipa_set_jf_unknown (jump_func);
4735 break;
4736 case IPA_JF_CONST:
4737 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4738 break;
4739 case IPA_JF_PASS_THROUGH:
4740 operation = (enum tree_code) streamer_read_uhwi (ib);
4741 if (operation == NOP_EXPR)
4742 {
4743 int formal_id = streamer_read_uhwi (ib);
4744 struct bitpack_d bp = streamer_read_bitpack (ib);
4745 bool agg_preserved = bp_unpack_value (&bp, 1);
4746 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4747 }
4748 else
4749 {
4750 tree operand = stream_read_tree (ib, data_in);
4751 int formal_id = streamer_read_uhwi (ib);
4752 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4753 operation);
4754 }
4755 break;
4756 case IPA_JF_ANCESTOR:
4757 {
4758 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4759 int formal_id = streamer_read_uhwi (ib);
4760 struct bitpack_d bp = streamer_read_bitpack (ib);
4761 bool agg_preserved = bp_unpack_value (&bp, 1);
4762 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4763 break;
4764 }
4765 }
4766
4767 count = streamer_read_uhwi (ib);
4768 vec_alloc (jump_func->agg.items, count);
4769 if (count)
4770 {
4771 struct bitpack_d bp = streamer_read_bitpack (ib);
4772 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4773 }
4774 for (i = 0; i < count; i++)
4775 {
4776 struct ipa_agg_jf_item item;
4777 item.offset = streamer_read_uhwi (ib);
4778 item.value = stream_read_tree (ib, data_in);
4779 jump_func->agg.items->quick_push (item);
4780 }
4781
4782 struct bitpack_d bp = streamer_read_bitpack (ib);
4783 bool bits_known = bp_unpack_value (&bp, 1);
4784 if (bits_known)
4785 {
4786 jump_func->bits.known = true;
4787 jump_func->bits.value = streamer_read_widest_int (ib);
4788 jump_func->bits.mask = streamer_read_widest_int (ib);
4789 }
4790 else
4791 jump_func->bits.known = false;
4792
4793 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4794 bool vr_known = bp_unpack_value (&vr_bp, 1);
4795 if (vr_known)
4796 {
4797 jump_func->vr_known = true;
4798 jump_func->m_vr.type = streamer_read_enum (ib,
4799 value_range_type,
4800 VR_LAST);
4801 jump_func->m_vr.min = stream_read_tree (ib, data_in);
4802 jump_func->m_vr.max = stream_read_tree (ib, data_in);
4803 }
4804 else
4805 jump_func->vr_known = false;
4806 }
4807
4808 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4809 relevant to indirect inlining to OB. */
4810
4811 static void
4812 ipa_write_indirect_edge_info (struct output_block *ob,
4813 struct cgraph_edge *cs)
4814 {
4815 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4816 struct bitpack_d bp;
4817
4818 streamer_write_hwi (ob, ii->param_index);
4819 bp = bitpack_create (ob->main_stream);
4820 bp_pack_value (&bp, ii->polymorphic, 1);
4821 bp_pack_value (&bp, ii->agg_contents, 1);
4822 bp_pack_value (&bp, ii->member_ptr, 1);
4823 bp_pack_value (&bp, ii->by_ref, 1);
4824 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4825 bp_pack_value (&bp, ii->vptr_changed, 1);
4826 streamer_write_bitpack (&bp);
4827 if (ii->agg_contents || ii->polymorphic)
4828 streamer_write_hwi (ob, ii->offset);
4829 else
4830 gcc_assert (ii->offset == 0);
4831
4832 if (ii->polymorphic)
4833 {
4834 streamer_write_hwi (ob, ii->otr_token);
4835 stream_write_tree (ob, ii->otr_type, true);
4836 ii->context.stream_out (ob);
4837 }
4838 }
4839
4840 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4841 relevant to indirect inlining from IB. */
4842
4843 static void
4844 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4845 struct data_in *data_in,
4846 struct cgraph_edge *cs)
4847 {
4848 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4849 struct bitpack_d bp;
4850
4851 ii->param_index = (int) streamer_read_hwi (ib);
4852 bp = streamer_read_bitpack (ib);
4853 ii->polymorphic = bp_unpack_value (&bp, 1);
4854 ii->agg_contents = bp_unpack_value (&bp, 1);
4855 ii->member_ptr = bp_unpack_value (&bp, 1);
4856 ii->by_ref = bp_unpack_value (&bp, 1);
4857 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4858 ii->vptr_changed = bp_unpack_value (&bp, 1);
4859 if (ii->agg_contents || ii->polymorphic)
4860 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4861 else
4862 ii->offset = 0;
4863 if (ii->polymorphic)
4864 {
4865 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4866 ii->otr_type = stream_read_tree (ib, data_in);
4867 ii->context.stream_in (ib, data_in);
4868 }
4869 }
4870
4871 /* Stream out NODE info to OB. */
4872
4873 static void
4874 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4875 {
4876 int node_ref;
4877 lto_symtab_encoder_t encoder;
4878 struct ipa_node_params *info = IPA_NODE_REF (node);
4879 int j;
4880 struct cgraph_edge *e;
4881 struct bitpack_d bp;
4882
4883 encoder = ob->decl_state->symtab_node_encoder;
4884 node_ref = lto_symtab_encoder_encode (encoder, node);
4885 streamer_write_uhwi (ob, node_ref);
4886
4887 streamer_write_uhwi (ob, ipa_get_param_count (info));
4888 for (j = 0; j < ipa_get_param_count (info); j++)
4889 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4890 bp = bitpack_create (ob->main_stream);
4891 gcc_assert (info->analysis_done
4892 || ipa_get_param_count (info) == 0);
4893 gcc_assert (!info->node_enqueued);
4894 gcc_assert (!info->ipcp_orig_node);
4895 for (j = 0; j < ipa_get_param_count (info); j++)
4896 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4897 streamer_write_bitpack (&bp);
4898 for (j = 0; j < ipa_get_param_count (info); j++)
4899 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4900 for (e = node->callees; e; e = e->next_callee)
4901 {
4902 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4903
4904 streamer_write_uhwi (ob,
4905 ipa_get_cs_argument_count (args) * 2
4906 + (args->polymorphic_call_contexts != NULL));
4907 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4908 {
4909 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4910 if (args->polymorphic_call_contexts != NULL)
4911 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4912 }
4913 }
4914 for (e = node->indirect_calls; e; e = e->next_callee)
4915 {
4916 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4917
4918 streamer_write_uhwi (ob,
4919 ipa_get_cs_argument_count (args) * 2
4920 + (args->polymorphic_call_contexts != NULL));
4921 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4922 {
4923 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4924 if (args->polymorphic_call_contexts != NULL)
4925 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4926 }
4927 ipa_write_indirect_edge_info (ob, e);
4928 }
4929 }
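
/* Note the encoding of the per-edge argument count above: the low bit of
   the streamed number records whether polymorphic call contexts were
   streamed too, so the reader recovers both pieces with

     bool contexts_computed = count & 1;
     count /= 2;

   as ipa_read_node_info below does.  */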
4930
4931 /* Stream in NODE info from IB. */
4932
4933 static void
4934 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4935 struct data_in *data_in)
4936 {
4937 struct ipa_node_params *info = IPA_NODE_REF (node);
4938 int k;
4939 struct cgraph_edge *e;
4940 struct bitpack_d bp;
4941
4942 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4943
4944 for (k = 0; k < ipa_get_param_count (info); k++)
4945 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4946
4947 bp = streamer_read_bitpack (ib);
4948 if (ipa_get_param_count (info) != 0)
4949 info->analysis_done = true;
4950 info->node_enqueued = false;
4951 for (k = 0; k < ipa_get_param_count (info); k++)
4952 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4953 for (k = 0; k < ipa_get_param_count (info); k++)
4954 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4955 for (e = node->callees; e; e = e->next_callee)
4956 {
4957 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4958 int count = streamer_read_uhwi (ib);
4959 bool contexts_computed = count & 1;
4960 count /= 2;
4961
4962 if (!count)
4963 continue;
4964 vec_safe_grow_cleared (args->jump_functions, count);
4965 if (contexts_computed)
4966 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4967
4968 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4969 {
4970 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4971 data_in);
4972 if (contexts_computed)
4973 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4974 }
4975 }
4976 for (e = node->indirect_calls; e; e = e->next_callee)
4977 {
4978 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4979 int count = streamer_read_uhwi (ib);
4980 bool contexts_computed = count & 1;
4981 count /= 2;
4982
4983 if (count)
4984 {
4985 vec_safe_grow_cleared (args->jump_functions, count);
4986 if (contexts_computed)
4987 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4988 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4989 {
4990 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4991 data_in);
4992 if (contexts_computed)
4993 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4994 }
4995 }
4996 ipa_read_indirect_edge_info (ib, data_in, e);
4997 }
4998 }
4999
5000 /* Write jump functions for all functions in the current LTO partition. */
5001
5002 void
5003 ipa_prop_write_jump_functions (void)
5004 {
5005 struct cgraph_node *node;
5006 struct output_block *ob;
5007 unsigned int count = 0;
5008 lto_symtab_encoder_iterator lsei;
5009 lto_symtab_encoder_t encoder;
5010
5011 if (!ipa_node_params_sum)
5012 return;
5013
5014 ob = create_output_block (LTO_section_jump_functions);
5015 encoder = ob->decl_state->symtab_node_encoder;
5016 ob->symbol = NULL;
5017 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5018 lsei_next_function_in_partition (&lsei))
5019 {
5020 node = lsei_cgraph_node (lsei);
5021 if (node->has_gimple_body_p ()
5022 && IPA_NODE_REF (node) != NULL)
5023 count++;
5024 }
5025
5026 streamer_write_uhwi (ob, count);
5027
5028 /* Process all of the functions. */
5029 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5030 lsei_next_function_in_partition (&lsei))
5031 {
5032 node = lsei_cgraph_node (lsei);
5033 if (node->has_gimple_body_p ()
5034 && IPA_NODE_REF (node) != NULL)
5035 ipa_write_node_info (ob, node);
5036 }
5037 streamer_write_char_stream (ob->main_stream, 0);
5038 produce_asm (ob, NULL);
5039 destroy_output_block (ob);
5040 }
5041
5042 /* Read section in file FILE_DATA of length LEN with data DATA. */
5043
5044 static void
5045 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5046 size_t len)
5047 {
5048 const struct lto_function_header *header =
5049 (const struct lto_function_header *) data;
5050 const int cfg_offset = sizeof (struct lto_function_header);
5051 const int main_offset = cfg_offset + header->cfg_size;
5052 const int string_offset = main_offset + header->main_size;
5053 struct data_in *data_in;
5054 unsigned int i;
5055 unsigned int count;
5056
5057 lto_input_block ib_main ((const char *) data + main_offset,
5058 header->main_size, file_data->mode_table);
5059
5060 data_in =
5061 lto_data_in_create (file_data, (const char *) data + string_offset,
5062 header->string_size, vNULL);
5063 count = streamer_read_uhwi (&ib_main);
5064
5065 for (i = 0; i < count; i++)
5066 {
5067 unsigned int index;
5068 struct cgraph_node *node;
5069 lto_symtab_encoder_t encoder;
5070
5071 index = streamer_read_uhwi (&ib_main);
5072 encoder = file_data->symtab_node_encoder;
5073 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5074 index));
5075 gcc_assert (node->definition);
5076 ipa_read_node_info (&ib_main, node, data_in);
5077 }
5078 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5079 len);
5080 lto_data_in_delete (data_in);
5081 }
5082
5083 /* Read IPA-CP jump functions. */
5084
5085 void
5086 ipa_prop_read_jump_functions (void)
5087 {
5088 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5089 struct lto_file_decl_data *file_data;
5090 unsigned int j = 0;
5091
5092 ipa_check_create_node_params ();
5093 ipa_check_create_edge_args ();
5094 ipa_register_cgraph_hooks ();
5095
5096 while ((file_data = file_data_vec[j++]))
5097 {
5098 size_t len;
5099 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5100
5101 if (data)
5102 ipa_prop_read_section (file_data, data, len);
5103 }
5104 }
5105
5106 /* After merging units, we can get a mismatch in argument counts.
5107 Decl merging might also have rendered parameter lists obsolete.
5108 Also compute called_with_variable_arg info. */
5109
5110 void
5111 ipa_update_after_lto_read (void)
5112 {
5113 ipa_check_create_node_params ();
5114 ipa_check_create_edge_args ();
5115 }
5116
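/* Stream out the IPA-CP transformation summary of NODE -- its aggregate
   value replacements, known value ranges and known bits -- to OB.  */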
5117 void
5118 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5119 {
5120 int node_ref;
5121 unsigned int count = 0;
5122 lto_symtab_encoder_t encoder;
5123 struct ipa_agg_replacement_value *aggvals, *av;
5124
5125 aggvals = ipa_get_agg_replacements_for_node (node);
5126 encoder = ob->decl_state->symtab_node_encoder;
5127 node_ref = lto_symtab_encoder_encode (encoder, node);
5128 streamer_write_uhwi (ob, node_ref);
5129
5130 for (av = aggvals; av; av = av->next)
5131 count++;
5132 streamer_write_uhwi (ob, count);
5133
5134 for (av = aggvals; av; av = av->next)
5135 {
5136 struct bitpack_d bp;
5137
5138 streamer_write_uhwi (ob, av->offset);
5139 streamer_write_uhwi (ob, av->index);
5140 stream_write_tree (ob, av->value, true);
5141
5142 bp = bitpack_create (ob->main_stream);
5143 bp_pack_value (&bp, av->by_ref, 1);
5144 streamer_write_bitpack (&bp);
5145 }
5146
5147 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5148 if (ts && vec_safe_length (ts->m_vr) > 0)
5149 {
5150 count = ts->m_vr->length ();
5151 streamer_write_uhwi (ob, count);
5152 for (unsigned i = 0; i < count; ++i)
5153 {
5154 struct bitpack_d bp;
5155 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5156 bp = bitpack_create (ob->main_stream);
5157 bp_pack_value (&bp, parm_vr->known, 1);
5158 streamer_write_bitpack (&bp);
5159 if (parm_vr->known)
5160 {
5161 streamer_write_enum (ob->main_stream, value_range_type,
5162 VR_LAST, parm_vr->type);
5163 streamer_write_wide_int (ob, parm_vr->min);
5164 streamer_write_wide_int (ob, parm_vr->max);
5165 }
5166 }
5167 }
5168 else
5169 streamer_write_uhwi (ob, 0);
5170
5171 if (ts && vec_safe_length (ts->bits) > 0)
5172 {
5173 count = ts->bits->length ();
5174 streamer_write_uhwi (ob, count);
5175
5176 for (unsigned i = 0; i < count; ++i)
5177 {
5178 const ipa_bits& bits_jfunc = (*ts->bits)[i];
5179 struct bitpack_d bp = bitpack_create (ob->main_stream);
5180 bp_pack_value (&bp, bits_jfunc.known, 1);
5181 streamer_write_bitpack (&bp);
5182 if (bits_jfunc.known)
5183 {
5184 streamer_write_widest_int (ob, bits_jfunc.value);
5185 streamer_write_widest_int (ob, bits_jfunc.mask);
5186 }
5187 }
5188 }
5189 else
5190 streamer_write_uhwi (ob, 0);
5191 }
5192
5193 /* Stream in the IPA-CP transformation summary for NODE from IB. */
5194
5195 static void
5196 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5197 data_in *data_in)
5198 {
5199 struct ipa_agg_replacement_value *aggvals = NULL;
5200 unsigned int count, i;
5201
5202 count = streamer_read_uhwi (ib);
5203 for (i = 0; i < count; i++)
5204 {
5205 struct ipa_agg_replacement_value *av;
5206 struct bitpack_d bp;
5207
5208 av = ggc_alloc<ipa_agg_replacement_value> ();
5209 av->offset = streamer_read_uhwi (ib);
5210 av->index = streamer_read_uhwi (ib);
5211 av->value = stream_read_tree (ib, data_in);
5212 bp = streamer_read_bitpack (ib);
5213 av->by_ref = bp_unpack_value (&bp, 1);
5214 av->next = aggvals;
5215 aggvals = av;
5216 }
5217 ipa_set_node_agg_value_chain (node, aggvals);
5218
5219 count = streamer_read_uhwi (ib);
5220 if (count > 0)
5221 {
5222 ipcp_grow_transformations_if_necessary ();
5223
5224 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5225 vec_safe_grow_cleared (ts->m_vr, count);
5226 for (i = 0; i < count; i++)
5227 {
5228 ipa_vr *parm_vr;
5229 parm_vr = &(*ts->m_vr)[i];
5230 struct bitpack_d bp;
5231 bp = streamer_read_bitpack (ib);
5232 parm_vr->known = bp_unpack_value (&bp, 1);
5233 if (parm_vr->known)
5234 {
5235 parm_vr->type = streamer_read_enum (ib, value_range_type,
5236 VR_LAST);
5237 parm_vr->min = streamer_read_wide_int (ib);
5238 parm_vr->max = streamer_read_wide_int (ib);
5239 }
5240 }
5241 }
5242 count = streamer_read_uhwi (ib);
5243 if (count > 0)
5244 {
5245 ipcp_grow_transformations_if_necessary ();
5246
5247 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5248 vec_safe_grow_cleared (ts->bits, count);
5249
5250 for (i = 0; i < count; i++)
5251 {
5252 ipa_bits& bits_jfunc = (*ts->bits)[i];
5253 struct bitpack_d bp = streamer_read_bitpack (ib);
5254 bits_jfunc.known = bp_unpack_value (&bp, 1);
5255 if (bits_jfunc.known)
5256 {
5257 bits_jfunc.value = streamer_read_widest_int (ib);
5258 bits_jfunc.mask = streamer_read_widest_int (ib);
5259 }
5260 }
5261 }
5262 }
5263
5264 /* Write IPA-CP transformation summaries for all nodes in the partition. */
5265
5266 void
5267 ipcp_write_transformation_summaries (void)
5268 {
5269 struct cgraph_node *node;
5270 struct output_block *ob;
5271 unsigned int count = 0;
5272 lto_symtab_encoder_iterator lsei;
5273 lto_symtab_encoder_t encoder;
5274
5275 ob = create_output_block (LTO_section_ipcp_transform);
5276 encoder = ob->decl_state->symtab_node_encoder;
5277 ob->symbol = NULL;
5278 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5279 lsei_next_function_in_partition (&lsei))
5280 {
5281 node = lsei_cgraph_node (lsei);
5282 if (node->has_gimple_body_p ())
5283 count++;
5284 }
5285
5286 streamer_write_uhwi (ob, count);
5287
5288 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5289 lsei_next_function_in_partition (&lsei))
5290 {
5291 node = lsei_cgraph_node (lsei);
5292 if (node->has_gimple_body_p ())
5293 write_ipcp_transformation_info (ob, node);
5294 }
5295 streamer_write_char_stream (ob->main_stream, 0);
5296 produce_asm (ob, NULL);
5297 destroy_output_block (ob);
5298 }
5299
5300 /* Read replacements section in file FILE_DATA of length LEN with data
5301 DATA. */
5302
5303 static void
5304 read_replacements_section (struct lto_file_decl_data *file_data,
5305 const char *data,
5306 size_t len)
5307 {
5308 const struct lto_function_header *header =
5309 (const struct lto_function_header *) data;
5310 const int cfg_offset = sizeof (struct lto_function_header);
5311 const int main_offset = cfg_offset + header->cfg_size;
5312 const int string_offset = main_offset + header->main_size;
5313 struct data_in *data_in;
5314 unsigned int i;
5315 unsigned int count;
5316
5317 lto_input_block ib_main ((const char *) data + main_offset,
5318 header->main_size, file_data->mode_table);
5319
5320 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5321 header->string_size, vNULL);
5322 count = streamer_read_uhwi (&ib_main);
5323
5324 for (i = 0; i < count; i++)
5325 {
5326 unsigned int index;
5327 struct cgraph_node *node;
5328 lto_symtab_encoder_t encoder;
5329
5330 index = streamer_read_uhwi (&ib_main);
5331 encoder = file_data->symtab_node_encoder;
5332 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5333 index));
5334 gcc_assert (node->definition);
5335 read_ipcp_transformation_info (&ib_main, node, data_in);
5336 }
5337 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5338 len);
5339 lto_data_in_delete (data_in);
5340 }
5341
5342 /* Read IPA-CP transformation summaries. */
5343
5344 void
5345 ipcp_read_transformation_summaries (void)
5346 {
5347 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5348 struct lto_file_decl_data *file_data;
5349 unsigned int j = 0;
5350
5351 while ((file_data = file_data_vec[j++]))
5352 {
5353 size_t len;
5354 const char *data = lto_get_section_data (file_data,
5355 LTO_section_ipcp_transform,
5356 NULL, &len);
5357 if (data)
5358 read_replacements_section (file_data, data, len);
5359 }
5360 }
5361
5362 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5363 NODE. */
5364
5365 static void
5366 adjust_agg_replacement_values (struct cgraph_node *node,
5367 struct ipa_agg_replacement_value *aggval)
5368 {
5369 struct ipa_agg_replacement_value *v;
5370 int i, c = 0, d = 0, *adj;
5371
5372 if (!node->clone.combined_args_to_skip)
5373 return;
5374
5375 for (v = aggval; v; v = v->next)
5376 {
5377 gcc_assert (v->index >= 0);
5378 if (c < v->index)
5379 c = v->index;
5380 }
5381 c++;
5382
5383 adj = XALLOCAVEC (int, c);
5384 for (i = 0; i < c; i++)
5385 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5386 {
5387 adj[i] = -1;
5388 d++;
5389 }
5390 else
5391 adj[i] = i - d;
5392
5393 for (v = aggval; v; v = v->next)
5394 v->index = adj[v->index];
5395 }
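
/* For a hypothetical clone that skips arguments 0 and 2 of an original
   four, the mapping computed above is adj = { -1, 0, -1, 1 }: a
   replacement recorded for original parameter 3 is retargeted to
   parameter 1 of the clone, while one for original parameter 1 is
   retargeted to parameter 0.  */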
5396
5397 /* Dominator walker driving the ipcp modification phase. */
5398
5399 class ipcp_modif_dom_walker : public dom_walker
5400 {
5401 public:
5402 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5403 vec<ipa_param_descriptor> descs,
5404 struct ipa_agg_replacement_value *av,
5405 bool *sc, bool *cc)
5406 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5407 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5408
5409 virtual edge before_dom_children (basic_block);
5410
5411 private:
5412 struct ipa_func_body_info *m_fbi;
5413 vec<ipa_param_descriptor> m_descriptors;
5414 struct ipa_agg_replacement_value *m_aggval;
5415 bool *m_something_changed, *m_cfg_changed;
5416 };
5417
5418 edge
5419 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5420 {
5421 gimple_stmt_iterator gsi;
5422 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5423 {
5424 struct ipa_agg_replacement_value *v;
5425 gimple *stmt = gsi_stmt (gsi);
5426 tree rhs, val, t;
5427 HOST_WIDE_INT offset, size;
5428 int index;
5429 bool by_ref, vce;
5430
5431 if (!gimple_assign_load_p (stmt))
5432 continue;
5433 rhs = gimple_assign_rhs1 (stmt);
5434 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5435 continue;
5436
5437 vce = false;
5438 t = rhs;
5439 while (handled_component_p (t))
5440 {
5441 /* A V_C_E can do things like convert an array of integers to one
5442 bigger integer and similar things we do not handle below. */
5443 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5444 {
5445 vce = true;
5446 break;
5447 }
5448 t = TREE_OPERAND (t, 0);
5449 }
5450 if (vce)
5451 continue;
5452
5453 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5454 &offset, &size, &by_ref))
5455 continue;
5456 for (v = m_aggval; v; v = v->next)
5457 if (v->index == index
5458 && v->offset == offset)
5459 break;
5460 if (!v
5461 || v->by_ref != by_ref
5462 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5463 continue;
5464
5465 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5466 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5467 {
5468 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5469 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5470 else if (TYPE_SIZE (TREE_TYPE (rhs))
5471 == TYPE_SIZE (TREE_TYPE (v->value)))
5472 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5473 else
5474 {
5475 if (dump_file)
5476 {
5477 fprintf (dump_file, " const ");
5478 print_generic_expr (dump_file, v->value, 0);
5479 fprintf (dump_file, " can't be converted to type of ");
5480 print_generic_expr (dump_file, rhs, 0);
5481 fprintf (dump_file, "\n");
5482 }
5483 continue;
5484 }
5485 }
5486 else
5487 val = v->value;
5488
5489 if (dump_file && (dump_flags & TDF_DETAILS))
5490 {
5491 fprintf (dump_file, "Modifying stmt:\n ");
5492 print_gimple_stmt (dump_file, stmt, 0, 0);
5493 }
5494 gimple_assign_set_rhs_from_tree (&gsi, val);
5495 update_stmt (stmt);
5496
5497 if (dump_file && (dump_flags & TDF_DETAILS))
5498 {
5499 fprintf (dump_file, "into:\n ");
5500 print_gimple_stmt (dump_file, stmt, 0, 0);
5501 fprintf (dump_file, "\n");
5502 }
5503
5504 *m_something_changed = true;
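/* Replacing the load by a constant may make the statement non-throwing;
   if so, EH edges out of this block become dead and purging them changes
   the CFG.  */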
5505 if (maybe_clean_eh_stmt (stmt)
5506 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5507 *m_cfg_changed = true;
5508 }
5509 return NULL;
5510 }
5511
5512 /* Update the known-bits information recorded for the formal parameters
5513 of NODE, as described in its ipcp_transformation_summary.  */
5514
5515 static void
5516 ipcp_update_bits (struct cgraph_node *node)
5517 {
5518 tree parm = DECL_ARGUMENTS (node->decl);
5519 tree next_parm = parm;
5520 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5521
5522 if (!ts || vec_safe_length (ts->bits) == 0)
5523 return;
5524
5525 vec<ipa_bits, va_gc> &bits = *ts->bits;
5526 unsigned count = bits.length ();
5527
5528 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5529 {
5530 if (node->clone.combined_args_to_skip
5531 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5532 continue;
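/* A skipped argument has no PARM_DECL in the clone, so PARM must not be
   advanced for it; NEXT_PARM is therefore only moved forward below, after
   this check.  */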
5533
5534 gcc_checking_assert (parm);
5535 next_parm = DECL_CHAIN (parm);
5536
5537 if (!bits[i].known
5538 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
5539 || !is_gimple_reg (parm))
5540 continue;
5541
5542 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5543 if (!ddef)
5544 continue;
5545
5546 if (dump_file)
5547 {
5548 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5549 print_hex (bits[i].mask, dump_file);
5550 fprintf (dump_file, "\n");
5551 }
5552
5553 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5554 {
5555 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5556 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5557
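/* A set bit in the mask means that bit of the parameter is unknown, so
   the conservative set of possibly-nonzero bits is mask | value;
   e.g. (hypothetical) mask 0xf0 with value 0x3 yields nonzero bits
   0xf3.  */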
5558 wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
5559 | wide_int::from (bits[i].value, prec, sgn);
5560 set_nonzero_bits (ddef, nonzero_bits);
5561 }
5562 else
5563 {
5564 unsigned tem = bits[i].mask.to_uhwi ();
5565 unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
5566 unsigned align = tem & -tem;
5567 unsigned misalign = bitpos & (align - 1);
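/* TEM & -TEM isolates the lowest set (i.e. unknown) bit of the mask; all
   bits below it are known, so that power of two is the provable alignment
   and BITPOS modulo it the misalignment.  E.g. (hypothetical) mask ...f8
   with value 0x4 gives align 8 and misalign 4.  */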
5568
5569 if (align > 1)
5570 {
5571 if (dump_file)
5572 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5573
5574 unsigned old_align, old_misalign;
5575 struct ptr_info_def *pi = get_ptr_info (ddef);
5576 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5577
5578 if (old_known
5579 && old_align > align)
5580 {
5581 if (dump_file)
5582 {
5583 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5584 if ((old_misalign & (align - 1)) != misalign)
5585 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5586 old_misalign, misalign);
5587 }
5588 continue;
5589 }
5590
5591 if (old_known
5592 && ((misalign & (old_align - 1)) != old_misalign)
5593 && dump_file)
5594 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5595 old_misalign, misalign);
5596
5597 set_ptr_info_alignment (pi, align, misalign);
5598 }
5599 }
5600 }
5601 }
5602
5603 /* Update the recorded value ranges of the formal parameters of NODE, as
5604 described in its ipcp_transformation_summary.  */
5605
5606 static void
5607 ipcp_update_vr (struct cgraph_node *node)
5608 {
5609 tree fndecl = node->decl;
5610 tree parm = DECL_ARGUMENTS (fndecl);
5611 tree next_parm = parm;
5612 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5613 if (!ts || vec_safe_length (ts->m_vr) == 0)
5614 return;
5615 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5616 unsigned count = vr.length ();
5617
5618 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5619 {
5620 if (node->clone.combined_args_to_skip
5621 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5622 continue;
5623 gcc_checking_assert (parm);
5624 next_parm = DECL_CHAIN (parm);
5625 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5626
5627 if (!ddef || !is_gimple_reg (parm))
5628 continue;
5629
5630 if (vr[i].known
5631 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5632 {
5633 tree type = TREE_TYPE (ddef);
5634 unsigned prec = TYPE_PRECISION (type);
5635 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5636 {
5637 if (dump_file)
5638 {
5639 fprintf (dump_file, "Setting value range of param %u ", i);
5640 fprintf (dump_file, "%s[",
5641 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5642 print_decs (vr[i].min, dump_file);
5643 fprintf (dump_file, ", ");
5644 print_decs (vr[i].max, dump_file);
5645 fprintf (dump_file, "]\n");
5646 }
5647 set_range_info (ddef, vr[i].type,
5648 wide_int_storage::from (vr[i].min, prec,
5649 TYPE_SIGN (type)),
5650 wide_int_storage::from (vr[i].max, prec,
5651 TYPE_SIGN (type)));
5652 }
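/* An anti-range ~[0, 0] on a pointer parameter says the value is never
   null; record that as the nonnull flag rather than as a numeric
   range.  */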
5653 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5654 && vr[i].type == VR_ANTI_RANGE
5655 && wi::eq_p (vr[i].min, 0)
5656 && wi::eq_p (vr[i].max, 0))
5657 {
5658 if (dump_file)
5659 fprintf (dump_file, "Setting nonnull for %u\n", i);
5660 set_ptr_nonnull (ddef);
5661 }
5662 }
5663 }
5664 }
5665
5666 /* IPCP transformation phase applying the recorded aggregate replacements, known bits and value ranges to the body of NODE.  */
5667
5668 unsigned int
5669 ipcp_transform_function (struct cgraph_node *node)
5670 {
5671 vec<ipa_param_descriptor> descriptors = vNULL;
5672 struct ipa_func_body_info fbi;
5673 struct ipa_agg_replacement_value *aggval;
5674 int param_count;
5675 bool cfg_changed = false, something_changed = false;
5676
5677 gcc_checking_assert (cfun);
5678 gcc_checking_assert (current_function_decl);
5679
5680 if (dump_file)
5681 fprintf (dump_file, "Modification phase of node %s/%i\n",
5682 node->name (), node->order);
5683
5684 ipcp_update_bits (node);
5685 ipcp_update_vr (node);
5686 aggval = ipa_get_agg_replacements_for_node (node);
5687 if (!aggval)
5688 return 0;
5689 param_count = count_formal_params (node->decl);
5690 if (param_count == 0)
5691 return 0;
5692 adjust_agg_replacement_values (node, aggval);
5693 if (dump_file)
5694 ipa_dump_agg_replacement_values (dump_file, aggval);
5695
5696 fbi.node = node;
5697 fbi.info = NULL;
5698 fbi.bb_infos = vNULL;
5699 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5700 fbi.param_count = param_count;
5701 fbi.aa_walked = 0;
5702
5703 descriptors.safe_grow_cleared (param_count);
5704 ipa_populate_param_decls (node, descriptors);
5705 calculate_dominance_info (CDI_DOMINATORS);
5706 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5707 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5708
5709 int i;
5710 struct ipa_bb_info *bi;
5711 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5712 free_ipa_bb_info (bi);
5713 fbi.bb_infos.release ();
5714 free_dominance_info (CDI_DOMINATORS);
5715 (*ipcp_transformations)[node->uid].agg_values = NULL;
5716 (*ipcp_transformations)[node->uid].bits = NULL;
5717 (*ipcp_transformations)[node->uid].m_vr = NULL;
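/* Drop the now-applied data from the transformation summary so that it
   cannot be applied a second time.  */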
5718
5719 descriptors.release ();
5720
5721 if (!something_changed)
5722 return 0;
5723 else if (cfg_changed)
5724 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5725 else
5726 return TODO_update_ssa_only_virtuals;
5727 }