1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55 #include "tree-cfgcleanup.h"
56
57 /* Function summary where the parameter infos are actually stored. */
58 ipa_node_params_t *ipa_node_params_sum = NULL;
59
60 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
61
62 /* Edge summary for IPA-CP edge information. */
63 ipa_edge_args_sum_t *ipa_edge_args_sum;
64
65 /* Traits for a hash table for reusing already existing ipa_bits. */
66
67 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
68 {
69 typedef ipa_bits *value_type;
70 typedef ipa_bits *compare_type;
71 static hashval_t
72 hash (const ipa_bits *p)
73 {
74 hashval_t t = (hashval_t) p->value.to_shwi ();
75 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
76 }
77 static bool
78 equal (const ipa_bits *a, const ipa_bits *b)
79 {
80 return a->value == b->value && a->mask == b->mask;
81 }
82 static void
83 mark_empty (ipa_bits *&p)
84 {
85 p = NULL;
86 }
87 static bool
88 is_empty (const ipa_bits *p)
89 {
90 return p == NULL;
91 }
92 static bool
93 is_deleted (const ipa_bits *p)
94 {
95 return p == reinterpret_cast<const ipa_bits *> (1);
96 }
97 static void
98 mark_deleted (ipa_bits *&p)
99 {
100 p = reinterpret_cast<ipa_bits *> (1);
101 }
102 };
103
104 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
105 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
107 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
108 the equiv bitmap is not hashed and is expected to be NULL. */
109
110 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
111 {
112 typedef value_range_base *value_type;
113 typedef value_range_base *compare_type;
114 static hashval_t
115 hash (const value_range_base *p)
116 {
117 inchash::hash hstate (p->kind ());
118 inchash::add_expr (p->min (), hstate);
119 inchash::add_expr (p->max (), hstate);
120 return hstate.end ();
121 }
122 static bool
123 equal (const value_range_base *a, const value_range_base *b)
124 {
125 return a->equal_p (*b);
126 }
127 static void
128 mark_empty (value_range_base *&p)
129 {
130 p = NULL;
131 }
132 static bool
133 is_empty (const value_range_base *p)
134 {
135 return p == NULL;
136 }
137 static bool
138 is_deleted (const value_range_base *p)
139 {
140 return p == reinterpret_cast<const value_range_base *> (1);
141 }
142 static void
143 mark_deleted (value_range_base *&p)
144 {
145 p = reinterpret_cast<value_range_base *> (1);
146 }
147 };
148
149 /* Hash table for avoiding repeated allocations of equal value_ranges. */
150 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
152 /* Holders of ipa cgraph hooks: */
153 static struct cgraph_node_hook_list *function_insertion_hook_holder;
154
155 /* Description of a reference to an IPA constant. */
156 struct ipa_cst_ref_desc
157 {
158 /* Edge that corresponds to the statement which took the reference. */
159 struct cgraph_edge *cs;
160 /* Linked list of duplicates created when call graph edges are cloned. */
161 struct ipa_cst_ref_desc *next_duplicate;
162 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
163 is out of control. */
164 int refcount;
165 };
166
167 /* Allocation pool for reference descriptions. */
168
169 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
170 ("IPA-PROP ref descriptions");
171
172 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175 static bool
176 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177 {
178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
179
180 if (!fs_opts)
181 return false;
182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
183 }
184
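/* For illustration (an assumed example, not code from this file): a function
   declared as

     __attribute__ ((optimize ("O0"))) void f (int *p);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize set to 0, so the
   predicate above returns true and the function is not analyzed for IPA-CP;
   the same happens when flag_ipa_cp is disabled for that function.  */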
185 /* Return index of the formal whose tree is PTREE, looking it up in the
186 parameter DESCRIPTORS. */
187
188 static int
189 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
191 {
192 int i, count;
193
194 count = vec_safe_length (descriptors);
195 for (i = 0; i < count; i++)
196 if ((*descriptors)[i].decl_or_type == ptree)
197 return i;
198
199 return -1;
200 }
201
202 /* Return index of the formal whose tree is PTREE in function which corresponds
203 to INFO. */
204
205 int
206 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
207 {
208 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209 }
210
211 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
212 NODE. */
213
214 static void
215 ipa_populate_param_decls (struct cgraph_node *node,
216 vec<ipa_param_descriptor, va_gc> &descriptors)
217 {
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
222
223 fndecl = node->decl;
224 gcc_assert (gimple_has_body_p (fndecl));
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
228 {
229 descriptors[param_num].decl_or_type = parm;
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
232 param_num++;
233 }
234 }
235
236 /* Return how many formal parameters FNDECL has. */
237
238 int
239 count_formal_params (tree fndecl)
240 {
241 tree parm;
242 int count = 0;
243 gcc_assert (gimple_has_body_p (fndecl));
244
245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
246 count++;
247
248 return count;
249 }
250
251 /* Dump the Ith formal parameter of the function corresponding to INFO to
252 FILE, together with its declaration or type if it is recorded in the
253 parameter descriptors. */
254
255 void
256 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
257 {
258 fprintf (file, "param #%i", i);
259 if ((*info->descriptors)[i].decl_or_type)
260 {
261 fprintf (file, " ");
262 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
263 }
264 }
265
266 /* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
268
269 static bool
270 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271 {
272 class ipa_node_params *info = IPA_NODE_REF (node);
273
274 if (!info->descriptors && param_count)
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
281 }
282
283 /* Initialize the ipa_node_params structure associated with NODE by counting
284 the function parameters, creating the descriptors and populating their
285 param_decls. */
286
287 void
288 ipa_initialize_node_params (struct cgraph_node *node)
289 {
290 class ipa_node_params *info = IPA_NODE_REF (node);
291
292 if (!info->descriptors
293 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
294 ipa_populate_param_decls (node, *info->descriptors);
295 }
296
297 /* Print the jump functions associated with call graph edge CS to file F. */
298
299 static void
300 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
301 {
302 int i, count;
303
304 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
305 for (i = 0; i < count; i++)
306 {
307 struct ipa_jump_func *jump_func;
308 enum jump_func_type type;
309
310 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
311 type = jump_func->type;
312
313 fprintf (f, " param %d: ", i);
314 if (type == IPA_JF_UNKNOWN)
315 fprintf (f, "UNKNOWN\n");
316 else if (type == IPA_JF_CONST)
317 {
318 tree val = jump_func->value.constant.value;
319 fprintf (f, "CONST: ");
320 print_generic_expr (f, val);
321 if (TREE_CODE (val) == ADDR_EXPR
322 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
323 {
324 fprintf (f, " -> ");
325 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
326 }
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_PASS_THROUGH)
330 {
331 fprintf (f, "PASS THROUGH: ");
332 fprintf (f, "%d, op %s",
333 jump_func->value.pass_through.formal_id,
334 get_tree_code_name(jump_func->value.pass_through.operation));
335 if (jump_func->value.pass_through.operation != NOP_EXPR)
336 {
337 fprintf (f, " ");
338 print_generic_expr (f, jump_func->value.pass_through.operand);
339 }
340 if (jump_func->value.pass_through.agg_preserved)
341 fprintf (f, ", agg_preserved");
342 fprintf (f, "\n");
343 }
344 else if (type == IPA_JF_ANCESTOR)
345 {
346 fprintf (f, "ANCESTOR: ");
347 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
348 jump_func->value.ancestor.formal_id,
349 jump_func->value.ancestor.offset);
350 if (jump_func->value.ancestor.agg_preserved)
351 fprintf (f, ", agg_preserved");
352 fprintf (f, "\n");
353 }
354
355 if (jump_func->agg.items)
356 {
357 struct ipa_agg_jf_item *item;
358 int j;
359
360 fprintf (f, " Aggregate passed by %s:\n",
361 jump_func->agg.by_ref ? "reference" : "value");
362 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
363 {
364 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
365 item->offset);
366 if (TYPE_P (item->value))
367 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
368 tree_to_uhwi (TYPE_SIZE (item->value)));
369 else
370 {
371 fprintf (f, "cst: ");
372 print_generic_expr (f, item->value);
373 }
374 fprintf (f, "\n");
375 }
376 }
377
378 class ipa_polymorphic_call_context *ctx
379 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
380 if (ctx && !ctx->useless_p ())
381 {
382 fprintf (f, " Context: ");
383 ctx->dump (f);
384 }
385
386 if (jump_func->bits)
387 {
388 fprintf (f, " value: ");
389 print_hex (jump_func->bits->value, f);
390 fprintf (f, ", mask: ");
391 print_hex (jump_func->bits->mask, f);
392 fprintf (f, "\n");
393 }
394 else
395 fprintf (f, " Unknown bits\n");
396
397 if (jump_func->m_vr)
398 {
399 fprintf (f, " VR ");
400 fprintf (f, "%s[",
401 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
402 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
403 fprintf (f, ", ");
404 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
405 fprintf (f, "]\n");
406 }
407 else
408 fprintf (f, " Unknown VR\n");
409 }
410 }
411
412
413 /* Print the jump functions of all arguments on all call graph edges going from
414 NODE to file F. */
415
416 void
417 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
418 {
419 struct cgraph_edge *cs;
420
421 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
422 for (cs = node->callees; cs; cs = cs->next_callee)
423 {
424 if (!ipa_edge_args_info_available_for_edge_p (cs))
425 continue;
426
427 fprintf (f, " callsite %s -> %s : \n",
428 node->dump_name (),
429 cs->callee->dump_name ());
430 ipa_print_node_jump_functions_for_edge (f, cs);
431 }
432
433 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
434 {
435 class cgraph_indirect_call_info *ii;
436 if (!ipa_edge_args_info_available_for_edge_p (cs))
437 continue;
438
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
443 ii->member_ptr ? "member ptr" : "aggregate",
444 ii->param_index, ii->offset,
445 ii->by_ref ? "by reference" : "by value");
446 else
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
451
452 if (cs->call_stmt)
453 {
454 fprintf (f, ", for stmt ");
455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
456 }
457 else
458 fprintf (f, "\n");
459 if (ii->polymorphic)
460 ii->context.dump (f);
461 ipa_print_node_jump_functions_for_edge (f, cs);
462 }
463 }
464
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
466
467 void
468 ipa_print_all_jump_functions (FILE *f)
469 {
470 struct cgraph_node *node;
471
472 fprintf (f, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node)
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477 }
478
479 /* Set JFUNC to be an unknown jump function carrying no information. */
480
481 static void
482 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483 {
484 jfunc->type = IPA_JF_UNKNOWN;
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
487 }
488
489 /* Set DST to be a copy of the jump function SRC (to be used by jump function
490 combination code). The two functions will share their rdesc. */
491
492 static void
493 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496 {
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500 }
501
502 /* Set JFUNC to be a constant jump function. */
503
504 static void
505 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
507 {
508 jfunc->type = IPA_JF_CONST;
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
515
516 rdesc = ipa_refdesc_pool.allocate ();
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
524 }
525
526 /* Set JFUNC to be a simple pass-through jump function. */
527 static void
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
529 bool agg_preserved)
530 {
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
535 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 }
537
538 /* Set JFUNC to be a unary pass-through jump function. */
539
540 static void
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543 {
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549 }
550 /* Set JFUNC to be an arithmetic pass through jump function. */
551
552 static void
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555 {
556 jfunc->type = IPA_JF_PASS_THROUGH;
557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
560 jfunc->value.pass_through.agg_preserved = false;
561 }
562
563 /* Set JFUNC to be an ancestor jump function. */
564
565 static void
566 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
567 int formal_id, bool agg_preserved)
568 {
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
572 jfunc->value.ancestor.agg_preserved = agg_preserved;
573 }
574
575 /* Get IPA BB information about the given BB. FBI is the context of analysis
576 of this function body. */
577
578 static struct ipa_bb_info *
579 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
580 {
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583 }
584
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
587
588 struct prop_type_change_info
589 {
590 /* Offset into the object where there is the virtual method pointer we are
591 looking for. */
592 HOST_WIDE_INT offset;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
594 type change. */
595 tree object;
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed;
598 };
599
600 /* Return true if STMT can modify a virtual method table pointer.
601
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
605 pointers and that constructors essentially look the following way:
606
607 1) The very first thing they do is call the constructors of those ancestor
608 sub-objects that have them.
609
610 2) Then the VMT pointers of this object and all its ancestors are set to new
611 values corresponding to the type the constructor belongs to.
612
613 3) Only afterwards is other stuff run, such as constructors of member
614 sub-objects and the code written by the user. Only this may include calling
615 virtual functions, directly or indirectly.
616
617 There is no other way to call a constructor of an ancestor sub-object.
619
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
623
624 The most important fact to derive from the above is that if, for some
625 statement in section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop, we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
635 */
636
637 static bool
638 stmt_may_be_vtbl_ptr_store (gimple *stmt)
639 {
640 if (is_gimple_call (stmt))
641 return false;
642 if (gimple_clobber_p (stmt))
643 return false;
644 else if (is_gimple_assign (stmt))
645 {
646 tree lhs = gimple_assign_lhs (stmt);
647
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
649 {
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
652 return false;
653
654 if (TREE_CODE (lhs) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
656 return false;
657 /* In the future we might want to use get_ref_base_and_extent to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost like if it was a component ref. */
660 }
661 }
662 return true;
663 }
664
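/* Purely to illustrate the assumptions documented above (a rough sketch, not
   code used anywhere in this file), a constructor is expected to look like

     Derived::Derived ()
     {
       Base::Base ();                  // 1) ancestor sub-object ctors first
       this->vptr = &Derived_vtable;   // 2) then all VMT pointers are stored
       member_ctors_and_user_code ();  // 3) only then anything else runs
     }

   so when walking backwards from a statement in section 3, calls can be
   ignored until the VMT stores of section 2 are reached.  */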
665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointer. It stores its result into DATA, which points to a
668 prop_type_change_info structure. */
669
670 static bool
671 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672 {
673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683 }
684
685 /* See if ARG is a PARAM_DECL describing an instance passed by pointer
686 or reference in FUNCTION. Return true if the dynamic type may change
687 between the beginning of the function and the point where CALL is invoked.
688
689 Generally functions are not allowed to change the type of such instances,
690 but they may call destructors. We assume that methods cannot destroy the
691 THIS pointer. Also, as a special case, constructors and destructors may
692 change the type of the THIS pointer. */
693
694 static bool
695 param_type_may_change_p (tree function, tree arg, gimple *call)
696 {
697 /* Pure functions cannot make any changes to the dynamic type;
698 that requires writing to memory. */
699 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
700 return false;
701 /* We need to check if we are within an inlined constructor
702 or destructor (ideally we would have a way to check that the
703 inlined cdtor is actually working on ARG, but we don't have
704 an easy way to tell, so we punt on all non-pure cdtors).
705 We may also record the types of cdtors and match them once
706 we know the type of the instance.
707
708 Also code unification optimizations may merge calls from
709 different blocks making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
712 return true;
713 if (TREE_CODE (arg) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg)
715 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
716 {
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
719 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
720 /* THIS pointer of a method - here we want to watch constructors
721 and destructors as those definitely may change the dynamic
722 type. */
723 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function)
725 && !DECL_CXX_DESTRUCTOR_P (function)
726 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
727 {
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
730 block = BLOCK_SUPERCONTEXT (block))
731 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
732 return true;
733 return false;
734 }
735 }
736 return true;
737 }
738
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
741 it is, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
745
746 This is a helper function for detect_type_change and detect_type_change_ssa
747 that does the heavy work, which is usually unnecessary. */
748
749 static bool
750 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
751 tree base, tree comp_type, gcall *call,
752 struct ipa_jump_func *jfunc,
753 HOST_WIDE_INT offset)
754 {
755 struct prop_type_change_info tci;
756 ao_ref ao;
757
758 gcc_checking_assert (DECL_P (arg)
759 || TREE_CODE (arg) == MEM_REF
760 || handled_component_p (arg));
761
762 comp_type = TYPE_MAIN_VARIANT (comp_type);
763
764 /* Const calls cannot call virtual methods through VMT and so type changes do
765 not matter. */
766 if (!flag_devirtualize || !gimple_vuse (call)
767 /* Be sure expected_type is polymorphic. */
768 || !comp_type
769 || TREE_CODE (comp_type) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
772 return true;
773
774 ao_ref_init (&ao, arg);
775 ao.base = base;
776 ao.offset = offset;
777 ao.size = POINTER_SIZE;
778 ao.max_size = ao.size;
779
780 tci.offset = offset;
781 tci.object = get_base_address (arg);
782 tci.type_maybe_changed = false;
783
784 int walked
785 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
786 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
787
788 if (walked >= 0 && !tci.type_maybe_changed)
789 return false;
790
791 ipa_set_jf_unknown (jfunc);
792 return true;
793 }
794
795 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
796 If it is, return true and fill in the jump function JFUNC with relevant type
797 information or set it to unknown. ARG is the object itself (not a pointer
798 to it, unless dereferenced). BASE is the base of the memory access as
799 returned by get_ref_base_and_extent, as is the offset. */
800
801 static bool
802 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
803 tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
804 HOST_WIDE_INT offset)
805 {
806 if (!flag_devirtualize)
807 return false;
808
809 if (TREE_CODE (base) == MEM_REF
810 && !param_type_may_change_p (current_function_decl,
811 TREE_OPERAND (base, 0),
812 call))
813 return false;
814 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
815 call, jfunc, offset);
816 }
817
818 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
819 SSA name (its dereference will become the base and the offset is assumed to
820 be zero). */
821
822 static bool
823 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
824 gcall *call, struct ipa_jump_func *jfunc)
825 {
826 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
827 if (!flag_devirtualize
828 || !POINTER_TYPE_P (TREE_TYPE (arg)))
829 return false;
830
831 if (!param_type_may_change_p (current_function_decl, arg, call))
832 return false;
833
834 arg = build2 (MEM_REF, ptr_type_node, arg,
835 build_int_cst (ptr_type_node, 0));
836
837 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
838 call, jfunc, 0);
839 }
840
841 /* Callback of walk_aliased_vdefs. Records the fact that it has been invoked
842 in the boolean variable pointed to by DATA. */
843
844 static bool
845 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
846 void *data)
847 {
848 bool *b = (bool *) data;
849 *b = true;
850 return true;
851 }
852
853 /* Find the nearest valid aa status for parameter specified by INDEX that
854 dominates BB. */
855
856 static struct ipa_param_aa_status *
857 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
858 int index)
859 {
860 while (true)
861 {
862 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
863 if (!bb)
864 return NULL;
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (!bi->param_aa_statuses.is_empty ()
867 && bi->param_aa_statuses[index].valid)
868 return &bi->param_aa_statuses[index];
869 }
870 }
871
872 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
873 structures and/or initialize the result with a dominating description as
874 necessary. */
875
876 static struct ipa_param_aa_status *
877 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
878 int index)
879 {
880 gcc_checking_assert (fbi);
881 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
882 if (bi->param_aa_statuses.is_empty ())
883 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
884 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
885 if (!paa->valid)
886 {
887 gcc_checking_assert (!paa->parm_modified
888 && !paa->ref_modified
889 && !paa->pt_modified);
890 struct ipa_param_aa_status *dom_paa;
891 dom_paa = find_dominating_aa_status (fbi, bb, index);
892 if (dom_paa)
893 *paa = *dom_paa;
894 else
895 paa->valid = true;
896 }
897
898 return paa;
899 }
900
901 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
902 a value known not to be modified in this function before reaching the
903 statement STMT. FBI holds information about the function that we have
904 gathered so far but that does not survive the summary building stage. */
905
906 static bool
907 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
908 gimple *stmt, tree parm_load)
909 {
910 struct ipa_param_aa_status *paa;
911 bool modified = false;
912 ao_ref refd;
913
914 tree base = get_base_address (parm_load);
915 gcc_assert (TREE_CODE (base) == PARM_DECL);
916 if (TREE_READONLY (base))
917 return true;
918
919 gcc_checking_assert (fbi);
920 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
921 if (paa->parm_modified)
922 return false;
923
924 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
925 ao_ref_init (&refd, parm_load);
926 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
927 &modified, NULL, NULL,
928 fbi->aa_walk_budget + 1);
929 if (walked < 0)
930 {
931 modified = true;
932 if (fbi)
933 fbi->aa_walk_budget = 0;
934 }
935 else if (fbi)
936 fbi->aa_walk_budget -= walked;
937 if (paa && modified)
938 paa->parm_modified = true;
939 return !modified;
940 }
941
942 /* If STMT is an assignment that loads a value from a parameter declaration
943 that has not been modified, return the index of the parameter in
944 ipa_node_params. Otherwise return -1. */
945
946 static int
947 load_from_unmodified_param (struct ipa_func_body_info *fbi,
948 vec<ipa_param_descriptor, va_gc> *descriptors,
949 gimple *stmt)
950 {
951 int index;
952 tree op1;
953
954 if (!gimple_assign_single_p (stmt))
955 return -1;
956
957 op1 = gimple_assign_rhs1 (stmt);
958 if (TREE_CODE (op1) != PARM_DECL)
959 return -1;
960
961 index = ipa_get_param_decl_index_1 (descriptors, op1);
962 if (index < 0
963 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
964 return -1;
965
966 return index;
967 }
968
969 /* Return true if memory reference REF (which must be a load through parameter
970 with INDEX) loads data that are known to be unmodified in this function
971 before reaching statement STMT. */
972
973 static bool
974 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
975 int index, gimple *stmt, tree ref)
976 {
977 struct ipa_param_aa_status *paa;
978 bool modified = false;
979 ao_ref refd;
980
981 gcc_checking_assert (fbi);
982 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
983 if (paa->ref_modified)
984 return false;
985
986 gcc_checking_assert (gimple_vuse (stmt));
987 ao_ref_init (&refd, ref);
988 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
989 &modified, NULL, NULL,
990 fbi->aa_walk_budget + 1);
991 if (walked < 0)
992 {
993 modified = true;
994 fbi->aa_walk_budget = 0;
995 }
996 else
997 fbi->aa_walk_budget -= walked;
998 if (modified)
999 paa->ref_modified = true;
1000 return !modified;
1001 }
1002
1003 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1004 is known to be unmodified in this function before reaching call statement
1005 CALL into which it is passed. FBI describes the function body. */
1006
1007 static bool
1008 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1009 gimple *call, tree parm)
1010 {
1011 bool modified = false;
1012 ao_ref refd;
1013
1014 /* It's unnecessary to calculate anything about memory contents for a const
1015 function because it is not going to use it. But do not cache the result
1016 either. Also, no such calculations for non-pointers. */
1017 if (!gimple_vuse (call)
1018 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1019 return false;
1020
1021 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1022 gimple_bb (call),
1023 index);
1024 if (paa->pt_modified)
1025 return false;
1026
1027 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1028 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1029 &modified, NULL, NULL,
1030 fbi->aa_walk_budget + 1);
1031 if (walked < 0)
1032 {
1033 fbi->aa_walk_budget = 0;
1034 modified = true;
1035 }
1036 else
1037 fbi->aa_walk_budget -= walked;
1038 if (modified)
1039 paa->pt_modified = true;
1040 return !modified;
1041 }
1042
1043 /* Return true if we can prove that OP is a memory reference loading
1044 data from an aggregate passed as a parameter.
1045
1046 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1047 false if it cannot prove that the value has not been modified before the
1048 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1049 if it cannot prove the value has not been modified, in that case it will
1050 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1051
1052 INFO and PARMS_AINFO describe parameters of the current function (but the
1053 latter can be NULL), STMT is the load statement. If function returns true,
1054 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1055 within the aggregate and whether it is a load from a value passed by
1056 reference respectively. */
1057
1058 bool
1059 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1060 vec<ipa_param_descriptor, va_gc> *descriptors,
1061 gimple *stmt, tree op, int *index_p,
1062 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1063 bool *by_ref_p, bool *guaranteed_unmodified)
1064 {
1065 int index;
1066 HOST_WIDE_INT size;
1067 bool reverse;
1068 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1069
1070 if (!base)
1071 return false;
1072
1073 if (DECL_P (base))
1074 {
1075 int index = ipa_get_param_decl_index_1 (descriptors, base);
1076 if (index >= 0
1077 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1078 {
1079 *index_p = index;
1080 *by_ref_p = false;
1081 if (size_p)
1082 *size_p = size;
1083 if (guaranteed_unmodified)
1084 *guaranteed_unmodified = true;
1085 return true;
1086 }
1087 return false;
1088 }
1089
1090 if (TREE_CODE (base) != MEM_REF
1091 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1092 || !integer_zerop (TREE_OPERAND (base, 1)))
1093 return false;
1094
1095 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1096 {
1097 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1098 index = ipa_get_param_decl_index_1 (descriptors, parm);
1099 }
1100 else
1101 {
1102 /* This branch catches situations where a pointer parameter is not a
1103 gimple register, for example:
1104
1105 void hip7(S*) (struct S * p)
1106 {
1107 void (*<T2e4>) (struct S *) D.1867;
1108 struct S * p.1;
1109
1110 <bb 2>:
1111 p.1_1 = p;
1112 D.1867_2 = p.1_1->f;
1113 D.1867_2 ();
1114 gdp = &p;
1115 */
1116
1117 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1118 index = load_from_unmodified_param (fbi, descriptors, def);
1119 }
1120
1121 if (index >= 0)
1122 {
1123 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1124 if (!data_preserved && !guaranteed_unmodified)
1125 return false;
1126
1127 *index_p = index;
1128 *by_ref_p = true;
1129 if (size_p)
1130 *size_p = size;
1131 if (guaranteed_unmodified)
1132 *guaranteed_unmodified = data_preserved;
1133 return true;
1134 }
1135 return false;
1136 }
1137
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1142
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1147
1148 foo (int a)
1149 {
1150 int a.0;
1151
1152 a.0_2 = a;
1153 bar (a.0_2);
1154
1155 2) The passed value can be described by a simple arithmetic pass-through
1156 jump function. E.g.
1157
1158 foo (int a)
1159 {
1160 int D.2064;
1161
1162 D.2064_4 = a.1(D) + 4;
1163 bar (D.2064_4);
1164
1165 This case can also occur in combination of the previous one, e.g.:
1166
1167 foo (int a, int z)
1168 {
1169 int a.0;
1170 int D.2064;
1171
1172 a.0_3 = a;
1173 D.2064_4 = a.0_3 + 4;
1174 foo (D.2064_4);
1175
1176 3) The passed value is an address of an object within another one (which
1177 also passed by reference). Such situations are described by an ancestor
1178 jump function and describe situations such as:
1179
1180 B::foo() (struct B * const this)
1181 {
1182 struct A * D.1845;
1183
1184 D.1845_2 = &this_1(D)->D.1748;
1185 A::bar (D.1845_2);
1186
1187 INFO is the structure describing individual parameters, accessed at
1188 different stages of IPA optimizations. PARMS_AINFO contains the information
1189 that is only needed for intraprocedural analysis. */
1190
1191 static void
1192 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1193 class ipa_node_params *info,
1194 struct ipa_jump_func *jfunc,
1195 gcall *call, gimple *stmt, tree name,
1196 tree param_type)
1197 {
1198 HOST_WIDE_INT offset, size;
1199 tree op1, tc_ssa, base, ssa;
1200 bool reverse;
1201 int index;
1202
1203 op1 = gimple_assign_rhs1 (stmt);
1204
1205 if (TREE_CODE (op1) == SSA_NAME)
1206 {
1207 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1208 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1209 else
1210 index = load_from_unmodified_param (fbi, info->descriptors,
1211 SSA_NAME_DEF_STMT (op1));
1212 tc_ssa = op1;
1213 }
1214 else
1215 {
1216 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1217 tc_ssa = gimple_assign_lhs (stmt);
1218 }
1219
1220 if (index >= 0)
1221 {
1222 switch (gimple_assign_rhs_class (stmt))
1223 {
1224 case GIMPLE_BINARY_RHS:
1225 {
1226 tree op2 = gimple_assign_rhs2 (stmt);
1227 if (!is_gimple_ip_invariant (op2)
1228 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1229 != tcc_comparison)
1230 && !useless_type_conversion_p (TREE_TYPE (name),
1231 TREE_TYPE (op1))))
1232 return;
1233
1234 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1235 gimple_assign_rhs_code (stmt));
1236 break;
1237 }
1238 case GIMPLE_SINGLE_RHS:
1239 {
1240 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1241 tc_ssa);
1242 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1243 break;
1244 }
1245 case GIMPLE_UNARY_RHS:
1246 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1247 ipa_set_jf_unary_pass_through (jfunc, index,
1248 gimple_assign_rhs_code (stmt));
1249 default:;
1250 }
1251 return;
1252 }
1253
1254 if (TREE_CODE (op1) != ADDR_EXPR)
1255 return;
1256 op1 = TREE_OPERAND (op1, 0);
1257 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1258 return;
1259 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1260 offset_int mem_offset;
1261 if (!base
1262 || TREE_CODE (base) != MEM_REF
1263 || !mem_ref_offset (base).is_constant (&mem_offset))
1264 return;
1265 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1266 ssa = TREE_OPERAND (base, 0);
1267 if (TREE_CODE (ssa) != SSA_NAME
1268 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1269 || offset < 0)
1270 return;
1271
1272 /* Dynamic types are changed in constructors and destructors. */
1273 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1274 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1275 ipa_set_ancestor_jf (jfunc, offset, index,
1276 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1277 }
1278
1279 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1280 it looks like:
1281
1282 iftmp.1_3 = &obj_2(D)->D.1762;
1283
1284 The base of the MEM_REF must be a default definition SSA NAME of a
1285 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1286 whole MEM_REF expression is returned and the offset calculated from any
1287 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1288 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1289
1290 static tree
1291 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1292 {
1293 HOST_WIDE_INT size;
1294 tree expr, parm, obj;
1295 bool reverse;
1296
1297 if (!gimple_assign_single_p (assign))
1298 return NULL_TREE;
1299 expr = gimple_assign_rhs1 (assign);
1300
1301 if (TREE_CODE (expr) != ADDR_EXPR)
1302 return NULL_TREE;
1303 expr = TREE_OPERAND (expr, 0);
1304 obj = expr;
1305 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1306
1307 offset_int mem_offset;
1308 if (!expr
1309 || TREE_CODE (expr) != MEM_REF
1310 || !mem_ref_offset (expr).is_constant (&mem_offset))
1311 return NULL_TREE;
1312 parm = TREE_OPERAND (expr, 0);
1313 if (TREE_CODE (parm) != SSA_NAME
1314 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1315 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1316 return NULL_TREE;
1317
1318 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1319 *obj_p = obj;
1320 return expr;
1321 }
1322
1323
1324 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1325 statement PHI, try to find out whether NAME is in fact a
1326 multiple-inheritance typecast from a descendant into an ancestor of a formal
1327 parameter and thus can be described by an ancestor jump function and if so,
1328 write the appropriate function into JFUNC.
1329
1330 Essentially we want to match the following pattern:
1331
1332 if (obj_2(D) != 0B)
1333 goto <bb 3>;
1334 else
1335 goto <bb 4>;
1336
1337 <bb 3>:
1338 iftmp.1_3 = &obj_2(D)->D.1762;
1339
1340 <bb 4>:
1341 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1342 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1343 return D.1879_6; */
1344
1345 static void
1346 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1347 class ipa_node_params *info,
1348 struct ipa_jump_func *jfunc,
1349 gcall *call, gphi *phi)
1350 {
1351 HOST_WIDE_INT offset;
1352 gimple *assign, *cond;
1353 basic_block phi_bb, assign_bb, cond_bb;
1354 tree tmp, parm, expr, obj;
1355 int index, i;
1356
1357 if (gimple_phi_num_args (phi) != 2)
1358 return;
1359
1360 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1361 tmp = PHI_ARG_DEF (phi, 0);
1362 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1363 tmp = PHI_ARG_DEF (phi, 1);
1364 else
1365 return;
1366 if (TREE_CODE (tmp) != SSA_NAME
1367 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1368 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1369 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1370 return;
1371
1372 assign = SSA_NAME_DEF_STMT (tmp);
1373 assign_bb = gimple_bb (assign);
1374 if (!single_pred_p (assign_bb))
1375 return;
1376 expr = get_ancestor_addr_info (assign, &obj, &offset);
1377 if (!expr)
1378 return;
1379 parm = TREE_OPERAND (expr, 0);
1380 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1381 if (index < 0)
1382 return;
1383
1384 cond_bb = single_pred (assign_bb);
1385 cond = last_stmt (cond_bb);
1386 if (!cond
1387 || gimple_code (cond) != GIMPLE_COND
1388 || gimple_cond_code (cond) != NE_EXPR
1389 || gimple_cond_lhs (cond) != parm
1390 || !integer_zerop (gimple_cond_rhs (cond)))
1391 return;
1392
1393 phi_bb = gimple_bb (phi);
1394 for (i = 0; i < 2; i++)
1395 {
1396 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1397 if (pred != assign_bb && pred != cond_bb)
1398 return;
1399 }
1400
1401 ipa_set_ancestor_jf (jfunc, offset, index,
1402 parm_ref_data_pass_through_p (fbi, index, call, parm));
1403 }
1404
1405 /* Inspect the given TYPE and return true iff it has the same structure (the
1406 same number of fields of the same types) as a C++ member pointer. If
1407 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1408 corresponding fields there. */
1409
1410 static bool
1411 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1412 {
1413 tree fld;
1414
1415 if (TREE_CODE (type) != RECORD_TYPE)
1416 return false;
1417
1418 fld = TYPE_FIELDS (type);
1419 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1420 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1421 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1422 return false;
1423
1424 if (method_ptr)
1425 *method_ptr = fld;
1426
1427 fld = DECL_CHAIN (fld);
1428 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1429 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1430 return false;
1431 if (delta)
1432 *delta = fld;
1433
1434 if (DECL_CHAIN (fld))
1435 return false;
1436
1437 return true;
1438 }
1439
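/* For illustration only (an assumed layout, not something defined here): the
   record shape recognized by type_like_member_ptr_p corresponds roughly to
   the way a C++ pointer to member function is represented, e.g.

     struct ptrmemfunc
     {
       void (*__pfn) ();   // pointer whose pointed-to type is a METHOD_TYPE
       long __delta;       // integral adjustment of the this pointer
     };

   i.e. exactly two fields, a method pointer followed by an integral delta.  */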
1440 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1441 return the rhs of its defining statement. Otherwise return RHS as it
1442 is. */
1443
1444 static inline tree
1445 get_ssa_def_if_simple_copy (tree rhs)
1446 {
1447 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1448 {
1449 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1450
1451 if (gimple_assign_single_p (def_stmt))
1452 rhs = gimple_assign_rhs1 (def_stmt);
1453 else
1454 break;
1455 }
1456 return rhs;
1457 }
1458
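/* For illustration (hypothetical GIMPLE, not taken from this file): given

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the chain of simple copies and
   yields a_1; the walk stops at a default definition or at any defining
   statement that is not a single-rhs assignment.  */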
1459 /* Simple linked list, describing known contents of an aggregate before
1460 call. */
1461
1462 struct ipa_known_agg_contents_list
1463 {
1464 /* Offset and size of the described part of the aggregate. */
1465 HOST_WIDE_INT offset, size;
1466 /* Known constant value or NULL if the contents is known to be unknown. */
1467 tree constant;
1468 /* Pointer to the next structure in the list. */
1469 struct ipa_known_agg_contents_list *next;
1470 };
1471
1472 /* Add a known content item into a linked list of ipa_known_agg_contents_list
1473 structure, in which all elements are sorted ascendingly by offset. */
1474
1475 static inline void
1476 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1477 struct ipa_known_agg_contents_list *item)
1478 {
1479 struct ipa_known_agg_contents_list *list = *plist;
1480
1481 for (; list; list = list->next)
1482 {
1483 if (list->offset >= item->offset)
1484 break;
1485
1486 plist = &list->next;
1487 }
1488
1489 item->next = list;
1490 *plist = item;
1491 }
1492
1493 /* Check whether a given known content item is clobbered by some element in
1494 a linked list of ipa_known_agg_contents_list. */
1495
1496 static inline bool
1497 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1498 struct ipa_known_agg_contents_list *item)
1499 {
1500 for (; list; list = list->next)
1501 {
1502 if (list->offset >= item->offset)
1503 return list->offset < item->offset + item->size;
1504
1505 if (list->offset + list->size > item->offset)
1506 return true;
1507 }
1508
1509 return false;
1510 }
1511
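/* For illustration (assumed numbers): if the list already holds items at
   offset 0 with size 64 and at offset 128 with size 32, then a new item at
   offset 32 with size 64 overlaps the first one and is reported as clobbered,
   whereas an item at offset 64 with size 64 is not.  */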
1512 /* Build aggregate jump function from LIST, assuming there are exactly
1513 CONST_COUNT constant entries there and that offset of the passed argument
1514 is ARG_OFFSET and store it into JFUNC. */
1515
1516 static void
1517 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1518 int const_count, HOST_WIDE_INT arg_offset,
1519 struct ipa_jump_func *jfunc)
1520 {
1521 vec_alloc (jfunc->agg.items, const_count);
1522 while (list)
1523 {
1524 if (list->constant)
1525 {
1526 struct ipa_agg_jf_item item;
1527 item.offset = list->offset - arg_offset;
1528 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1529 item.value = unshare_expr_without_location (list->constant);
1530 jfunc->agg.items->quick_push (item);
1531 }
1532 list = list->next;
1533 }
1534 }
1535
1536 /* If STMT is a memory store to the object whose address is BASE, extract
1537 information (offset, size, and value) into CONTENT, and return true,
1538 otherwise we conservatively assume the whole object is modified with
1539 unknown content, and return false. CHECK_REF means that the access to the
1540 object is expected to be in the form of a MEM_REF expression. */
1541
1542 static bool
1543 extract_mem_content (gimple *stmt, tree base, bool check_ref,
1544 struct ipa_known_agg_contents_list *content)
1545 {
1546 HOST_WIDE_INT lhs_offset, lhs_size;
1547 tree lhs, rhs, lhs_base;
1548 bool reverse;
1549
1550 if (!gimple_assign_single_p (stmt))
1551 return false;
1552
1553 lhs = gimple_assign_lhs (stmt);
1554 rhs = gimple_assign_rhs1 (stmt);
1555
1556 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1557 || TREE_CODE (lhs) == BIT_FIELD_REF
1558 || contains_bitfld_component_ref_p (lhs))
1559 return false;
1560
1561 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1562 &lhs_size, &reverse);
1563 if (!lhs_base)
1564 return false;
1565
1566 if (check_ref)
1567 {
1568 if (TREE_CODE (lhs_base) != MEM_REF
1569 || TREE_OPERAND (lhs_base, 0) != base
1570 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1571 return false;
1572 }
1573 else if (lhs_base != base)
1574 return false;
1575
1576 rhs = get_ssa_def_if_simple_copy (rhs);
1577
1578 content->size = lhs_size;
1579 content->offset = lhs_offset;
1580 content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
1581 content->next = NULL;
1582
1583 return true;
1584 }
1585
1586 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1587 in ARG is filled in with constant values. ARG can either be an aggregate
1588 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1589 aggregate. JFUNC is the jump function into which the constants are
1590 subsequently stored. AA_WALK_BUDGET_P points to the limit on the number of
1591 statements we allow get_continuation_for_phi to examine. */
1592
1593 static void
1594 determine_known_aggregate_parts (gcall *call, tree arg,
1595 tree arg_type,
1596 struct ipa_jump_func *jfunc,
1597 unsigned *aa_walk_budget_p)
1598 {
1599 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1600 bitmap visited = NULL;
1601 int item_count = 0, const_count = 0;
1602 int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
1603 HOST_WIDE_INT arg_offset, arg_size;
1604 tree arg_base;
1605 bool check_ref, by_ref;
1606 ao_ref r;
1607
1608 if (ipa_max_agg_items == 0)
1609 return;
1610
1611 /* The function operates in three stages. First, we prepare check_ref, r,
1612 arg_base and arg_offset based on what is actually passed as an actual
1613 argument. */
1614
1615 if (POINTER_TYPE_P (arg_type))
1616 {
1617 by_ref = true;
1618 if (TREE_CODE (arg) == SSA_NAME)
1619 {
1620 tree type_size;
1621 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1622 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1623 return;
1624 check_ref = true;
1625 arg_base = arg;
1626 arg_offset = 0;
1627 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1628 arg_size = tree_to_uhwi (type_size);
1629 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1630 }
1631 else if (TREE_CODE (arg) == ADDR_EXPR)
1632 {
1633 bool reverse;
1634
1635 arg = TREE_OPERAND (arg, 0);
1636 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1637 &arg_size, &reverse);
1638 if (!arg_base)
1639 return;
1640 if (DECL_P (arg_base))
1641 {
1642 check_ref = false;
1643 ao_ref_init (&r, arg_base);
1644 }
1645 else
1646 return;
1647 }
1648 else
1649 return;
1650 }
1651 else
1652 {
1653 bool reverse;
1654
1655 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1656
1657 by_ref = false;
1658 check_ref = false;
1659 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1660 &arg_size, &reverse);
1661 if (!arg_base)
1662 return;
1663
1664 ao_ref_init (&r, arg);
1665 }
1666
1667 /* The second stage traverses the virtual SSA web backwards starting from the
1668 call statement, looking only at individual dominating virtual operands
1669 (whose definitions dominate the call). As long as it can tell that the
1670 contents of the aggregate are affected by the definition of a virtual operand,
1671 it builds a sorted linked list of ipa_known_agg_contents_list describing that. */
1672
1673 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1674 {
1675 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1676
1677 if (gimple_code (stmt) == GIMPLE_PHI)
1678 {
1679 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1680 *aa_walk_budget_p,
1681 &visited, false, NULL, NULL);
1682 continue;
1683 }
1684
1685 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1686 {
1687 struct ipa_known_agg_contents_list *content
1688 = XALLOCA (struct ipa_known_agg_contents_list);
1689
1690 if (!extract_mem_content (stmt, arg_base, check_ref, content))
1691 break;
1692
1693 /* Now we get a dominating virtual operand, and need to check
1694 whether its value is clobbered by any other dominating one. */
1695 if (content->constant
1696 && !clobber_by_agg_contents_list_p (all_list, content))
1697 {
1698 struct ipa_known_agg_contents_list *copy
1699 = XALLOCA (struct ipa_known_agg_contents_list);
1700
1701 /* Add to the list consisting of only dominating virtual
1702 operands, whose definitions can finally reach the call. */
1703 add_to_agg_contents_list (&list, (*copy = *content, copy));
1704
1705 if (++const_count == ipa_max_agg_items)
1706 break;
1707 }
1708
1709 /* Add to the list consisting of all dominating virtual operands. */
1710 add_to_agg_contents_list (&all_list, content);
1711
1712 if (++item_count == 2 * ipa_max_agg_items)
1713 break;
1714 }
1715 dom_vuse = gimple_vuse (stmt);
1716 }
1717
1718 if (visited)
1719 BITMAP_FREE (visited);
1720
1721 /* Third stage just goes over the list and creates an appropriate vector of
1722 ipa_agg_jf_item structures out of it, of course only if there are
1723 any known constants to begin with. */
1724
1725 if (const_count)
1726 {
1727 jfunc->agg.by_ref = by_ref;
1728 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1729 }
1730 }
1731
1732
1733 /* Return the Ith param type of callee associated with call graph
1734 edge E. */
1735
1736 tree
1737 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1738 {
1739 int n;
1740 tree type = (e->callee
1741 ? TREE_TYPE (e->callee->decl)
1742 : gimple_call_fntype (e->call_stmt));
1743 tree t = TYPE_ARG_TYPES (type);
1744
1745 for (n = 0; n < i; n++)
1746 {
1747 if (!t)
1748 break;
1749 t = TREE_CHAIN (t);
1750 }
1751 if (t)
1752 return TREE_VALUE (t);
1753 if (!e->callee)
1754 return NULL;
1755 t = DECL_ARGUMENTS (e->callee->decl);
1756 for (n = 0; n < i; n++)
1757 {
1758 if (!t)
1759 return NULL;
1760 t = TREE_CHAIN (t);
1761 }
1762 if (t)
1763 return TREE_TYPE (t);
1764 return NULL;
1765 }
1766
1767 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1768 allocated structure or a previously existing one shared with other jump
1769 functions and/or transformation summaries. */
1770
1771 ipa_bits *
1772 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1773 {
1774 ipa_bits tmp;
1775 tmp.value = value;
1776 tmp.mask = mask;
1777
1778 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1779 if (*slot)
1780 return *slot;
1781
1782 ipa_bits *res = ggc_alloc<ipa_bits> ();
1783 res->value = value;
1784 res->mask = mask;
1785 *slot = res;
1786
1787 return res;
1788 }
1789
1790 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1791 table in order to avoid creating multiple same ipa_bits structures. */
1792
1793 static void
1794 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1795 const widest_int &mask)
1796 {
1797 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1798 }
1799
1800 /* Return a pointer to a value_range just like *TMP, but either find it in
1801 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1802
1803 static value_range_base *
1804 ipa_get_value_range (value_range_base *tmp)
1805 {
1806 value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1807 if (*slot)
1808 return *slot;
1809
1810 value_range_base *vr = ggc_alloc<value_range_base> ();
1811 *vr = *tmp;
1812 *slot = vr;
1813
1814 return vr;
1815 }
1816
1817 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1818 equiv set. Use hash table in order to avoid creating multiple same copies of
1819 value_ranges. */
1820
1821 static value_range_base *
1822 ipa_get_value_range (enum value_range_kind type, tree min, tree max)
1823 {
1824 value_range_base tmp (type, min, max);
1825 return ipa_get_value_range (&tmp);
1826 }
1827
1828 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1829 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1830 same value_range structures. */
1831
1832 static void
1833 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
1834 tree min, tree max)
1835 {
1836 jf->m_vr = ipa_get_value_range (type, min, max);
1837 }
1838
1839 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
1840    copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1841
1842 static void
1843 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
1844 {
1845 jf->m_vr = ipa_get_value_range (tmp);
1846 }
1847
1848 /* Compute jump functions for all arguments of callsite CS and insert the
1849 information in the jump_functions array in the ipa_edge_args corresponding
1850 to this callsite. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1854 struct cgraph_edge *cs)
1855 {
1856 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
1857 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
1858 gcall *call = cs->call_stmt;
1859 int n, arg_num = gimple_call_num_args (call);
1860 bool useful_context = false;
1861
1862 if (arg_num == 0 || args->jump_functions)
1863 return;
1864 vec_safe_grow_cleared (args->jump_functions, arg_num);
1865 if (flag_devirtualize)
1866 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1867
1868 if (gimple_call_internal_p (call))
1869 return;
1870 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1871 return;
1872
1873 for (n = 0; n < arg_num; n++)
1874 {
1875 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1876 tree arg = gimple_call_arg (call, n);
1877 tree param_type = ipa_get_callee_param_type (cs, n);
1878 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1879 {
1880 tree instance;
1881 class ipa_polymorphic_call_context context (cs->caller->decl,
1882 arg, cs->call_stmt,
1883 &instance);
1884 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
1885 &fbi->aa_walk_budget);
1886 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1887 if (!context.useless_p ())
1888 useful_context = true;
1889 }
1890
1891 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1892 {
1893 bool addr_nonzero = false;
1894 bool strict_overflow = false;
1895
1896 if (TREE_CODE (arg) == SSA_NAME
1897 && param_type
1898 && get_ptr_nonnull (arg))
1899 addr_nonzero = true;
1900 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1901 addr_nonzero = true;
1902
1903 if (addr_nonzero)
1904 {
1905 tree z = build_int_cst (TREE_TYPE (arg), 0);
1906 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1907 }
1908 else
1909 gcc_assert (!jfunc->m_vr);
1910 }
1911 else
1912 {
1913 wide_int min, max;
1914 value_range_kind type;
1915 if (TREE_CODE (arg) == SSA_NAME
1916 && param_type
1917 && (type = get_range_info (arg, &min, &max))
1918 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1919 {
1920 value_range_base resvr;
1921 value_range_base tmpvr (type,
1922 wide_int_to_tree (TREE_TYPE (arg), min),
1923 wide_int_to_tree (TREE_TYPE (arg), max));
1924 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
1925 &tmpvr, TREE_TYPE (arg));
1926 if (!resvr.undefined_p () && !resvr.varying_p ())
1927 ipa_set_jfunc_vr (jfunc, &resvr);
1928 else
1929 gcc_assert (!jfunc->m_vr);
1930 }
1931 else
1932 gcc_assert (!jfunc->m_vr);
1933 }
1934
1935 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1936 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1937 {
1938 if (TREE_CODE (arg) == SSA_NAME)
1939 ipa_set_jfunc_bits (jfunc, 0,
1940 widest_int::from (get_nonzero_bits (arg),
1941 TYPE_SIGN (TREE_TYPE (arg))));
1942 else
1943 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1944 }
1945 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1946 {
1947 unsigned HOST_WIDE_INT bitpos;
1948 unsigned align;
1949
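	  /* Derive known bits from the pointer alignment: the bits covered by
	     the alignment are known (they encode the misalignment BITPOS), the
	     remaining bits stay unknown.  */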
1950 get_pointer_alignment_1 (arg, &align, &bitpos);
1951 widest_int mask = wi::bit_and_not
1952 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1953 align / BITS_PER_UNIT - 1);
1954 widest_int value = bitpos / BITS_PER_UNIT;
1955 ipa_set_jfunc_bits (jfunc, value, mask);
1956 }
1957 else
1958 gcc_assert (!jfunc->bits);
1959
1960 if (is_gimple_ip_invariant (arg)
1961 || (VAR_P (arg)
1962 && is_global_var (arg)
1963 && TREE_READONLY (arg)))
1964 ipa_set_jf_constant (jfunc, arg, cs);
1965 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1966 && TREE_CODE (arg) == PARM_DECL)
1967 {
1968 int index = ipa_get_param_decl_index (info, arg);
1969
1970 	  gcc_assert (index >= 0);
1971 	  /* Aggregate passed by value, check for pass-through, otherwise we
1972 	     will attempt to fill in the aggregate contents later in this
1973 	     loop. */
1974 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1975 {
1976 ipa_set_jf_simple_pass_through (jfunc, index, false);
1977 continue;
1978 }
1979 }
1980 else if (TREE_CODE (arg) == SSA_NAME)
1981 {
1982 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1983 {
1984 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1985 if (index >= 0)
1986 {
1987 bool agg_p;
1988 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1989 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1990 }
1991 }
1992 else
1993 {
1994 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1995 if (is_gimple_assign (stmt))
1996 compute_complex_assign_jump_func (fbi, info, jfunc,
1997 call, stmt, arg, param_type);
1998 else if (gimple_code (stmt) == GIMPLE_PHI)
1999 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2000 call,
2001 as_a <gphi *> (stmt));
2002 }
2003 }
2004
2005       /* If ARG is a pointer, we cannot use its type to determine the type of the
2006 	 aggregate passed (because type conversions are ignored in gimple). Usually we
2007 	 can safely get the type from the function declaration, but in the case of K&R
2008 	 prototypes or variadic functions we can try our luck with the type of the
2009 	 pointer passed. TODO: Since we look for actual initialization of the memory
2010 	 object, we might do better to work out the type based on the memory stores we find. */
2011 if (!param_type)
2012 param_type = TREE_TYPE (arg);
2013
2014 if ((jfunc->type != IPA_JF_PASS_THROUGH
2015 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2016 && (jfunc->type != IPA_JF_ANCESTOR
2017 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2018 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2019 || POINTER_TYPE_P (param_type)))
2020 determine_known_aggregate_parts (call, arg, param_type, jfunc,
2021 &fbi->aa_walk_budget);
2022 }
2023 if (!useful_context)
2024 vec_free (args->polymorphic_call_contexts);
2025 }
2026
2027 /* Compute jump functions for all edges - both direct and indirect - outgoing
2028 from BB. */
2029
2030 static void
2031 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2032 {
2033 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2034 int i;
2035 struct cgraph_edge *cs;
2036
2037 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2038 {
2039 struct cgraph_node *callee = cs->callee;
2040
2041 if (callee)
2042 {
2043 callee = callee->ultimate_alias_target ();
2044 /* We do not need to bother analyzing calls to unknown functions
2045 unless they may become known during lto/whopr. */
2046 if (!callee->definition && !flag_lto)
2047 continue;
2048 }
2049 ipa_compute_jump_functions_for_edge (fbi, cs);
2050 }
2051 }
2052
2053 /* If STMT looks like a statement loading a value from a member pointer formal
2054 parameter, return that parameter and store the offset of the field to
2055 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2056 might be clobbered). If USE_DELTA, then we look for a use of the delta
2057 field rather than the pfn. */
2058
2059 static tree
2060 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2061 HOST_WIDE_INT *offset_p)
2062 {
2063 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2064
2065 if (!gimple_assign_single_p (stmt))
2066 return NULL_TREE;
2067
2068 rhs = gimple_assign_rhs1 (stmt);
2069 if (TREE_CODE (rhs) == COMPONENT_REF)
2070 {
2071 ref_field = TREE_OPERAND (rhs, 1);
2072 rhs = TREE_OPERAND (rhs, 0);
2073 }
2074 else
2075 ref_field = NULL_TREE;
2076 if (TREE_CODE (rhs) != MEM_REF)
2077 return NULL_TREE;
2078 rec = TREE_OPERAND (rhs, 0);
2079 if (TREE_CODE (rec) != ADDR_EXPR)
2080 return NULL_TREE;
2081 rec = TREE_OPERAND (rec, 0);
2082 if (TREE_CODE (rec) != PARM_DECL
2083 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2084 return NULL_TREE;
2085 ref_offset = TREE_OPERAND (rhs, 1);
2086
2087 if (use_delta)
2088 fld = delta_field;
2089 else
2090 fld = ptr_field;
2091 if (offset_p)
2092 *offset_p = int_bit_position (fld);
2093
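  /* When the load went through a COMPONENT_REF, the MEM_REF offset must be
     zero and the referenced field must be FLD; otherwise the MEM_REF offset
     itself must match FLD's byte position.  */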
2094 if (ref_field)
2095 {
2096 if (integer_nonzerop (ref_offset))
2097 return NULL_TREE;
2098 return ref_field == fld ? rec : NULL_TREE;
2099 }
2100 else
2101 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2102 : NULL_TREE;
2103 }
2104
2105 /* Returns true iff T is an SSA_NAME defined by a statement. */
2106
2107 static bool
2108 ipa_is_ssa_with_stmt_def (tree t)
2109 {
2110 if (TREE_CODE (t) == SSA_NAME
2111 && !SSA_NAME_IS_DEFAULT_DEF (t))
2112 return true;
2113 else
2114 return false;
2115 }
2116
2117 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2118 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2119 indirect call graph edge. */
2120
2121 static struct cgraph_edge *
2122 ipa_note_param_call (struct cgraph_node *node, int param_index,
2123 gcall *stmt)
2124 {
2125 struct cgraph_edge *cs;
2126
2127 cs = node->get_edge (stmt);
2128 cs->indirect_info->param_index = param_index;
2129 cs->indirect_info->agg_contents = 0;
2130 cs->indirect_info->member_ptr = 0;
2131 cs->indirect_info->guaranteed_unmodified = 0;
2132 return cs;
2133 }
2134
2135 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2136 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2137 intermediate information about each formal parameter. Currently it checks
2138 whether the call calls a pointer that is a formal parameter and if so, the
2139 parameter is marked with the called flag and an indirect call graph edge
2140 describing the call is created. This is very simple for ordinary pointers
2141 represented in SSA but not-so-nice when it comes to member pointers. The
2142 ugly part of this function does nothing more than trying to match the
2143 pattern of such a call. An example of such a pattern is the gimple dump
2144 below, the call is on the last line:
2145
2146 <bb 2>:
2147 f$__delta_5 = f.__delta;
2148 f$__pfn_24 = f.__pfn;
2149
2150 or
2151 <bb 2>:
2152 f$__delta_5 = MEM[(struct *)&f];
2153 f$__pfn_24 = MEM[(struct *)&f + 4B];
2154
2155 and a few lines below:
2156
2157 <bb 5>
2158 D.2496_3 = (int) f$__pfn_24;
2159 D.2497_4 = D.2496_3 & 1;
2160 if (D.2497_4 != 0)
2161 goto <bb 3>;
2162 else
2163 goto <bb 4>;
2164
2165 <bb 6>:
2166 D.2500_7 = (unsigned int) f$__delta_5;
2167 D.2501_8 = &S + D.2500_7;
2168 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2169 D.2503_10 = *D.2502_9;
2170 D.2504_12 = f$__pfn_24 + -1;
2171 D.2505_13 = (unsigned int) D.2504_12;
2172 D.2506_14 = D.2503_10 + D.2505_13;
2173 D.2507_15 = *D.2506_14;
2174 iftmp.11_16 = (String:: *) D.2507_15;
2175
2176 <bb 7>:
2177 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2178 D.2500_19 = (unsigned int) f$__delta_5;
2179 D.2508_20 = &S + D.2500_19;
2180 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2181
2182 Such patterns are results of simple calls to a member pointer:
2183
2184 int doprinting (int (MyString::* f)(int) const)
2185 {
2186 MyString S ("somestring");
2187
2188 return (S.*f)(4);
2189 }
2190
2191 Moreover, the function also looks for called pointers loaded from aggregates
2192 passed by value or reference. */
2193
2194 static void
2195 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2196 tree target)
2197 {
2198 class ipa_node_params *info = fbi->info;
2199 HOST_WIDE_INT offset;
2200 bool by_ref;
2201
2202 if (SSA_NAME_IS_DEFAULT_DEF (target))
2203 {
2204 tree var = SSA_NAME_VAR (target);
2205 int index = ipa_get_param_decl_index (info, var);
2206 if (index >= 0)
2207 ipa_note_param_call (fbi->node, index, call);
2208 return;
2209 }
2210
2211 int index;
2212 gimple *def = SSA_NAME_DEF_STMT (target);
2213 bool guaranteed_unmodified;
2214 if (gimple_assign_single_p (def)
2215 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2216 gimple_assign_rhs1 (def), &index, &offset,
2217 NULL, &by_ref, &guaranteed_unmodified))
2218 {
2219 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2220 cs->indirect_info->offset = offset;
2221 cs->indirect_info->agg_contents = 1;
2222 cs->indirect_info->by_ref = by_ref;
2223 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2224 return;
2225 }
2226
2227 /* Now we need to try to match the complex pattern of calling a member
2228 pointer. */
2229 if (gimple_code (def) != GIMPLE_PHI
2230 || gimple_phi_num_args (def) != 2
2231 || !POINTER_TYPE_P (TREE_TYPE (target))
2232 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2233 return;
2234
2235 /* First, we need to check whether one of these is a load from a member
2236 pointer that is a parameter to this function. */
2237 tree n1 = PHI_ARG_DEF (def, 0);
2238 tree n2 = PHI_ARG_DEF (def, 1);
2239 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2240 return;
2241 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2242 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2243
2244 tree rec;
2245 basic_block bb, virt_bb;
2246 basic_block join = gimple_bb (def);
2247 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2248 {
2249 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2250 return;
2251
2252 bb = EDGE_PRED (join, 0)->src;
2253 virt_bb = gimple_bb (d2);
2254 }
2255 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2256 {
2257 bb = EDGE_PRED (join, 1)->src;
2258 virt_bb = gimple_bb (d1);
2259 }
2260 else
2261 return;
2262
2263 /* Second, we need to check that the basic blocks are laid out in the way
2264 corresponding to the pattern. */
2265
2266 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2267 || single_pred (virt_bb) != bb
2268 || single_succ (virt_bb) != join)
2269 return;
2270
2271 /* Third, let's see that the branching is done depending on the least
2272 significant bit of the pfn. */
2273
2274 gimple *branch = last_stmt (bb);
2275 if (!branch || gimple_code (branch) != GIMPLE_COND)
2276 return;
2277
2278 if ((gimple_cond_code (branch) != NE_EXPR
2279 && gimple_cond_code (branch) != EQ_EXPR)
2280 || !integer_zerop (gimple_cond_rhs (branch)))
2281 return;
2282
2283 tree cond = gimple_cond_lhs (branch);
2284 if (!ipa_is_ssa_with_stmt_def (cond))
2285 return;
2286
2287 def = SSA_NAME_DEF_STMT (cond);
2288 if (!is_gimple_assign (def)
2289 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2290 || !integer_onep (gimple_assign_rhs2 (def)))
2291 return;
2292
2293 cond = gimple_assign_rhs1 (def);
2294 if (!ipa_is_ssa_with_stmt_def (cond))
2295 return;
2296
2297 def = SSA_NAME_DEF_STMT (cond);
2298
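  /* Look through a possible conversion, e.g. a cast of the pfn to an integer
     type, before checking for the load from the member pointer itself.  */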
2299 if (is_gimple_assign (def)
2300 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2301 {
2302 cond = gimple_assign_rhs1 (def);
2303 if (!ipa_is_ssa_with_stmt_def (cond))
2304 return;
2305 def = SSA_NAME_DEF_STMT (cond);
2306 }
2307
2308 tree rec2;
2309 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2310 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2311 == ptrmemfunc_vbit_in_delta),
2312 NULL);
2313 if (rec != rec2)
2314 return;
2315
2316 index = ipa_get_param_decl_index (info, rec);
2317 if (index >= 0
2318 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2319 {
2320 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2321 cs->indirect_info->offset = offset;
2322 cs->indirect_info->agg_contents = 1;
2323 cs->indirect_info->member_ptr = 1;
2324 cs->indirect_info->guaranteed_unmodified = 1;
2325 }
2326
2327 return;
2328 }
2329
2330 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2331 object referenced in the expression is a formal parameter of the caller
2332 FBI->node (described by FBI->info), create a call note for the
2333 statement. */
2334
2335 static void
2336 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2337 gcall *call, tree target)
2338 {
2339 tree obj = OBJ_TYPE_REF_OBJECT (target);
2340 int index;
2341 HOST_WIDE_INT anc_offset;
2342
2343 if (!flag_devirtualize)
2344 return;
2345
2346 if (TREE_CODE (obj) != SSA_NAME)
2347 return;
2348
2349 class ipa_node_params *info = fbi->info;
2350 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2351 {
2352 struct ipa_jump_func jfunc;
2353 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2354 return;
2355
2356 anc_offset = 0;
2357 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2358 gcc_assert (index >= 0);
2359 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2360 call, &jfunc))
2361 return;
2362 }
2363 else
2364 {
2365 struct ipa_jump_func jfunc;
2366 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2367 tree expr;
2368
2369 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2370 if (!expr)
2371 return;
2372 index = ipa_get_param_decl_index (info,
2373 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2374 gcc_assert (index >= 0);
2375 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2376 call, &jfunc, anc_offset))
2377 return;
2378 }
2379
2380 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2381 class cgraph_indirect_call_info *ii = cs->indirect_info;
2382 ii->offset = anc_offset;
2383 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2384 ii->otr_type = obj_type_ref_class (target);
2385 ii->polymorphic = 1;
2386 }
2387
2388 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2389 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2390 containing intermediate information about each formal parameter. */
2391
2392 static void
2393 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2394 {
2395 tree target = gimple_call_fn (call);
2396
2397 if (!target
2398 || (TREE_CODE (target) != SSA_NAME
2399 && !virtual_method_call_p (target)))
2400 return;
2401
2402 struct cgraph_edge *cs = fbi->node->get_edge (call);
2403 /* If we previously turned the call into a direct call, there is
2404 no need to analyze. */
2405 if (cs && !cs->indirect_unknown_callee)
2406 return;
2407
2408 if (cs->indirect_info->polymorphic && flag_devirtualize)
2409 {
2410 tree instance;
2411 tree target = gimple_call_fn (call);
2412 ipa_polymorphic_call_context context (current_function_decl,
2413 target, call, &instance);
2414
2415 gcc_checking_assert (cs->indirect_info->otr_type
2416 == obj_type_ref_class (target));
2417 gcc_checking_assert (cs->indirect_info->otr_token
2418 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2419
2420 cs->indirect_info->vptr_changed
2421 = !context.get_dynamic_type (instance,
2422 OBJ_TYPE_REF_OBJECT (target),
2423 obj_type_ref_class (target), call,
2424 &fbi->aa_walk_budget);
2425 cs->indirect_info->context = context;
2426 }
2427
2428 if (TREE_CODE (target) == SSA_NAME)
2429 ipa_analyze_indirect_call_uses (fbi, call, target);
2430 else if (virtual_method_call_p (target))
2431 ipa_analyze_virtual_call_uses (fbi, call, target);
2432 }
2433
2434
2435 /* Analyze the call statement STMT with respect to formal parameters (described
2436 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2437 formal parameters are called. */
2438
2439 static void
2440 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2441 {
2442 if (is_gimple_call (stmt))
2443 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2444 }
2445
2446 /* Callback of walk_stmt_load_store_addr_ops, used to visit loads, stores and
2447    address operands alike. If OP is a parameter declaration, mark it as used in
2448    the info structure passed in DATA. */
2449
2450 static bool
2451 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2452 {
2453 class ipa_node_params *info = (class ipa_node_params *) data;
2454
2455 op = get_base_address (op);
2456 if (op
2457 && TREE_CODE (op) == PARM_DECL)
2458 {
2459 int index = ipa_get_param_decl_index (info, op);
2460 gcc_assert (index >= 0);
2461 ipa_set_param_used (info, index, true);
2462 }
2463
2464 return false;
2465 }
2466
2467 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2468 the findings in various structures of the associated ipa_node_params
2469 structure, such as parameter flags, notes etc. FBI holds various data about
2470 the function being analyzed. */
2471
2472 static void
2473 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2474 {
2475 gimple_stmt_iterator gsi;
2476 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2477 {
2478 gimple *stmt = gsi_stmt (gsi);
2479
2480 if (is_gimple_debug (stmt))
2481 continue;
2482
2483 ipa_analyze_stmt_uses (fbi, stmt);
2484 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2485 visit_ref_for_mod_analysis,
2486 visit_ref_for_mod_analysis,
2487 visit_ref_for_mod_analysis);
2488 }
2489 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2490 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2491 visit_ref_for_mod_analysis,
2492 visit_ref_for_mod_analysis,
2493 visit_ref_for_mod_analysis);
2494 }
2495
2496 /* Calculate controlled uses of parameters of NODE. */
2497
2498 static void
2499 ipa_analyze_controlled_uses (struct cgraph_node *node)
2500 {
2501 class ipa_node_params *info = IPA_NODE_REF (node);
2502
2503 for (int i = 0; i < ipa_get_param_count (info); i++)
2504 {
2505 tree parm = ipa_get_param (info, i);
2506 int controlled_uses = 0;
2507
2508 /* For SSA regs see if parameter is used. For non-SSA we compute
2509 the flag during modification analysis. */
2510 if (is_gimple_reg (parm))
2511 {
2512 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2513 parm);
2514 if (ddef && !has_zero_uses (ddef))
2515 {
2516 imm_use_iterator imm_iter;
2517 use_operand_p use_p;
2518
2519 ipa_set_param_used (info, i, true);
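	      /* Count the uses that appear as call arguments; any other
		 non-debug use means we cannot describe all uses, so fall
		 back to IPA_UNDESCRIBED_USE.  */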
2520 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2521 if (!is_gimple_call (USE_STMT (use_p)))
2522 {
2523 if (!is_gimple_debug (USE_STMT (use_p)))
2524 {
2525 controlled_uses = IPA_UNDESCRIBED_USE;
2526 break;
2527 }
2528 }
2529 else
2530 controlled_uses++;
2531 }
2532 else
2533 controlled_uses = 0;
2534 }
2535 else
2536 controlled_uses = IPA_UNDESCRIBED_USE;
2537 ipa_set_controlled_uses (info, i, controlled_uses);
2538 }
2539 }
2540
2541 /* Free stuff in BI. */
2542
2543 static void
2544 free_ipa_bb_info (struct ipa_bb_info *bi)
2545 {
2546 bi->cg_edges.release ();
2547 bi->param_aa_statuses.release ();
2548 }
2549
2550 /* Dominator walker driving the analysis. */
2551
2552 class analysis_dom_walker : public dom_walker
2553 {
2554 public:
2555 analysis_dom_walker (struct ipa_func_body_info *fbi)
2556 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2557
2558 virtual edge before_dom_children (basic_block);
2559
2560 private:
2561 struct ipa_func_body_info *m_fbi;
2562 };
2563
2564 edge
2565 analysis_dom_walker::before_dom_children (basic_block bb)
2566 {
2567 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2568 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2569 return NULL;
2570 }
2571
2572 /* Release body info FBI. */
2573
2574 void
2575 ipa_release_body_info (struct ipa_func_body_info *fbi)
2576 {
2577 int i;
2578 struct ipa_bb_info *bi;
2579
2580 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi->bb_infos.release ();
2583 }
2584
2585 /* Initialize the array describing properties of formal parameters
2586 of NODE, analyze their uses and compute jump functions associated
2587 with actual arguments of calls from within NODE. */
2588
2589 void
2590 ipa_analyze_node (struct cgraph_node *node)
2591 {
2592 struct ipa_func_body_info fbi;
2593 class ipa_node_params *info;
2594
2595 ipa_check_create_node_params ();
2596 ipa_check_create_edge_args ();
2597 info = IPA_NODE_REF (node);
2598
2599 if (info->analysis_done)
2600 return;
2601 info->analysis_done = 1;
2602
2603 if (ipa_func_spec_opts_forbid_analysis_p (node))
2604 {
2605 for (int i = 0; i < ipa_get_param_count (info); i++)
2606 {
2607 ipa_set_param_used (info, i, true);
2608 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2609 }
2610 return;
2611 }
2612
2613 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2614 push_cfun (func);
2615 calculate_dominance_info (CDI_DOMINATORS);
2616 ipa_initialize_node_params (node);
2617 ipa_analyze_controlled_uses (node);
2618
2619 fbi.node = node;
2620 fbi.info = IPA_NODE_REF (node);
2621 fbi.bb_infos = vNULL;
2622 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2623 fbi.param_count = ipa_get_param_count (info);
2624 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2625
2626 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2627 {
2628 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2629 bi->cg_edges.safe_push (cs);
2630 }
2631
2632 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2633 {
2634 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2635 bi->cg_edges.safe_push (cs);
2636 }
2637
2638 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2639
2640 ipa_release_body_info (&fbi);
2641 free_dominance_info (CDI_DOMINATORS);
2642 pop_cfun ();
2643 }
2644
2645 /* Update the jump functions associated with call graph edge E when the call
2646 graph edge CS is being inlined, assuming that E->caller is already (possibly
2647 indirectly) inlined into CS->callee and that E has not been inlined. */
2648
2649 static void
2650 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2651 struct cgraph_edge *e)
2652 {
2653 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2654 class ipa_edge_args *args = IPA_EDGE_REF (e);
2655 if (!args)
2656 return;
2657 int count = ipa_get_cs_argument_count (args);
2658 int i;
2659
2660 for (i = 0; i < count; i++)
2661 {
2662 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2663 if (!top)
2664 {
2665 ipa_set_jf_unknown (dst);
2666 continue;
2667 }
2668 class ipa_polymorphic_call_context *dst_ctx
2669 = ipa_get_ith_polymorhic_call_context (args, i);
2670
2671 if (dst->type == IPA_JF_ANCESTOR)
2672 {
2673 struct ipa_jump_func *src;
2674 int dst_fid = dst->value.ancestor.formal_id;
2675 class ipa_polymorphic_call_context *src_ctx
2676 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2677
2678 /* Variable number of arguments can cause havoc if we try to access
2679 one that does not exist in the inlined edge. So make sure we
2680 don't. */
2681 if (dst_fid >= ipa_get_cs_argument_count (top))
2682 {
2683 ipa_set_jf_unknown (dst);
2684 continue;
2685 }
2686
2687 src = ipa_get_ith_jump_func (top, dst_fid);
2688
2689 if (src_ctx && !src_ctx->useless_p ())
2690 {
2691 class ipa_polymorphic_call_context ctx = *src_ctx;
2692
2693 /* TODO: Make type preserved safe WRT contexts. */
2694 if (!ipa_get_jf_ancestor_type_preserved (dst))
2695 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2696 ctx.offset_by (dst->value.ancestor.offset);
2697 if (!ctx.useless_p ())
2698 {
2699 if (!dst_ctx)
2700 {
2701 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2702 count);
2703 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2704 }
2705
2706 dst_ctx->combine_with (ctx);
2707 }
2708 }
2709
2710 if (src->agg.items
2711 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2712 {
2713 struct ipa_agg_jf_item *item;
2714 int j;
2715
2716 /* Currently we do not produce clobber aggregate jump functions,
2717 replace with merging when we do. */
2718 gcc_assert (!dst->agg.items);
2719
2720 dst->agg.items = vec_safe_copy (src->agg.items);
2721 dst->agg.by_ref = src->agg.by_ref;
2722 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2723 item->offset -= dst->value.ancestor.offset;
2724 }
2725
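	  /* Chain the ancestor jump function with what is known about the
	     argument in the caller: a simple pass-through merely redirects the
	     formal id, another ancestor additionally adds its offset, anything
	     else makes the result unknown.  */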
2726 if (src->type == IPA_JF_PASS_THROUGH
2727 && src->value.pass_through.operation == NOP_EXPR)
2728 {
2729 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2730 dst->value.ancestor.agg_preserved &=
2731 src->value.pass_through.agg_preserved;
2732 }
2733 else if (src->type == IPA_JF_ANCESTOR)
2734 {
2735 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2736 dst->value.ancestor.offset += src->value.ancestor.offset;
2737 dst->value.ancestor.agg_preserved &=
2738 src->value.ancestor.agg_preserved;
2739 }
2740 else
2741 ipa_set_jf_unknown (dst);
2742 }
2743 else if (dst->type == IPA_JF_PASS_THROUGH)
2744 {
2745 struct ipa_jump_func *src;
2746 	  /* We must check the range due to calls with a variable number of arguments
2747 	     and we cannot combine jump functions with operations. */
2748 if (dst->value.pass_through.operation == NOP_EXPR
2749 && (top && dst->value.pass_through.formal_id
2750 < ipa_get_cs_argument_count (top)))
2751 {
2752 int dst_fid = dst->value.pass_through.formal_id;
2753 src = ipa_get_ith_jump_func (top, dst_fid);
2754 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2755 class ipa_polymorphic_call_context *src_ctx
2756 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2757
2758 if (src_ctx && !src_ctx->useless_p ())
2759 {
2760 class ipa_polymorphic_call_context ctx = *src_ctx;
2761
2762 /* TODO: Make type preserved safe WRT contexts. */
2763 if (!ipa_get_jf_pass_through_type_preserved (dst))
2764 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2765 if (!ctx.useless_p ())
2766 {
2767 if (!dst_ctx)
2768 {
2769 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2770 count);
2771 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2772 }
2773 dst_ctx->combine_with (ctx);
2774 }
2775 }
2776 switch (src->type)
2777 {
2778 case IPA_JF_UNKNOWN:
2779 ipa_set_jf_unknown (dst);
2780 break;
2781 case IPA_JF_CONST:
2782 ipa_set_jf_cst_copy (dst, src);
2783 break;
2784
2785 case IPA_JF_PASS_THROUGH:
2786 {
2787 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2788 enum tree_code operation;
2789 operation = ipa_get_jf_pass_through_operation (src);
2790
2791 if (operation == NOP_EXPR)
2792 {
2793 bool agg_p;
2794 agg_p = dst_agg_p
2795 && ipa_get_jf_pass_through_agg_preserved (src);
2796 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2797 }
2798 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2799 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2800 else
2801 {
2802 tree operand = ipa_get_jf_pass_through_operand (src);
2803 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2804 operation);
2805 }
2806 break;
2807 }
2808 case IPA_JF_ANCESTOR:
2809 {
2810 bool agg_p;
2811 agg_p = dst_agg_p
2812 && ipa_get_jf_ancestor_agg_preserved (src);
2813 ipa_set_ancestor_jf (dst,
2814 ipa_get_jf_ancestor_offset (src),
2815 ipa_get_jf_ancestor_formal_id (src),
2816 agg_p);
2817 break;
2818 }
2819 default:
2820 gcc_unreachable ();
2821 }
2822
2823 if (src->agg.items
2824 && (dst_agg_p || !src->agg.by_ref))
2825 {
2826 /* Currently we do not produce clobber aggregate jump
2827 functions, replace with merging when we do. */
2828 gcc_assert (!dst->agg.items);
2829
2830 dst->agg.by_ref = src->agg.by_ref;
2831 dst->agg.items = vec_safe_copy (src->agg.items);
2832 }
2833 }
2834 else
2835 ipa_set_jf_unknown (dst);
2836 }
2837 }
2838 }
2839
2840 /* If TARGET is an addr_expr of a function declaration, make it the
2841    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2842 Otherwise, return NULL. */
2843
2844 struct cgraph_edge *
2845 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2846 bool speculative)
2847 {
2848 struct cgraph_node *callee;
2849 bool unreachable = false;
2850
2851 if (TREE_CODE (target) == ADDR_EXPR)
2852 target = TREE_OPERAND (target, 0);
2853 if (TREE_CODE (target) != FUNCTION_DECL)
2854 {
2855 target = canonicalize_constructor_val (target, NULL);
2856 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2857 {
2858 /* Member pointer call that goes through a VMT lookup. */
2859 if (ie->indirect_info->member_ptr
2860 /* Or if target is not an invariant expression and we do not
2861 		 know if it will evaluate to a function at runtime.
2862 This can happen when folding through &VAR, where &VAR
2863 is IP invariant, but VAR itself is not.
2864
2865 TODO: Revisit this when GCC 5 is branched. It seems that
2866 member_ptr check is not needed and that we may try to fold
2867 the expression and see if VAR is readonly. */
2868 || !is_gimple_ip_invariant (target))
2869 {
2870 if (dump_enabled_p ())
2871 {
2872 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2873 "discovered direct call non-invariant %s\n",
2874 ie->caller->dump_name ());
2875 }
2876 return NULL;
2877 }
2878
2879
2880 if (dump_enabled_p ())
2881 {
2882 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2883 "discovered direct call to non-function in %s, "
2884 "making it __builtin_unreachable\n",
2885 ie->caller->dump_name ());
2886 }
2887
2888 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2889 callee = cgraph_node::get_create (target);
2890 unreachable = true;
2891 }
2892 else
2893 callee = cgraph_node::get (target);
2894 }
2895 else
2896 callee = cgraph_node::get (target);
2897
2898 /* Because may-edges are not explicitly represented and the vtable may be
2899    external, we may create the first reference to the object in the unit. */
2900 if (!callee || callee->inlined_to)
2901 {
2902
2903       /* We had better ensure we can refer to it.
2904 	 In the case of static functions we are out of luck, since we have already
2905 	 removed their bodies. In the case of public functions we may or may
2906 	 not introduce the reference. */
2907 if (!canonicalize_constructor_val (target, NULL)
2908 || !TREE_PUBLIC (target))
2909 {
2910 if (dump_file)
2911 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2912 "(%s -> %s) but cannot refer to it. Giving up.\n",
2913 ie->caller->dump_name (),
2914 ie->callee->dump_name ());
2915 return NULL;
2916 }
2917 callee = cgraph_node::get_create (target);
2918 }
2919
2920   /* If the edge is already speculative, check whether it agrees with the new target. */
2921 if (speculative && ie->speculative)
2922 {
2923 struct cgraph_edge *e2;
2924 struct ipa_ref *ref;
2925 ie->speculative_call_info (e2, ie, ref);
2926 if (e2->callee->ultimate_alias_target ()
2927 != callee->ultimate_alias_target ())
2928 {
2929 if (dump_file)
2930 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2931 "target (%s -> %s) but the call is already "
2932 "speculated to %s. Giving up.\n",
2933 ie->caller->dump_name (), callee->dump_name (),
2934 e2->callee->dump_name ());
2935 }
2936 else
2937 {
2938 if (dump_file)
2939 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2940 		 "(%s -> %s) this agrees with previous speculation.\n",
2941 ie->caller->dump_name (), callee->dump_name ());
2942 }
2943 return NULL;
2944 }
2945
2946 if (!dbg_cnt (devirt))
2947 return NULL;
2948
2949 ipa_check_create_node_params ();
2950
2951 /* We cannot make edges to inline clones. It is a bug that someone removed
2952 the cgraph node too early. */
2953 gcc_assert (!callee->inlined_to);
2954
2955 if (dump_file && !unreachable)
2956 {
2957 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2958 "(%s -> %s), for stmt ",
2959 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2960 speculative ? "speculative" : "known",
2961 ie->caller->dump_name (),
2962 callee->dump_name ());
2963 if (ie->call_stmt)
2964 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2965 else
2966 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2967 }
2968 if (dump_enabled_p ())
2969 {
2970 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2971 "converting indirect call in %s to direct call to %s\n",
2972 ie->caller->name (), callee->name ());
2973 }
2974 if (!speculative)
2975 {
2976 struct cgraph_edge *orig = ie;
2977 ie = ie->make_direct (callee);
2978 	      /* If we resolved a speculative edge, the cost is already up to date
2979 		 for the direct call (adjusted by inline_edge_duplication_hook). */
2980 if (ie == orig)
2981 {
2982 ipa_call_summary *es = ipa_call_summaries->get (ie);
2983 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2984 - eni_size_weights.call_cost);
2985 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2986 - eni_time_weights.call_cost);
2987 }
2988 }
2989 else
2990 {
2991 if (!callee->can_be_discarded_p ())
2992 {
2993 cgraph_node *alias;
2994 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2995 if (alias)
2996 callee = alias;
2997 }
2998 /* make_speculative will update ie's cost to direct call cost. */
2999 ie = ie->make_speculative
3000 (callee, ie->count.apply_scale (8, 10));
3001 }
3002
3003 return ie;
3004 }
3005
3006 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3007 CONSTRUCTOR and return it. Return NULL if the search fails for some
3008 reason. */
3009
3010 static tree
3011 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3012 {
3013 tree type = TREE_TYPE (constructor);
3014 if (TREE_CODE (type) != ARRAY_TYPE
3015 && TREE_CODE (type) != RECORD_TYPE)
3016 return NULL;
3017
3018 unsigned ix;
3019 tree index, val;
3020 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3021 {
3022 HOST_WIDE_INT elt_offset;
3023 if (TREE_CODE (type) == ARRAY_TYPE)
3024 {
3025 offset_int off;
3026 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3027 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3028
3029 if (index)
3030 {
3031 if (TREE_CODE (index) == RANGE_EXPR)
3032 off = wi::to_offset (TREE_OPERAND (index, 0));
3033 else
3034 off = wi::to_offset (index);
3035 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3036 {
3037 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3038 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3039 off = wi::sext (off - wi::to_offset (low_bound),
3040 TYPE_PRECISION (TREE_TYPE (index)));
3041 }
3042 off *= wi::to_offset (unit_size);
3043 /* ??? Handle more than just the first index of a
3044 RANGE_EXPR. */
3045 }
3046 else
3047 off = wi::to_offset (unit_size) * ix;
3048
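	      /* The offset computed so far is in bytes; convert it to bits so
		 that it can be compared with REQ_OFFSET.  */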
3049 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3050 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3051 continue;
3052 elt_offset = off.to_shwi ();
3053 }
3054 else if (TREE_CODE (type) == RECORD_TYPE)
3055 {
3056 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3057 if (DECL_BIT_FIELD (index))
3058 continue;
3059 elt_offset = int_bit_position (index);
3060 }
3061 else
3062 gcc_unreachable ();
3063
3064 if (elt_offset > req_offset)
3065 return NULL;
3066
3067 if (TREE_CODE (val) == CONSTRUCTOR)
3068 return find_constructor_constant_at_offset (val,
3069 req_offset - elt_offset);
3070
3071 if (elt_offset == req_offset
3072 && is_gimple_reg_type (TREE_TYPE (val))
3073 && is_gimple_ip_invariant (val))
3074 return val;
3075 }
3076 return NULL;
3077 }
3078
3079 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3080 invariant from a static constructor and if so, return it. Otherwise return
3081 NULL. */
3082
3083 static tree
3084 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3085 {
3086 if (by_ref)
3087 {
3088 if (TREE_CODE (scalar) != ADDR_EXPR)
3089 return NULL;
3090 scalar = TREE_OPERAND (scalar, 0);
3091 }
3092
3093 if (!VAR_P (scalar)
3094 || !is_global_var (scalar)
3095 || !TREE_READONLY (scalar)
3096 || !DECL_INITIAL (scalar)
3097 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3098 return NULL;
3099
3100 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3101 }
3102
3103 /* Retrieve value from aggregate jump function AGG or static initializer of
3104 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3105 none. BY_REF specifies whether the value has to be passed by reference or
3106 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3107 to is set to true if the value comes from an initializer of a constant. */
3108
3109 tree
3110 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3111 HOST_WIDE_INT offset, bool by_ref,
3112 bool *from_global_constant)
3113 {
3114 struct ipa_agg_jf_item *item;
3115 int i;
3116
3117 if (scalar)
3118 {
3119 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3120 if (res)
3121 {
3122 if (from_global_constant)
3123 *from_global_constant = true;
3124 return res;
3125 }
3126 }
3127
3128 if (!agg
3129 || by_ref != agg->by_ref)
3130 return NULL;
3131
3132 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3133 if (item->offset == offset)
3134 {
3135 /* Currently we do not have clobber values, return NULL for them once
3136 we do. */
3137 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3138 if (from_global_constant)
3139 *from_global_constant = false;
3140 return item->value;
3141 }
3142 return NULL;
3143 }
3144
3145 /* Remove a reference to SYMBOL from the list of references of a node given by
3146 reference description RDESC. Return true if the reference has been
3147 successfully found and removed. */
3148
3149 static bool
3150 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3151 {
3152 struct ipa_ref *to_del;
3153 struct cgraph_edge *origin;
3154
3155 origin = rdesc->cs;
3156 if (!origin)
3157 return false;
3158 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3159 origin->lto_stmt_uid);
3160 if (!to_del)
3161 return false;
3162
3163 to_del->remove_reference ();
3164 if (dump_file)
3165 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3166 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3167 return true;
3168 }
3169
3170 /* If JFUNC has a reference description with refcount different from
3171 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3172 NULL. JFUNC must be a constant jump function. */
3173
3174 static struct ipa_cst_ref_desc *
3175 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3176 {
3177 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3178 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3179 return rdesc;
3180 else
3181 return NULL;
3182 }
3183
3184 /* If the value of constant jump function JFUNC is an address of a function
3185 declaration, return the associated call graph node. Otherwise return
3186 NULL. */
3187
3188 static cgraph_node *
3189 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3190 {
3191 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3192 tree cst = ipa_get_jf_constant (jfunc);
3193 if (TREE_CODE (cst) != ADDR_EXPR
3194 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3195 return NULL;
3196
3197 return cgraph_node::get (TREE_OPERAND (cst, 0));
3198 }
3199
3200
3201 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3202    refcount and if it hits zero, remove the reference to the symbol it refers
3203    to from the caller of the edge specified in the rdesc. Return false if either
3204    the symbol or the reference could not be found, otherwise return true. */
3205
3206 static bool
3207 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3208 {
3209 struct ipa_cst_ref_desc *rdesc;
3210 if (jfunc->type == IPA_JF_CONST
3211 && (rdesc = jfunc_rdesc_usable (jfunc))
3212 && --rdesc->refcount == 0)
3213 {
3214 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3215 if (!symbol)
3216 return false;
3217
3218 return remove_described_reference (symbol, rdesc);
3219 }
3220 return true;
3221 }
3222
3223 /* Try to find a destination for indirect edge IE that corresponds to a simple
3224 call or a call of a member function pointer and where the destination is a
3225 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3226 the type of the parameter to which the result of JFUNC is passed. If it can
3227 be determined, return the newly direct edge, otherwise return NULL.
3228 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3229
3230 static struct cgraph_edge *
3231 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3232 struct ipa_jump_func *jfunc, tree target_type,
3233 class ipa_node_params *new_root_info)
3234 {
3235 struct cgraph_edge *cs;
3236 tree target;
3237 bool agg_contents = ie->indirect_info->agg_contents;
3238 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3239 if (agg_contents)
3240 {
3241 bool from_global_constant;
3242 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3243 ie->indirect_info->offset,
3244 ie->indirect_info->by_ref,
3245 &from_global_constant);
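      /* A target loaded from the aggregate is only usable if it comes from a
	 read-only global initializer or if the memory it was loaded from is
	 guaranteed not to have been modified before the call.  */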
3246 if (target
3247 && !from_global_constant
3248 && !ie->indirect_info->guaranteed_unmodified)
3249 return NULL;
3250 }
3251 else
3252 target = scalar;
3253 if (!target)
3254 return NULL;
3255 cs = ipa_make_edge_direct_to_target (ie, target);
3256
3257 if (cs && !agg_contents)
3258 {
3259 bool ok;
3260 gcc_checking_assert (cs->callee
3261 && (cs != ie
3262 || jfunc->type != IPA_JF_CONST
3263 || !cgraph_node_for_jfunc (jfunc)
3264 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3265 ok = try_decrement_rdesc_refcount (jfunc);
3266 gcc_checking_assert (ok);
3267 }
3268
3269 return cs;
3270 }
3271
3272 /* Return the target to be used in cases of impossible devirtualization. IE
3273 and target (the latter can be NULL) are dumped when dumping is enabled. */
3274
3275 tree
3276 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3277 {
3278 if (dump_file)
3279 {
3280 if (target)
3281 fprintf (dump_file,
3282 "Type inconsistent devirtualization: %s->%s\n",
3283 ie->caller->dump_name (),
3284 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3285 else
3286 fprintf (dump_file,
3287 "No devirtualization target in %s\n",
3288 ie->caller->dump_name ());
3289 }
3290 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3291 cgraph_node::get_create (new_target);
3292 return new_target;
3293 }
3294
3295 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3296 call based on a formal parameter which is described by jump function JFUNC
3297 and if it can be determined, make it direct and return the direct edge.
3298 Otherwise, return NULL. CTX describes the polymorphic context that the
3299    parameter on which the call is based brings along with it. */
3300
3301 static struct cgraph_edge *
3302 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3303 struct ipa_jump_func *jfunc,
3304 class ipa_polymorphic_call_context ctx)
3305 {
3306 tree target = NULL;
3307 bool speculative = false;
3308
3309 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3310 return NULL;
3311
3312 gcc_assert (!ie->indirect_info->by_ref);
3313
3314 /* Try to do lookup via known virtual table pointer value. */
3315 if (!ie->indirect_info->vptr_changed
3316 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3317 {
3318 tree vtable;
3319 unsigned HOST_WIDE_INT offset;
3320 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3321 : NULL;
3322 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3323 ie->indirect_info->offset,
3324 true);
3325 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3326 {
3327 bool can_refer;
3328 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3329 vtable, offset, &can_refer);
3330 if (can_refer)
3331 {
3332 if (!t
3333 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3334 || !possible_polymorphic_call_target_p
3335 (ie, cgraph_node::get (t)))
3336 {
3337 /* Do not speculate builtin_unreachable, it is stupid! */
3338 if (!ie->indirect_info->vptr_changed)
3339 target = ipa_impossible_devirt_target (ie, target);
3340 else
3341 target = NULL;
3342 }
3343 else
3344 {
3345 target = t;
3346 speculative = ie->indirect_info->vptr_changed;
3347 }
3348 }
3349 }
3350 }
3351
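  /* Combine the polymorphic context derived from the jump function with the
     one implied by the call statement itself and query the set of possible
     polymorphic call targets.  */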
3352 ipa_polymorphic_call_context ie_context (ie);
3353 vec <cgraph_node *>targets;
3354 bool final;
3355
3356 ctx.offset_by (ie->indirect_info->offset);
3357 if (ie->indirect_info->vptr_changed)
3358 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3359 ie->indirect_info->otr_type);
3360 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3361 targets = possible_polymorphic_call_targets
3362 (ie->indirect_info->otr_type,
3363 ie->indirect_info->otr_token,
3364 ctx, &final);
3365 if (final && targets.length () <= 1)
3366 {
3367 speculative = false;
3368 if (targets.length () == 1)
3369 target = targets[0]->decl;
3370 else
3371 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3372 }
3373 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3374 && !ie->speculative && ie->maybe_hot_p ())
3375 {
3376 cgraph_node *n;
3377 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3378 ie->indirect_info->otr_token,
3379 ie->indirect_info->context);
3380 if (n)
3381 {
3382 target = n->decl;
3383 speculative = true;
3384 }
3385 }
3386
3387 if (target)
3388 {
3389 if (!possible_polymorphic_call_target_p
3390 (ie, cgraph_node::get_create (target)))
3391 {
3392 if (speculative)
3393 return NULL;
3394 target = ipa_impossible_devirt_target (ie, target);
3395 }
3396 return ipa_make_edge_direct_to_target (ie, target, speculative);
3397 }
3398 else
3399 return NULL;
3400 }
3401
3402 /* Update the param called notes associated with NODE when CS is being inlined,
3403 assuming NODE is (potentially indirectly) inlined into CS->callee.
3404 Moreover, if the callee is discovered to be constant, create a new cgraph
3405 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3406 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3407
3408 static bool
3409 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3410 struct cgraph_node *node,
3411 vec<cgraph_edge *> *new_edges)
3412 {
3413 class ipa_edge_args *top;
3414 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3415 class ipa_node_params *new_root_info, *inlined_node_info;
3416 bool res = false;
3417
3418 ipa_check_create_edge_args ();
3419 top = IPA_EDGE_REF (cs);
3420 new_root_info = IPA_NODE_REF (cs->caller->inlined_to
3421 ? cs->caller->inlined_to
3422 : cs->caller);
3423 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3424
3425 for (ie = node->indirect_calls; ie; ie = next_ie)
3426 {
3427 class cgraph_indirect_call_info *ici = ie->indirect_info;
3428 struct ipa_jump_func *jfunc;
3429 int param_index;
3430 cgraph_node *spec_target = NULL;
3431
3432 next_ie = ie->next_callee;
3433
3434 if (ici->param_index == -1)
3435 continue;
3436
3437       /* We must check the range due to calls with a variable number of arguments: */
3438 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3439 {
3440 ici->param_index = -1;
3441 continue;
3442 }
3443
3444 param_index = ici->param_index;
3445 jfunc = ipa_get_ith_jump_func (top, param_index);
3446
3447 if (ie->speculative)
3448 {
3449 struct cgraph_edge *de;
3450 struct ipa_ref *ref;
3451 ie->speculative_call_info (de, ie, ref);
3452 spec_target = de->callee;
3453 }
3454
3455 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3456 new_direct_edge = NULL;
3457 else if (ici->polymorphic)
3458 {
3459 ipa_polymorphic_call_context ctx;
3460 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3461 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3462 }
3463 else
3464 {
3465 tree target_type = ipa_get_type (inlined_node_info, param_index);
3466 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3467 target_type,
3468 new_root_info);
3469 }
3470
3471 /* If speculation was removed, then we need to do nothing. */
3472 if (new_direct_edge && new_direct_edge != ie
3473 && new_direct_edge->callee == spec_target)
3474 {
3475 new_direct_edge->indirect_inlining_edge = 1;
3476 top = IPA_EDGE_REF (cs);
3477 res = true;
3478 if (!new_direct_edge->speculative)
3479 continue;
3480 }
3481 else if (new_direct_edge)
3482 {
3483 new_direct_edge->indirect_inlining_edge = 1;
3484 if (new_direct_edge->call_stmt)
3485 new_direct_edge->call_stmt_cannot_inline_p
3486 = !gimple_check_call_matching_types (
3487 new_direct_edge->call_stmt,
3488 new_direct_edge->callee->decl, false);
3489 if (new_edges)
3490 {
3491 new_edges->safe_push (new_direct_edge);
3492 res = true;
3493 }
3494 top = IPA_EDGE_REF (cs);
3495 	  /* If a speculative edge was introduced we still need to update
3496 	     the call info of the indirect edge. */
3497 if (!new_direct_edge->speculative)
3498 continue;
3499 }
3500 if (jfunc->type == IPA_JF_PASS_THROUGH
3501 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3502 {
3503 if (ici->agg_contents
3504 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3505 && !ici->polymorphic)
3506 ici->param_index = -1;
3507 else
3508 {
3509 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3510 if (ici->polymorphic
3511 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3512 ici->vptr_changed = true;
3513 }
3514 }
3515 else if (jfunc->type == IPA_JF_ANCESTOR)
3516 {
3517 if (ici->agg_contents
3518 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3519 && !ici->polymorphic)
3520 ici->param_index = -1;
3521 else
3522 {
3523 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3524 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3525 if (ici->polymorphic
3526 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3527 ici->vptr_changed = true;
3528 }
3529 }
3530 else
3531 /* Either we can find a destination for this edge now or never. */
3532 ici->param_index = -1;
3533 }
3534
3535 return res;
3536 }
3537
3538 /* Recursively traverse subtree of NODE (including node) made of inlined
3539 cgraph_edges when CS has been inlined and invoke
3540 update_indirect_edges_after_inlining on all nodes and
3541 update_jump_functions_after_inlining on all non-inlined edges that lead out
3542 of this subtree. Newly discovered indirect edges will be added to
3543 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3544 created. */
3545
3546 static bool
3547 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3548 struct cgraph_node *node,
3549 vec<cgraph_edge *> *new_edges)
3550 {
3551 struct cgraph_edge *e;
3552 bool res;
3553
3554 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3555
3556 for (e = node->callees; e; e = e->next_callee)
3557 if (!e->inline_failed)
3558 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3559 else
3560 update_jump_functions_after_inlining (cs, e);
3561 for (e = node->indirect_calls; e; e = e->next_callee)
3562 update_jump_functions_after_inlining (cs, e);
3563
3564 return res;
3565 }
3566
3567 /* Combine two controlled uses counts as done during inlining. */
3568
3569 static int
3570 combine_controlled_uses_counters (int c, int d)
3571 {
3572 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3573 return IPA_UNDESCRIBED_USE;
3574 else
3575 return c + d - 1;
3576 }
3577
3578 /* Propagate the number of controlled uses from CS->callee to the new root of
3579    the tree of inlined nodes. */
3580
3581 static void
3582 propagate_controlled_uses (struct cgraph_edge *cs)
3583 {
3584 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3585 if (!args)
3586 return;
3587 struct cgraph_node *new_root = cs->caller->inlined_to
3588 ? cs->caller->inlined_to : cs->caller;
3589 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3590 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3591 int count, i;
3592
3593 count = MIN (ipa_get_cs_argument_count (args),
3594 ipa_get_param_count (old_root_info));
3595 for (i = 0; i < count; i++)
3596 {
3597 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3598 struct ipa_cst_ref_desc *rdesc;
3599
3600 if (jf->type == IPA_JF_PASS_THROUGH)
3601 {
3602 int src_idx, c, d;
3603 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3604 c = ipa_get_controlled_uses (new_root_info, src_idx);
3605 d = ipa_get_controlled_uses (old_root_info, i);
3606
3607 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3608 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3609 c = combine_controlled_uses_counters (c, d);
3610 ipa_set_controlled_uses (new_root_info, src_idx, c);
3611 if (c == 0 && new_root_info->ipcp_orig_node)
3612 {
3613 struct cgraph_node *n;
3614 struct ipa_ref *ref;
3615 tree t = new_root_info->known_csts[src_idx];
3616
3617 if (t && TREE_CODE (t) == ADDR_EXPR
3618 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3619 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3620 && (ref = new_root->find_reference (n, NULL, 0)))
3621 {
3622 if (dump_file)
3623 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3624 "reference from %s to %s.\n",
3625 new_root->dump_name (),
3626 n->dump_name ());
3627 ref->remove_reference ();
3628 }
3629 }
3630 }
3631 else if (jf->type == IPA_JF_CONST
3632 && (rdesc = jfunc_rdesc_usable (jf)))
3633 {
3634 int d = ipa_get_controlled_uses (old_root_info, i);
3635 int c = rdesc->refcount;
3636 rdesc->refcount = combine_controlled_uses_counters (c, d);
3637 if (rdesc->refcount == 0)
3638 {
3639 tree cst = ipa_get_jf_constant (jf);
3640 struct cgraph_node *n;
3641 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3642 && TREE_CODE (TREE_OPERAND (cst, 0))
3643 == FUNCTION_DECL);
3644 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3645 if (n)
3646 {
3647 struct cgraph_node *clone;
3648 bool ok;
3649 ok = remove_described_reference (n, rdesc);
3650 gcc_checking_assert (ok);
3651
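              /* References to N may also have been created when cloning the
                 intermediate inline and IPA-CP clones between CS->caller and
                 the caller recorded in RDESC; remove those as well.  */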
3652 clone = cs->caller;
3653 while (clone->inlined_to
3654 && clone != rdesc->cs->caller
3655 && IPA_NODE_REF (clone)->ipcp_orig_node)
3656 {
3657 struct ipa_ref *ref;
3658 ref = clone->find_reference (n, NULL, 0);
3659 if (ref)
3660 {
3661 if (dump_file)
3662 fprintf (dump_file, "ipa-prop: Removing "
3663 "cloning-created reference "
3664 "from %s to %s.\n",
3665 clone->dump_name (),
3666 n->dump_name ());
3667 ref->remove_reference ();
3668 }
3669 clone = clone->callers->caller;
3670 }
3671 }
3672 }
3673 }
3674 }
3675
3676 for (i = ipa_get_param_count (old_root_info);
3677 i < ipa_get_cs_argument_count (args);
3678 i++)
3679 {
3680 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3681
3682 if (jf->type == IPA_JF_CONST)
3683 {
3684 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3685 if (rdesc)
3686 rdesc->refcount = IPA_UNDESCRIBED_USE;
3687 }
3688 else if (jf->type == IPA_JF_PASS_THROUGH)
3689 ipa_set_controlled_uses (new_root_info,
3690 jf->value.pass_through.formal_id,
3691 IPA_UNDESCRIBED_USE);
3692 }
3693 }
3694
3695 /* Update jump functions and call note functions on inlining the call site CS.
3696 CS is expected to lead to a node already cloned by
3697 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3698 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3699 created. */
3700
3701 bool
3702 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3703 vec<cgraph_edge *> *new_edges)
3704 {
3705 bool changed;
3706 /* Do nothing if the preparation phase has not been carried out yet
3707 (i.e. during early inlining). */
3708 if (!ipa_node_params_sum)
3709 return false;
3710 gcc_assert (ipa_edge_args_sum);
3711
3712 propagate_controlled_uses (cs);
3713 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3714
3715 return changed;
3716 }
3717
3718 /* Ensure that the array of edge argument infos is big enough to accommodate a
3719 structure for all edges and reallocate it if not. Also allocate the
3720 associated hash tables if they do not already exist. */
3721
3722 void
3723 ipa_check_create_edge_args (void)
3724 {
3725 if (!ipa_edge_args_sum)
3726 ipa_edge_args_sum
3727 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
3728 ipa_edge_args_sum_t (symtab, true));
3729 if (!ipa_bits_hash_table)
3730 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3731 if (!ipa_vr_hash_table)
3732 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3733 }
3734
3735 /* Free all ipa_edge structures. */
3736
3737 void
3738 ipa_free_all_edge_args (void)
3739 {
3740 if (!ipa_edge_args_sum)
3741 return;
3742
3743 ipa_edge_args_sum->~ipa_edge_args_sum_t ();
3744 ggc_free (ipa_edge_args_sum);
3745 ipa_edge_args_sum = NULL;
3746 }
3747
3748 /* Free all ipa_node_params structures. */
3749
3750 void
3751 ipa_free_all_node_params (void)
3752 {
3753 ipa_node_params_sum->~ipa_node_params_t ();
3754 ggc_free (ipa_node_params_sum);
3755 ipa_node_params_sum = NULL;
3756 }
3757
3758 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3759 tables if they do not already exist. */
3760
3761 void
3762 ipcp_transformation_initialize (void)
3763 {
3764 if (!ipa_bits_hash_table)
3765 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3766 if (!ipa_vr_hash_table)
3767 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3768 if (ipcp_transformation_sum == NULL)
3769 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3770 }
3771
3772 /* Release the IPA CP transformation summary. */
3773
3774 void
3775 ipcp_free_transformation_sum (void)
3776 {
3777 if (!ipcp_transformation_sum)
3778 return;
3779
3780 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
3781 ggc_free (ipcp_transformation_sum);
3782 ipcp_transformation_sum = NULL;
3783 }
3784
3785 /* Set the aggregate replacements of NODE to be AGGVALS. */
3786
3787 void
3788 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3789 struct ipa_agg_replacement_value *aggvals)
3790 {
3791 ipcp_transformation_initialize ();
3792 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3793 s->agg_values = aggvals;
3794 }
3795
3796 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3797 count data structures accordingly. */
3798
3799 void
3800 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3801 {
3802 if (args->jump_functions)
3803 {
3804 struct ipa_jump_func *jf;
3805 int i;
3806 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3807 {
3808 struct ipa_cst_ref_desc *rdesc;
3809 try_decrement_rdesc_refcount (jf);
3810 if (jf->type == IPA_JF_CONST
3811 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3812 && rdesc->cs == cs)
3813 rdesc->cs = NULL;
3814 }
3815 }
3816 }
3817
3818 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3819 reference count data structures accordingly. */
3820
3821 void
3822 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3823 ipa_edge_args *old_args, ipa_edge_args *new_args)
3824 {
3825 unsigned int i;
3826
3827 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3828 if (old_args->polymorphic_call_contexts)
3829 new_args->polymorphic_call_contexts
3830 = vec_safe_copy (old_args->polymorphic_call_contexts);
3831
3832 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3833 {
3834 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3835 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3836
3837 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3838
3839 if (src_jf->type == IPA_JF_CONST)
3840 {
3841 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3842
3843 if (!src_rdesc)
3844 dst_jf->value.constant.rdesc = NULL;
3845 else if (src->caller == dst->caller)
3846 {
3847 struct ipa_ref *ref;
3848 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3849 gcc_checking_assert (n);
3850 ref = src->caller->find_reference (n, src->call_stmt,
3851 src->lto_stmt_uid);
3852 gcc_checking_assert (ref);
3853 dst->caller->clone_reference (ref, ref->stmt);
3854
3855 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3856 dst_rdesc->cs = dst;
3857 dst_rdesc->refcount = src_rdesc->refcount;
3858 dst_rdesc->next_duplicate = NULL;
3859 dst_jf->value.constant.rdesc = dst_rdesc;
3860 }
3861 else if (src_rdesc->cs == src)
3862 {
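              /* SRC is the edge that the original rdesc describes; create a
                 new rdesc for DST and chain it into the list of duplicates
                 of the original.  */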
3863 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3864 dst_rdesc->cs = dst;
3865 dst_rdesc->refcount = src_rdesc->refcount;
3866 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3867 src_rdesc->next_duplicate = dst_rdesc;
3868 dst_jf->value.constant.rdesc = dst_rdesc;
3869 }
3870 else
3871 {
3872 struct ipa_cst_ref_desc *dst_rdesc;
3873 /* This can happen during inlining, when a JFUNC can refer to a
3874 reference taken in a function up in the tree of inline clones.
3875 We need to find the duplicate that refers to our tree of
3876 inline clones. */
3877
3878 gcc_assert (dst->caller->inlined_to);
3879 for (dst_rdesc = src_rdesc->next_duplicate;
3880 dst_rdesc;
3881 dst_rdesc = dst_rdesc->next_duplicate)
3882 {
3883 struct cgraph_node *top;
3884 top = dst_rdesc->cs->caller->inlined_to
3885 ? dst_rdesc->cs->caller->inlined_to
3886 : dst_rdesc->cs->caller;
3887 if (dst->caller->inlined_to == top)
3888 break;
3889 }
3890 gcc_assert (dst_rdesc);
3891 dst_jf->value.constant.rdesc = dst_rdesc;
3892 }
3893 }
3894 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3895 && src->caller == dst->caller)
3896 {
3897 struct cgraph_node *inline_root = dst->caller->inlined_to
3898 ? dst->caller->inlined_to : dst->caller;
3899 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3900 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3901
3902 int c = ipa_get_controlled_uses (root_info, idx);
3903 if (c != IPA_UNDESCRIBED_USE)
3904 {
3905 c++;
3906 ipa_set_controlled_uses (root_info, idx, c);
3907 }
3908 }
3909 }
3910 }
3911
3912 /* Analyze a function newly added to the call graph. */
3913
3914 static void
3915 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3916 {
3917 if (node->has_gimple_body_p ())
3918 ipa_analyze_node (node);
3919 }
3920
3921 /* Hook that is called by summary when a node is duplicated. */
3922
3923 void
3924 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3925 ipa_node_params *old_info,
3926 ipa_node_params *new_info)
3927 {
3928 ipa_agg_replacement_value *old_av, *new_av;
3929
3930 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3931 new_info->lattices = NULL;
3932 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3933 new_info->known_csts = old_info->known_csts.copy ();
3934 new_info->known_contexts = old_info->known_contexts.copy ();
3935
3936 new_info->analysis_done = old_info->analysis_done;
3937 new_info->node_enqueued = old_info->node_enqueued;
3938 new_info->versionable = old_info->versionable;
3939
3940 old_av = ipa_get_agg_replacements_for_node (src);
3941 if (old_av)
3942 {
3943 new_av = NULL;
3944 while (old_av)
3945 {
3946 struct ipa_agg_replacement_value *v;
3947
3948 v = ggc_alloc<ipa_agg_replacement_value> ();
3949 memcpy (v, old_av, sizeof (*v));
3950 v->next = new_av;
3951 new_av = v;
3952 old_av = old_av->next;
3953 }
3954 ipa_set_node_agg_value_chain (dst, new_av);
3955 }
3956
3957 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3958
3959 if (src_trans)
3960 {
3961 ipcp_transformation_initialize ();
3962 src_trans = ipcp_transformation_sum->get_create (src);
3963 ipcp_transformation *dst_trans
3964 = ipcp_transformation_sum->get_create (dst);
3965
3966 dst_trans->bits = vec_safe_copy (src_trans->bits);
3967
3968 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3969 vec<ipa_vr, va_gc> *&dst_vr
3970 = ipcp_get_transformation_summary (dst)->m_vr;
3971 if (vec_safe_length (src_trans->m_vr) > 0)
3972 {
3973 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3974 for (unsigned i = 0; i < src_vr->length (); ++i)
3975 dst_vr->quick_push ((*src_vr)[i]);
3976 }
3977 }
3978 }
3979
3980 /* Register our cgraph hooks if they are not already there. */
3981
3982 void
3983 ipa_register_cgraph_hooks (void)
3984 {
3985 ipa_check_create_node_params ();
3986 ipa_check_create_edge_args ();
3987
3988 function_insertion_hook_holder =
3989 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3990 }
3991
3992 /* Unregister our cgraph hooks. */
3993
3994 static void
3995 ipa_unregister_cgraph_hooks (void)
3996 {
3997 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3998 function_insertion_hook_holder = NULL;
3999 }
4000
4001 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4002 longer needed after ipa-cp. */
4003
4004 void
4005 ipa_free_all_structures_after_ipa_cp (void)
4006 {
4007 if (!optimize && !in_lto_p)
4008 {
4009 ipa_free_all_edge_args ();
4010 ipa_free_all_node_params ();
4011 ipcp_sources_pool.release ();
4012 ipcp_cst_values_pool.release ();
4013 ipcp_poly_ctx_values_pool.release ();
4014 ipcp_agg_lattice_pool.release ();
4015 ipa_unregister_cgraph_hooks ();
4016 ipa_refdesc_pool.release ();
4017 }
4018 }
4019
4020 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4021 longer needed after indirect inlining. */
4022
4023 void
4024 ipa_free_all_structures_after_iinln (void)
4025 {
4026 ipa_free_all_edge_args ();
4027 ipa_free_all_node_params ();
4028 ipa_unregister_cgraph_hooks ();
4029 ipcp_sources_pool.release ();
4030 ipcp_cst_values_pool.release ();
4031 ipcp_poly_ctx_values_pool.release ();
4032 ipcp_agg_lattice_pool.release ();
4033 ipa_refdesc_pool.release ();
4034 }
4035
4036 /* Print ipa_tree_map data structures of function NODE to F. */
4038
4039 void
4040 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4041 {
4042 int i, count;
4043 class ipa_node_params *info;
4044
4045 if (!node->definition)
4046 return;
4047 info = IPA_NODE_REF (node);
4048 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4049 count = ipa_get_param_count (info);
4050 for (i = 0; i < count; i++)
4051 {
4052 int c;
4053
4054 fprintf (f, " ");
4055 ipa_dump_param (f, info, i);
4056 if (ipa_is_param_used (info, i))
4057 fprintf (f, " used");
4058 c = ipa_get_controlled_uses (info, i);
4059 if (c == IPA_UNDESCRIBED_USE)
4060 fprintf (f, " undescribed_use");
4061 else
4062 fprintf (f, " controlled_uses=%i", c);
4063 fprintf (f, "\n");
4064 }
4065 }
4066
4067 /* Print ipa_tree_map data structures of all functions in the
4068 callgraph to F. */
4069
4070 void
4071 ipa_print_all_params (FILE * f)
4072 {
4073 struct cgraph_node *node;
4074
4075 fprintf (f, "\nFunction parameters:\n");
4076 FOR_EACH_FUNCTION (node)
4077 ipa_print_node_params (f, node);
4078 }
4079
4080 /* Dump the AV linked list. */
4081
4082 void
4083 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4084 {
4085 bool comma = false;
4086 fprintf (f, " Aggregate replacements:");
4087 for (; av; av = av->next)
4088 {
4089 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4090 av->index, av->offset);
4091 print_generic_expr (f, av->value);
4092 comma = true;
4093 }
4094 fprintf (f, "\n");
4095 }
4096
4097 /* Stream out jump function JUMP_FUNC to OB. */
4098
4099 static void
4100 ipa_write_jump_function (struct output_block *ob,
4101 struct ipa_jump_func *jump_func)
4102 {
4103 struct ipa_agg_jf_item *item;
4104 struct bitpack_d bp;
4105 int i, count;
4106 int flag = 0;
4107
4108 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4109 as well as WPA memory by handling them specially. */
4110 if (jump_func->type == IPA_JF_CONST
4111 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4112 flag = 1;
4113
4114 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4115 switch (jump_func->type)
4116 {
4117 case IPA_JF_UNKNOWN:
4118 break;
4119 case IPA_JF_CONST:
4120 gcc_assert (
4121 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4122 stream_write_tree (ob,
4123 flag
4124 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4125 : jump_func->value.constant.value, true);
4126 break;
4127 case IPA_JF_PASS_THROUGH:
4128 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4129 if (jump_func->value.pass_through.operation == NOP_EXPR)
4130 {
4131 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4132 bp = bitpack_create (ob->main_stream);
4133 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4134 streamer_write_bitpack (&bp);
4135 }
4136 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4137 == tcc_unary)
4138 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4139 else
4140 {
4141 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4142 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4143 }
4144 break;
4145 case IPA_JF_ANCESTOR:
4146 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4147 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4148 bp = bitpack_create (ob->main_stream);
4149 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4150 streamer_write_bitpack (&bp);
4151 break;
4152 }
4153
4154 count = vec_safe_length (jump_func->agg.items);
4155 streamer_write_uhwi (ob, count);
4156 if (count)
4157 {
4158 bp = bitpack_create (ob->main_stream);
4159 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4160 streamer_write_bitpack (&bp);
4161 }
4162
4163 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4164 {
4165 streamer_write_uhwi (ob, item->offset);
4166 stream_write_tree (ob, item->value, true);
4167 }
4168
4169 bp = bitpack_create (ob->main_stream);
4170 bp_pack_value (&bp, !!jump_func->bits, 1);
4171 streamer_write_bitpack (&bp);
4172 if (jump_func->bits)
4173 {
4174 streamer_write_widest_int (ob, jump_func->bits->value);
4175 streamer_write_widest_int (ob, jump_func->bits->mask);
4176 }
4177 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4178 streamer_write_bitpack (&bp);
4179 if (jump_func->m_vr)
4180 {
4181 streamer_write_enum (ob->main_stream, value_rang_type,
4182 VR_LAST, jump_func->m_vr->kind ());
4183 stream_write_tree (ob, jump_func->m_vr->min (), true);
4184 stream_write_tree (ob, jump_func->m_vr->max (), true);
4185 }
4186 }
4187
4188 /* Read in jump function JUMP_FUNC from IB. */
4189
4190 static void
4191 ipa_read_jump_function (class lto_input_block *ib,
4192 struct ipa_jump_func *jump_func,
4193 struct cgraph_edge *cs,
4194 class data_in *data_in,
4195 bool prevails)
4196 {
4197 enum jump_func_type jftype;
4198 enum tree_code operation;
4199 int i, count;
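  /* The jump function type is streamed doubled; the lowest bit is set when
     an IPA_JF_CONST constant was streamed as the operand of an ADDR_EXPR
     (see ipa_write_jump_function).  */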
4200 int val = streamer_read_uhwi (ib);
4201 bool flag = val & 1;
4202
4203 jftype = (enum jump_func_type) (val / 2);
4204 switch (jftype)
4205 {
4206 case IPA_JF_UNKNOWN:
4207 ipa_set_jf_unknown (jump_func);
4208 break;
4209 case IPA_JF_CONST:
4210 {
4211 tree t = stream_read_tree (ib, data_in);
4212 if (flag && prevails)
4213 t = build_fold_addr_expr (t);
4214 ipa_set_jf_constant (jump_func, t, cs);
4215 }
4216 break;
4217 case IPA_JF_PASS_THROUGH:
4218 operation = (enum tree_code) streamer_read_uhwi (ib);
4219 if (operation == NOP_EXPR)
4220 {
4221 int formal_id = streamer_read_uhwi (ib);
4222 struct bitpack_d bp = streamer_read_bitpack (ib);
4223 bool agg_preserved = bp_unpack_value (&bp, 1);
4224 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4225 }
4226 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4227 {
4228 int formal_id = streamer_read_uhwi (ib);
4229 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4230 }
4231 else
4232 {
4233 tree operand = stream_read_tree (ib, data_in);
4234 int formal_id = streamer_read_uhwi (ib);
4235 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4236 operation);
4237 }
4238 break;
4239 case IPA_JF_ANCESTOR:
4240 {
4241 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4242 int formal_id = streamer_read_uhwi (ib);
4243 struct bitpack_d bp = streamer_read_bitpack (ib);
4244 bool agg_preserved = bp_unpack_value (&bp, 1);
4245 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4246 break;
4247 }
4248 default:
4249 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4250 }
4251
4252 count = streamer_read_uhwi (ib);
4253 if (prevails)
4254 vec_alloc (jump_func->agg.items, count);
4255 if (count)
4256 {
4257 struct bitpack_d bp = streamer_read_bitpack (ib);
4258 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4259 }
4260 for (i = 0; i < count; i++)
4261 {
4262 struct ipa_agg_jf_item item;
4263 item.offset = streamer_read_uhwi (ib);
4264 item.value = stream_read_tree (ib, data_in);
4265 if (prevails)
4266 jump_func->agg.items->quick_push (item);
4267 }
4268
4269 struct bitpack_d bp = streamer_read_bitpack (ib);
4270 bool bits_known = bp_unpack_value (&bp, 1);
4271 if (bits_known)
4272 {
4273 widest_int value = streamer_read_widest_int (ib);
4274 widest_int mask = streamer_read_widest_int (ib);
4275 if (prevails)
4276 ipa_set_jfunc_bits (jump_func, value, mask);
4277 }
4278 else
4279 jump_func->bits = NULL;
4280
4281 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4282 bool vr_known = bp_unpack_value (&vr_bp, 1);
4283 if (vr_known)
4284 {
4285 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4286 VR_LAST);
4287 tree min = stream_read_tree (ib, data_in);
4288 tree max = stream_read_tree (ib, data_in);
4289 if (prevails)
4290 ipa_set_jfunc_vr (jump_func, type, min, max);
4291 }
4292 else
4293 jump_func->m_vr = NULL;
4294 }
4295
4296 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4297 relevant to indirect inlining to OB. */
4298
4299 static void
4300 ipa_write_indirect_edge_info (struct output_block *ob,
4301 struct cgraph_edge *cs)
4302 {
4303 class cgraph_indirect_call_info *ii = cs->indirect_info;
4304 struct bitpack_d bp;
4305
4306 streamer_write_hwi (ob, ii->param_index);
4307 bp = bitpack_create (ob->main_stream);
4308 bp_pack_value (&bp, ii->polymorphic, 1);
4309 bp_pack_value (&bp, ii->agg_contents, 1);
4310 bp_pack_value (&bp, ii->member_ptr, 1);
4311 bp_pack_value (&bp, ii->by_ref, 1);
4312 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4313 bp_pack_value (&bp, ii->vptr_changed, 1);
4314 streamer_write_bitpack (&bp);
4315 if (ii->agg_contents || ii->polymorphic)
4316 streamer_write_hwi (ob, ii->offset);
4317 else
4318 gcc_assert (ii->offset == 0);
4319
4320 if (ii->polymorphic)
4321 {
4322 streamer_write_hwi (ob, ii->otr_token);
4323 stream_write_tree (ob, ii->otr_type, true);
4324 ii->context.stream_out (ob);
4325 }
4326 }
4327
4328 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4329 relevant to indirect inlining from IB. */
4330
4331 static void
4332 ipa_read_indirect_edge_info (class lto_input_block *ib,
4333 class data_in *data_in,
4334 struct cgraph_edge *cs)
4335 {
4336 class cgraph_indirect_call_info *ii = cs->indirect_info;
4337 struct bitpack_d bp;
4338
4339 ii->param_index = (int) streamer_read_hwi (ib);
4340 bp = streamer_read_bitpack (ib);
4341 ii->polymorphic = bp_unpack_value (&bp, 1);
4342 ii->agg_contents = bp_unpack_value (&bp, 1);
4343 ii->member_ptr = bp_unpack_value (&bp, 1);
4344 ii->by_ref = bp_unpack_value (&bp, 1);
4345 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4346 ii->vptr_changed = bp_unpack_value (&bp, 1);
4347 if (ii->agg_contents || ii->polymorphic)
4348 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4349 else
4350 ii->offset = 0;
4351 if (ii->polymorphic)
4352 {
4353 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4354 ii->otr_type = stream_read_tree (ib, data_in);
4355 ii->context.stream_in (ib, data_in);
4356 }
4357 }
4358
4359 /* Stream out NODE info to OB. */
4360
4361 static void
4362 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4363 {
4364 int node_ref;
4365 lto_symtab_encoder_t encoder;
4366 class ipa_node_params *info = IPA_NODE_REF (node);
4367 int j;
4368 struct cgraph_edge *e;
4369 struct bitpack_d bp;
4370
4371 encoder = ob->decl_state->symtab_node_encoder;
4372 node_ref = lto_symtab_encoder_encode (encoder, node);
4373 streamer_write_uhwi (ob, node_ref);
4374
4375 streamer_write_uhwi (ob, ipa_get_param_count (info));
4376 for (j = 0; j < ipa_get_param_count (info); j++)
4377 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4378 bp = bitpack_create (ob->main_stream);
4379 gcc_assert (info->analysis_done
4380 || ipa_get_param_count (info) == 0);
4381 gcc_assert (!info->node_enqueued);
4382 gcc_assert (!info->ipcp_orig_node);
4383 for (j = 0; j < ipa_get_param_count (info); j++)
4384 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4385 streamer_write_bitpack (&bp);
4386 for (j = 0; j < ipa_get_param_count (info); j++)
4387 {
4388 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4389 stream_write_tree (ob, ipa_get_type (info, j), true);
4390 }
4391 for (e = node->callees; e; e = e->next_callee)
4392 {
4393 class ipa_edge_args *args = IPA_EDGE_REF (e);
4394
4395 if (!args)
4396 {
4397 streamer_write_uhwi (ob, 0);
4398 continue;
4399 }
4400
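      /* Stream the argument count doubled, with the lowest bit recording
	 whether polymorphic call contexts are streamed as well.  */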
4401 streamer_write_uhwi (ob,
4402 ipa_get_cs_argument_count (args) * 2
4403 + (args->polymorphic_call_contexts != NULL));
4404 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4405 {
4406 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4407 if (args->polymorphic_call_contexts != NULL)
4408 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4409 }
4410 }
4411 for (e = node->indirect_calls; e; e = e->next_callee)
4412 {
4413 class ipa_edge_args *args = IPA_EDGE_REF (e);
4414 if (!args)
4415 streamer_write_uhwi (ob, 0);
4416 else
4417 {
4418 streamer_write_uhwi (ob,
4419 ipa_get_cs_argument_count (args) * 2
4420 + (args->polymorphic_call_contexts != NULL));
4421 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4422 {
4423 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4424 if (args->polymorphic_call_contexts != NULL)
4425 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4426 }
4427 }
4428 ipa_write_indirect_edge_info (ob, e);
4429 }
4430 }
4431
4432 /* Stream in the jump functions (and any polymorphic call contexts) of edge E from IB. */
4433
4434 static void
4435 ipa_read_edge_info (class lto_input_block *ib,
4436 class data_in *data_in,
4437 struct cgraph_edge *e, bool prevails)
4438 {
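  /* The streamed value is the argument count doubled, with the lowest bit
     recording whether polymorphic call contexts were streamed too.  */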
4439 int count = streamer_read_uhwi (ib);
4440 bool contexts_computed = count & 1;
4441
4442 count /= 2;
4443 if (!count)
4444 return;
4445 if (prevails && e->possibly_call_in_translation_unit_p ())
4446 {
4447 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4448 vec_safe_grow_cleared (args->jump_functions, count);
4449 if (contexts_computed)
4450 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4451 for (int k = 0; k < count; k++)
4452 {
4453 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4454 data_in, prevails);
4455 if (contexts_computed)
4456 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4457 (ib, data_in);
4458 }
4459 }
4460 else
4461 {
4462 for (int k = 0; k < count; k++)
4463 {
4464 struct ipa_jump_func dummy;
4465 ipa_read_jump_function (ib, &dummy, e,
4466 data_in, prevails);
4467 if (contexts_computed)
4468 {
4469 class ipa_polymorphic_call_context ctx;
4470 ctx.stream_in (ib, data_in);
4471 }
4472 }
4473 }
4474 }
4475
4476 /* Stream in NODE info from IB. */
4477
4478 static void
4479 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4480 class data_in *data_in)
4481 {
4482 int k;
4483 struct cgraph_edge *e;
4484 struct bitpack_d bp;
4485 bool prevails = node->prevailing_p ();
4486 class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
4487
4488 int param_count = streamer_read_uhwi (ib);
4489 if (prevails)
4490 {
4491 ipa_alloc_node_params (node, param_count);
4492 for (k = 0; k < param_count; k++)
4493 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4494 if (ipa_get_param_count (info) != 0)
4495 info->analysis_done = true;
4496 info->node_enqueued = false;
4497 }
4498 else
4499 for (k = 0; k < param_count; k++)
4500 streamer_read_uhwi (ib);
4501
4502 bp = streamer_read_bitpack (ib);
4503 for (k = 0; k < param_count; k++)
4504 {
4505 bool used = bp_unpack_value (&bp, 1);
4506
4507 if (prevails)
4508 ipa_set_param_used (info, k, used);
4509 }
4510 for (k = 0; k < param_count; k++)
4511 {
4512 int nuses = streamer_read_hwi (ib);
4513 tree type = stream_read_tree (ib, data_in);
4514
4515 if (prevails)
4516 {
4517 ipa_set_controlled_uses (info, k, nuses);
4518 (*info->descriptors)[k].decl_or_type = type;
4519 }
4520 }
4521 for (e = node->callees; e; e = e->next_callee)
4522 ipa_read_edge_info (ib, data_in, e, prevails);
4523 for (e = node->indirect_calls; e; e = e->next_callee)
4524 {
4525 ipa_read_edge_info (ib, data_in, e, prevails);
4526 ipa_read_indirect_edge_info (ib, data_in, e);
4527 }
4528 }
4529
4530 /* Write jump functions of all functions in the current partition. */
4531
4532 void
4533 ipa_prop_write_jump_functions (void)
4534 {
4535 struct cgraph_node *node;
4536 struct output_block *ob;
4537 unsigned int count = 0;
4538 lto_symtab_encoder_iterator lsei;
4539 lto_symtab_encoder_t encoder;
4540
4541 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4542 return;
4543
4544 ob = create_output_block (LTO_section_jump_functions);
4545 encoder = ob->decl_state->symtab_node_encoder;
4546 ob->symbol = NULL;
4547 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4548 lsei_next_function_in_partition (&lsei))
4549 {
4550 node = lsei_cgraph_node (lsei);
4551 if (node->has_gimple_body_p ()
4552 && IPA_NODE_REF (node) != NULL)
4553 count++;
4554 }
4555
4556 streamer_write_uhwi (ob, count);
4557
4558 /* Process all of the functions. */
4559 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4560 lsei_next_function_in_partition (&lsei))
4561 {
4562 node = lsei_cgraph_node (lsei);
4563 if (node->has_gimple_body_p ()
4564 && IPA_NODE_REF (node) != NULL)
4565 ipa_write_node_info (ob, node);
4566 }
4567 streamer_write_char_stream (ob->main_stream, 0);
4568 produce_asm (ob, NULL);
4569 destroy_output_block (ob);
4570 }
4571
4572 /* Read section in file FILE_DATA of length LEN with data DATA. */
4573
4574 static void
4575 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4576 size_t len)
4577 {
4578 const struct lto_function_header *header =
4579 (const struct lto_function_header *) data;
4580 const int cfg_offset = sizeof (struct lto_function_header);
4581 const int main_offset = cfg_offset + header->cfg_size;
4582 const int string_offset = main_offset + header->main_size;
4583 class data_in *data_in;
4584 unsigned int i;
4585 unsigned int count;
4586
4587 lto_input_block ib_main ((const char *) data + main_offset,
4588 header->main_size, file_data->mode_table);
4589
4590 data_in =
4591 lto_data_in_create (file_data, (const char *) data + string_offset,
4592 header->string_size, vNULL);
4593 count = streamer_read_uhwi (&ib_main);
4594
4595 for (i = 0; i < count; i++)
4596 {
4597 unsigned int index;
4598 struct cgraph_node *node;
4599 lto_symtab_encoder_t encoder;
4600
4601 index = streamer_read_uhwi (&ib_main);
4602 encoder = file_data->symtab_node_encoder;
4603 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4604 index));
4605 gcc_assert (node->definition);
4606 ipa_read_node_info (&ib_main, node, data_in);
4607 }
4608 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4609 len);
4610 lto_data_in_delete (data_in);
4611 }
4612
4613 /* Read ipcp jump functions. */
4614
4615 void
4616 ipa_prop_read_jump_functions (void)
4617 {
4618 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4619 struct lto_file_decl_data *file_data;
4620 unsigned int j = 0;
4621
4622 ipa_check_create_node_params ();
4623 ipa_check_create_edge_args ();
4624 ipa_register_cgraph_hooks ();
4625
4626 while ((file_data = file_data_vec[j++]))
4627 {
4628 size_t len;
4629 const char *data
4630 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
4631 &len);
4632 if (data)
4633 ipa_prop_read_section (file_data, data, len);
4634 }
4635 }
4636
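/* Write the IPA-CP transformation summary of NODE, i.e. its chain of
   aggregate replacement values and, when present, the per-parameter value
   ranges and known bits, to OB.  */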
4637 void
4638 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4639 {
4640 int node_ref;
4641 unsigned int count = 0;
4642 lto_symtab_encoder_t encoder;
4643 struct ipa_agg_replacement_value *aggvals, *av;
4644
4645 aggvals = ipa_get_agg_replacements_for_node (node);
4646 encoder = ob->decl_state->symtab_node_encoder;
4647 node_ref = lto_symtab_encoder_encode (encoder, node);
4648 streamer_write_uhwi (ob, node_ref);
4649
4650 for (av = aggvals; av; av = av->next)
4651 count++;
4652 streamer_write_uhwi (ob, count);
4653
4654 for (av = aggvals; av; av = av->next)
4655 {
4656 struct bitpack_d bp;
4657
4658 streamer_write_uhwi (ob, av->offset);
4659 streamer_write_uhwi (ob, av->index);
4660 stream_write_tree (ob, av->value, true);
4661
4662 bp = bitpack_create (ob->main_stream);
4663 bp_pack_value (&bp, av->by_ref, 1);
4664 streamer_write_bitpack (&bp);
4665 }
4666
4667 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4668 if (ts && vec_safe_length (ts->m_vr) > 0)
4669 {
4670 count = ts->m_vr->length ();
4671 streamer_write_uhwi (ob, count);
4672 for (unsigned i = 0; i < count; ++i)
4673 {
4674 struct bitpack_d bp;
4675 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4676 bp = bitpack_create (ob->main_stream);
4677 bp_pack_value (&bp, parm_vr->known, 1);
4678 streamer_write_bitpack (&bp);
4679 if (parm_vr->known)
4680 {
4681 streamer_write_enum (ob->main_stream, value_rang_type,
4682 VR_LAST, parm_vr->type);
4683 streamer_write_wide_int (ob, parm_vr->min);
4684 streamer_write_wide_int (ob, parm_vr->max);
4685 }
4686 }
4687 }
4688 else
4689 streamer_write_uhwi (ob, 0);
4690
4691 if (ts && vec_safe_length (ts->bits) > 0)
4692 {
4693 count = ts->bits->length ();
4694 streamer_write_uhwi (ob, count);
4695
4696 for (unsigned i = 0; i < count; ++i)
4697 {
4698 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4699 struct bitpack_d bp = bitpack_create (ob->main_stream);
4700 bp_pack_value (&bp, !!bits_jfunc, 1);
4701 streamer_write_bitpack (&bp);
4702 if (bits_jfunc)
4703 {
4704 streamer_write_widest_int (ob, bits_jfunc->value);
4705 streamer_write_widest_int (ob, bits_jfunc->mask);
4706 }
4707 }
4708 }
4709 else
4710 streamer_write_uhwi (ob, 0);
4711 }
4712
4713 /* Stream in the IPA-CP transformation summary for NODE from IB. */
4714
4715 static void
4716 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4717 data_in *data_in)
4718 {
4719 struct ipa_agg_replacement_value *aggvals = NULL;
4720 unsigned int count, i;
4721
4722 count = streamer_read_uhwi (ib);
4723 for (i = 0; i < count; i++)
4724 {
4725 struct ipa_agg_replacement_value *av;
4726 struct bitpack_d bp;
4727
4728 av = ggc_alloc<ipa_agg_replacement_value> ();
4729 av->offset = streamer_read_uhwi (ib);
4730 av->index = streamer_read_uhwi (ib);
4731 av->value = stream_read_tree (ib, data_in);
4732 bp = streamer_read_bitpack (ib);
4733 av->by_ref = bp_unpack_value (&bp, 1);
4734 av->next = aggvals;
4735 aggvals = av;
4736 }
4737 ipa_set_node_agg_value_chain (node, aggvals);
4738
4739 count = streamer_read_uhwi (ib);
4740 if (count > 0)
4741 {
4742 ipcp_transformation_initialize ();
4743 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4744 vec_safe_grow_cleared (ts->m_vr, count);
4745 for (i = 0; i < count; i++)
4746 {
4747 ipa_vr *parm_vr;
4748 parm_vr = &(*ts->m_vr)[i];
4749 struct bitpack_d bp;
4750 bp = streamer_read_bitpack (ib);
4751 parm_vr->known = bp_unpack_value (&bp, 1);
4752 if (parm_vr->known)
4753 {
4754 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4755 VR_LAST);
4756 parm_vr->min = streamer_read_wide_int (ib);
4757 parm_vr->max = streamer_read_wide_int (ib);
4758 }
4759 }
4760 }
4761 count = streamer_read_uhwi (ib);
4762 if (count > 0)
4763 {
4764 ipcp_transformation_initialize ();
4765 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4766 vec_safe_grow_cleared (ts->bits, count);
4767
4768 for (i = 0; i < count; i++)
4769 {
4770 struct bitpack_d bp = streamer_read_bitpack (ib);
4771 bool known = bp_unpack_value (&bp, 1);
4772 if (known)
4773 {
4774 ipa_bits *bits
4775 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4776 streamer_read_widest_int (ib));
4777 (*ts->bits)[i] = bits;
4778 }
4779 }
4780 }
4781 }
4782
4783 /* Write IPA-CP transformation summaries of all functions in the partition. */
4784
4785 void
4786 ipcp_write_transformation_summaries (void)
4787 {
4788 struct cgraph_node *node;
4789 struct output_block *ob;
4790 unsigned int count = 0;
4791 lto_symtab_encoder_iterator lsei;
4792 lto_symtab_encoder_t encoder;
4793
4794 ob = create_output_block (LTO_section_ipcp_transform);
4795 encoder = ob->decl_state->symtab_node_encoder;
4796 ob->symbol = NULL;
4797 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4798 lsei_next_function_in_partition (&lsei))
4799 {
4800 node = lsei_cgraph_node (lsei);
4801 if (node->has_gimple_body_p ())
4802 count++;
4803 }
4804
4805 streamer_write_uhwi (ob, count);
4806
4807 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4808 lsei_next_function_in_partition (&lsei))
4809 {
4810 node = lsei_cgraph_node (lsei);
4811 if (node->has_gimple_body_p ())
4812 write_ipcp_transformation_info (ob, node);
4813 }
4814 streamer_write_char_stream (ob->main_stream, 0);
4815 produce_asm (ob, NULL);
4816 destroy_output_block (ob);
4817 }
4818
4819 /* Read replacements section in file FILE_DATA of length LEN with data
4820 DATA. */
4821
4822 static void
4823 read_replacements_section (struct lto_file_decl_data *file_data,
4824 const char *data,
4825 size_t len)
4826 {
4827 const struct lto_function_header *header =
4828 (const struct lto_function_header *) data;
4829 const int cfg_offset = sizeof (struct lto_function_header);
4830 const int main_offset = cfg_offset + header->cfg_size;
4831 const int string_offset = main_offset + header->main_size;
4832 class data_in *data_in;
4833 unsigned int i;
4834 unsigned int count;
4835
4836 lto_input_block ib_main ((const char *) data + main_offset,
4837 header->main_size, file_data->mode_table);
4838
4839 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4840 header->string_size, vNULL);
4841 count = streamer_read_uhwi (&ib_main);
4842
4843 for (i = 0; i < count; i++)
4844 {
4845 unsigned int index;
4846 struct cgraph_node *node;
4847 lto_symtab_encoder_t encoder;
4848
4849 index = streamer_read_uhwi (&ib_main);
4850 encoder = file_data->symtab_node_encoder;
4851 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4852 index));
4853 gcc_assert (node->definition);
4854 read_ipcp_transformation_info (&ib_main, node, data_in);
4855 }
4856 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4857 len);
4858 lto_data_in_delete (data_in);
4859 }
4860
4861 /* Read IPA-CP transformation summaries. */
4862
4863 void
4864 ipcp_read_transformation_summaries (void)
4865 {
4866 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4867 struct lto_file_decl_data *file_data;
4868 unsigned int j = 0;
4869
4870 while ((file_data = file_data_vec[j++]))
4871 {
4872 size_t len;
4873 const char *data
4874 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
4875 &len);
4876 if (data)
4877 read_replacements_section (file_data, data, len);
4878 }
4879 }
4880
4881 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4882 NODE. */
4883
4884 static void
4885 adjust_agg_replacement_values (struct cgraph_node *node,
4886 struct ipa_agg_replacement_value *aggval)
4887 {
4888 struct ipa_agg_replacement_value *v;
4889
4890 if (!node->clone.param_adjustments)
4891 return;
4892
4893 auto_vec<int, 16> new_indices;
4894 node->clone.param_adjustments->get_updated_indices (&new_indices);
4895 for (v = aggval; v; v = v->next)
4896 {
4897 gcc_checking_assert (v->index >= 0);
4898
4899 if ((unsigned) v->index < new_indices.length ())
4900 v->index = new_indices[v->index];
4901 else
4902 /* This can happen if we know about a constant passed by reference by
4903 an argument which is never actually used for anything, let alone
4904 loading that constant. */
4905 v->index = -1;
4906 }
4907 }
4908
4909 /* Dominator walker driving the ipcp modification phase. */
4910
4911 class ipcp_modif_dom_walker : public dom_walker
4912 {
4913 public:
4914 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4915 vec<ipa_param_descriptor, va_gc> *descs,
4916 struct ipa_agg_replacement_value *av,
4917 bool *sc, bool *cc)
4918 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4919 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4920
4921 virtual edge before_dom_children (basic_block);
4922
4923 private:
4924 struct ipa_func_body_info *m_fbi;
4925 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4926 struct ipa_agg_replacement_value *m_aggval;
4927 bool *m_something_changed, *m_cfg_changed;
4928 };
4929
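/* Replace loads from aggregates passed to the current function with the
   constants recorded in the aggregate replacement values whenever the
   parameter index, offset, size and by-reference flag all match.  */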
4930 edge
4931 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4932 {
4933 gimple_stmt_iterator gsi;
4934 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4935 {
4936 struct ipa_agg_replacement_value *v;
4937 gimple *stmt = gsi_stmt (gsi);
4938 tree rhs, val, t;
4939 HOST_WIDE_INT offset;
4940 poly_int64 size;
4941 int index;
4942 bool by_ref, vce;
4943
4944 if (!gimple_assign_load_p (stmt))
4945 continue;
4946 rhs = gimple_assign_rhs1 (stmt);
4947 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4948 continue;
4949
4950 vce = false;
4951 t = rhs;
4952 while (handled_component_p (t))
4953 {
4954 /* V_C_E can do things like convert an array of integers to one
4955 bigger integer and similar things we do not handle below. */
4956 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4957 {
4958 vce = true;
4959 break;
4960 }
4961 t = TREE_OPERAND (t, 0);
4962 }
4963 if (vce)
4964 continue;
4965
4966 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4967 &offset, &size, &by_ref))
4968 continue;
4969 for (v = m_aggval; v; v = v->next)
4970 if (v->index == index
4971 && v->offset == offset)
4972 break;
4973 if (!v
4974 || v->by_ref != by_ref
4975 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
4976 size))
4977 continue;
4978
4979 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4980 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4981 {
4982 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4983 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4984 else if (TYPE_SIZE (TREE_TYPE (rhs))
4985 == TYPE_SIZE (TREE_TYPE (v->value)))
4986 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4987 else
4988 {
4989 if (dump_file)
4990 {
4991 fprintf (dump_file, " const ");
4992 print_generic_expr (dump_file, v->value);
4993 fprintf (dump_file, " can't be converted to type of ");
4994 print_generic_expr (dump_file, rhs);
4995 fprintf (dump_file, "\n");
4996 }
4997 continue;
4998 }
4999 }
5000 else
5001 val = v->value;
5002
5003 if (dump_file && (dump_flags & TDF_DETAILS))
5004 {
5005 fprintf (dump_file, "Modifying stmt:\n ");
5006 print_gimple_stmt (dump_file, stmt, 0);
5007 }
5008 gimple_assign_set_rhs_from_tree (&gsi, val);
5009 update_stmt (stmt);
5010
5011 if (dump_file && (dump_flags & TDF_DETAILS))
5012 {
5013 fprintf (dump_file, "into:\n ");
5014 print_gimple_stmt (dump_file, stmt, 0);
5015 fprintf (dump_file, "\n");
5016 }
5017
5018 *m_something_changed = true;
5019 if (maybe_clean_eh_stmt (stmt)
5020 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5021 *m_cfg_changed = true;
5022 }
5023 return NULL;
5024 }
5025
5026 /* Update bits info of formal parameters as described in
5027 ipcp_transformation. */
5028
5029 static void
5030 ipcp_update_bits (struct cgraph_node *node)
5031 {
5032 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5033
5034 if (!ts || vec_safe_length (ts->bits) == 0)
5035 return;
5036 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5037 unsigned count = bits.length ();
5038 if (!count)
5039 return;
5040
5041 auto_vec<int, 16> new_indices;
5042 bool need_remapping = false;
5043 if (node->clone.param_adjustments)
5044 {
5045 node->clone.param_adjustments->get_updated_indices (&new_indices);
5046 need_remapping = true;
5047 }
5048 auto_vec <tree, 16> parm_decls;
5049 push_function_arg_decls (&parm_decls, node->decl);
5050
5051 for (unsigned i = 0; i < count; ++i)
5052 {
5053 tree parm;
5054 if (need_remapping)
5055 {
5056 if (i >= new_indices.length ())
5057 continue;
5058 int idx = new_indices[i];
5059 if (idx < 0)
5060 continue;
5061 parm = parm_decls[idx];
5062 }
5063 else
5064 parm = parm_decls[i];
5065 gcc_checking_assert (parm);
5066
5068 if (!bits[i]
5069 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5070 || POINTER_TYPE_P (TREE_TYPE (parm)))
5071 || !is_gimple_reg (parm))
5072 continue;
5073
5074 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5075 if (!ddef)
5076 continue;
5077
5078 if (dump_file)
5079 {
5080 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5081 print_hex (bits[i]->mask, dump_file);
5082 fprintf (dump_file, "\n");
5083 }
5084
5085 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5086 {
5087 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5088 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5089
5090 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5091 | wide_int::from (bits[i]->value, prec, sgn);
5092 set_nonzero_bits (ddef, nonzero_bits);
5093 }
5094 else
5095 {
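	  /* For pointers, the mask records which bits of the value are not
	     known; its lowest set bit therefore bounds the guaranteed
	     alignment, and the known bits below it give the misalignment.  */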
5096 unsigned tem = bits[i]->mask.to_uhwi ();
5097 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5098 unsigned align = tem & -tem;
5099 unsigned misalign = bitpos & (align - 1);
5100
5101 if (align > 1)
5102 {
5103 if (dump_file)
5104 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5105
5106 unsigned old_align, old_misalign;
5107 struct ptr_info_def *pi = get_ptr_info (ddef);
5108 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5109
5110 if (old_known
5111 && old_align > align)
5112 {
5113 if (dump_file)
5114 {
5115 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5116 if ((old_misalign & (align - 1)) != misalign)
5117 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5118 old_misalign, misalign);
5119 }
5120 continue;
5121 }
5122
5123 if (old_known
5124 && ((misalign & (old_align - 1)) != old_misalign)
5125 && dump_file)
5126 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5127 old_misalign, misalign);
5128
5129 set_ptr_info_alignment (pi, align, misalign);
5130 }
5131 }
5132 }
5133 }
5134
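/* Return true iff the range this ipa_vr describes, interpreted in EXPR_TYPE,
   cannot contain zero: either an anti-range consisting of just zero, or, for
   an unsigned type, the range [1, MAX].  */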
5135 bool
5136 ipa_vr::nonzero_p (tree expr_type) const
5137 {
5138 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5139 return true;
5140
5141 unsigned prec = TYPE_PRECISION (expr_type);
5142 return (type == VR_RANGE
5143 && TYPE_UNSIGNED (expr_type)
5144 && wi::eq_p (min, wi::one (prec))
5145 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5146 }
5147
5148 /* Update value ranges of formal parameters as described in
5149 ipcp_transformation. */
5150
5151 static void
5152 ipcp_update_vr (struct cgraph_node *node)
5153 {
5154 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5155 if (!ts || vec_safe_length (ts->m_vr) == 0)
5156 return;
5157 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5158 unsigned count = vr.length ();
5159 if (!count)
5160 return;
5161
5162 auto_vec<int, 16> new_indices;
5163 bool need_remapping = false;
5164 if (node->clone.param_adjustments)
5165 {
5166 node->clone.param_adjustments->get_updated_indices (&new_indices);
5167 need_remapping = true;
5168 }
5169 auto_vec <tree, 16> parm_decls;
5170 push_function_arg_decls (&parm_decls, node->decl);
5171
5172 for (unsigned i = 0; i < count; ++i)
5173 {
5174 tree parm;
5175 int remapped_idx;
5176 if (need_remapping)
5177 {
5178 if (i >= new_indices.length ())
5179 continue;
5180 remapped_idx = new_indices[i];
5181 if (remapped_idx < 0)
5182 continue;
5183 }
5184 else
5185 remapped_idx = i;
5186
5187 parm = parm_decls[remapped_idx];
5188
5189 gcc_checking_assert (parm);
5190 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5191
5192 if (!ddef || !is_gimple_reg (parm))
5193 continue;
5194
5195 if (vr[i].known
5196 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5197 {
5198 tree type = TREE_TYPE (ddef);
5199 unsigned prec = TYPE_PRECISION (type);
5200 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5201 {
5202 if (dump_file)
5203 {
5204 fprintf (dump_file, "Setting value range of param %u "
5205 "(now %i) ", i, remapped_idx);
5206 fprintf (dump_file, "%s[",
5207 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5208 print_decs (vr[i].min, dump_file);
5209 fprintf (dump_file, ", ");
5210 print_decs (vr[i].max, dump_file);
5211 fprintf (dump_file, "]\n");
5212 }
5213 set_range_info (ddef, vr[i].type,
5214 wide_int_storage::from (vr[i].min, prec,
5215 TYPE_SIGN (type)),
5216 wide_int_storage::from (vr[i].max, prec,
5217 TYPE_SIGN (type)));
5218 }
5219 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5220 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5221 {
5222 if (dump_file)
5223 fprintf (dump_file, "Setting nonnull for %u\n", i);
5224 set_ptr_nonnull (ddef);
5225 }
5226 }
5227 }
5228 }
5229
5230 /* IPCP transformation phase doing propagation of aggregate values. */
5231
5232 unsigned int
5233 ipcp_transform_function (struct cgraph_node *node)
5234 {
5235 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5236 struct ipa_func_body_info fbi;
5237 struct ipa_agg_replacement_value *aggval;
5238 int param_count;
5239 bool cfg_changed = false, something_changed = false;
5240
5241 gcc_checking_assert (cfun);
5242 gcc_checking_assert (current_function_decl);
5243
5244 if (dump_file)
5245 fprintf (dump_file, "Modification phase of node %s\n",
5246 node->dump_name ());
5247
5248 ipcp_update_bits (node);
5249 ipcp_update_vr (node);
5250 aggval = ipa_get_agg_replacements_for_node (node);
5251 if (!aggval)
5252 return 0;
5253 param_count = count_formal_params (node->decl);
5254 if (param_count == 0)
5255 return 0;
5256 adjust_agg_replacement_values (node, aggval);
5257 if (dump_file)
5258 ipa_dump_agg_replacement_values (dump_file, aggval);
5259
5260 fbi.node = node;
5261 fbi.info = NULL;
5262 fbi.bb_infos = vNULL;
5263 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5264 fbi.param_count = param_count;
5265 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
5266
5267 vec_safe_grow_cleared (descriptors, param_count);
5268 ipa_populate_param_decls (node, *descriptors);
5269 calculate_dominance_info (CDI_DOMINATORS);
5270 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5271 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5272
5273 int i;
5274 struct ipa_bb_info *bi;
5275 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5276 free_ipa_bb_info (bi);
5277 fbi.bb_infos.release ();
5278 free_dominance_info (CDI_DOMINATORS);
5279
5280 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5281 s->agg_values = NULL;
5282 s->bits = NULL;
5283 s->m_vr = NULL;
5284
5285 vec_free (descriptors);
5286
5287 if (!something_changed)
5288 return 0;
5289
5290 if (cfg_changed)
5291 delete_unreachable_blocks_update_callgraph (node, false);
5292
5293 return TODO_update_ssa_only_virtuals;
5294 }
5295
5296 #include "gt-ipa-prop.h"