gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55 #include "tree-cfgcleanup.h"
56
57 /* Function summary where the parameter infos are actually stored. */
58 ipa_node_params_t *ipa_node_params_sum = NULL;
59
60 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
61
62 /* Edge summary for IPA-CP edge information. */
63 ipa_edge_args_sum_t *ipa_edge_args_sum;
64
65 /* Traits for a hash table for reusing already existing ipa_bits. */
66
67 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
68 {
69 typedef ipa_bits *value_type;
70 typedef ipa_bits *compare_type;
71 static hashval_t
72 hash (const ipa_bits *p)
73 {
74 hashval_t t = (hashval_t) p->value.to_shwi ();
75 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
76 }
77 static bool
78 equal (const ipa_bits *a, const ipa_bits *b)
79 {
80 return a->value == b->value && a->mask == b->mask;
81 }
82 static void
83 mark_empty (ipa_bits *&p)
84 {
85 p = NULL;
86 }
87 static bool
88 is_empty (const ipa_bits *p)
89 {
90 return p == NULL;
91 }
92 static bool
93 is_deleted (const ipa_bits *p)
94 {
95 return p == reinterpret_cast<const ipa_bits *> (1);
96 }
97 static void
98 mark_deleted (ipa_bits *&p)
99 {
100 p = reinterpret_cast<ipa_bits *> (1);
101 }
102 };
103
 104 /* Hash table to avoid repeated allocations of equal ipa_bits. */
105 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
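/* As an illustration of how the cache traits above are meant to be used, the
   lookup done later in ipa_get_ipa_bits_for_value boils down to roughly the
   following sketch (a simplified excerpt, not additional functionality):

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       *slot = <newly GC-allocated copy of tmp>;
     return *slot;

   so that equal (value, mask) pairs end up sharing a single GC-allocated
   ipa_bits object.  */
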
107 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
108 the equiv bitmap is not hashed and is expected to be NULL. */
109
110 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
111 {
112 typedef value_range_base *value_type;
113 typedef value_range_base *compare_type;
114 static hashval_t
115 hash (const value_range_base *p)
116 {
117 inchash::hash hstate (p->kind ());
118 inchash::add_expr (p->min (), hstate);
119 inchash::add_expr (p->max (), hstate);
120 return hstate.end ();
121 }
122 static bool
123 equal (const value_range_base *a, const value_range_base *b)
124 {
125 return a->equal_p (*b);
126 }
127 static void
128 mark_empty (value_range_base *&p)
129 {
130 p = NULL;
131 }
132 static bool
133 is_empty (const value_range_base *p)
134 {
135 return p == NULL;
136 }
137 static bool
138 is_deleted (const value_range_base *p)
139 {
140 return p == reinterpret_cast<const value_range_base *> (1);
141 }
142 static void
143 mark_deleted (value_range_base *&p)
144 {
145 p = reinterpret_cast<value_range_base *> (1);
146 }
147 };
148
 149 /* Hash table to avoid repeated allocations of equal value_ranges. */
150 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
152 /* Holders of ipa cgraph hooks: */
153 static struct cgraph_node_hook_list *function_insertion_hook_holder;
154
155 /* Description of a reference to an IPA constant. */
156 struct ipa_cst_ref_desc
157 {
158 /* Edge that corresponds to the statement which took the reference. */
159 struct cgraph_edge *cs;
160 /* Linked list of duplicates created when call graph edges are cloned. */
161 struct ipa_cst_ref_desc *next_duplicate;
162 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
 163 is out of control. */
164 int refcount;
165 };
166
167 /* Allocation pool for reference descriptions. */
168
169 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
170 ("IPA-PROP ref descriptions");
171
172 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175 static bool
176 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177 {
178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
179
180 if (!fs_opts)
181 return false;
182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
183 }
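
/* For example, a function declared as

     __attribute__ ((optimize ("O0"))) void f (void);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimization disabled, so
   the predicate above returns true and the function is left alone by IPA-CP
   (an illustrative example, not an exhaustive description of the cases).  */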
184
 185 /* Return the index of the formal parameter whose tree is PTREE among the
 186 parameter DESCRIPTORS, or -1 if it is not found. */
187
188 static int
189 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
191 {
192 int i, count;
193
194 count = vec_safe_length (descriptors);
195 for (i = 0; i < count; i++)
196 if ((*descriptors)[i].decl_or_type == ptree)
197 return i;
198
199 return -1;
200 }
201
 202 /* Return the index of the formal parameter whose tree is PTREE in the function
 203 which corresponds to INFO, or -1 if it is not found. */
204
205 int
206 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
207 {
208 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209 }
210
 211 /* Populate the decl_or_type fields of the parameter DESCRIPTORS that correspond
 212 to NODE. */
213
214 static void
215 ipa_populate_param_decls (struct cgraph_node *node,
216 vec<ipa_param_descriptor, va_gc> &descriptors)
217 {
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
222
223 fndecl = node->decl;
224 gcc_assert (gimple_has_body_p (fndecl));
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
228 {
229 descriptors[param_num].decl_or_type = parm;
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
232 param_num++;
233 }
234 }
235
236 /* Return how many formal parameters FNDECL has. */
237
238 int
239 count_formal_params (tree fndecl)
240 {
241 tree parm;
242 int count = 0;
243 gcc_assert (gimple_has_body_p (fndecl));
244
245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
246 count++;
247
248 return count;
249 }
250
 251 /* Dump a representation of the Ith formal parameter of the function which
 252 corresponds to INFO into FILE: its index and, when available, its
 253 declaration or type. */
254
255 void
256 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
257 {
258 fprintf (file, "param #%i", i);
259 if ((*info->descriptors)[i].decl_or_type)
260 {
261 fprintf (file, " ");
262 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
263 }
264 }
265
266 /* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
268
269 static bool
270 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271 {
272 class ipa_node_params *info = IPA_NODE_REF (node);
273
274 if (!info->descriptors && param_count)
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
281 }
282
283 /* Initialize the ipa_node_params structure associated with NODE by counting
284 the function parameters, creating the descriptors and populating their
285 param_decls. */
286
287 void
288 ipa_initialize_node_params (struct cgraph_node *node)
289 {
290 class ipa_node_params *info = IPA_NODE_REF (node);
291
292 if (!info->descriptors
293 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
294 ipa_populate_param_decls (node, *info->descriptors);
295 }
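
/* A minimal sketch of how the functions above are typically used by the
   analysis code (PARM is assumed to be some PARM_DECL of NODE):

     ipa_initialize_node_params (node);
     class ipa_node_params *info = IPA_NODE_REF (node);
     int idx = ipa_get_param_decl_index (info, parm);

   i.e. the descriptors are built once per node and subsequently queried by
   declaration.  */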
296
297 /* Print the jump functions associated with call graph edge CS to file F. */
298
299 static void
300 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
301 {
302 int i, count;
303
304 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
305 for (i = 0; i < count; i++)
306 {
307 struct ipa_jump_func *jump_func;
308 enum jump_func_type type;
309
310 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
311 type = jump_func->type;
312
313 fprintf (f, " param %d: ", i);
314 if (type == IPA_JF_UNKNOWN)
315 fprintf (f, "UNKNOWN\n");
316 else if (type == IPA_JF_CONST)
317 {
318 tree val = jump_func->value.constant.value;
319 fprintf (f, "CONST: ");
320 print_generic_expr (f, val);
321 if (TREE_CODE (val) == ADDR_EXPR
322 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
323 {
324 fprintf (f, " -> ");
325 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
326 }
327 fprintf (f, "\n");
328 }
329 else if (type == IPA_JF_PASS_THROUGH)
330 {
331 fprintf (f, "PASS THROUGH: ");
332 fprintf (f, "%d, op %s",
333 jump_func->value.pass_through.formal_id,
334 get_tree_code_name(jump_func->value.pass_through.operation));
335 if (jump_func->value.pass_through.operation != NOP_EXPR)
336 {
337 fprintf (f, " ");
338 print_generic_expr (f, jump_func->value.pass_through.operand);
339 }
340 if (jump_func->value.pass_through.agg_preserved)
341 fprintf (f, ", agg_preserved");
342 fprintf (f, "\n");
343 }
344 else if (type == IPA_JF_ANCESTOR)
345 {
346 fprintf (f, "ANCESTOR: ");
347 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
348 jump_func->value.ancestor.formal_id,
349 jump_func->value.ancestor.offset);
350 if (jump_func->value.ancestor.agg_preserved)
351 fprintf (f, ", agg_preserved");
352 fprintf (f, "\n");
353 }
354
355 if (jump_func->agg.items)
356 {
357 struct ipa_agg_jf_item *item;
358 int j;
359
360 fprintf (f, " Aggregate passed by %s:\n",
361 jump_func->agg.by_ref ? "reference" : "value");
362 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
363 {
364 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
365 item->offset);
366 if (TYPE_P (item->value))
367 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
368 tree_to_uhwi (TYPE_SIZE (item->value)));
369 else
370 {
371 fprintf (f, "cst: ");
372 print_generic_expr (f, item->value);
373 }
374 fprintf (f, "\n");
375 }
376 }
377
378 class ipa_polymorphic_call_context *ctx
379 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
380 if (ctx && !ctx->useless_p ())
381 {
382 fprintf (f, " Context: ");
383 ctx->dump (dump_file);
384 }
385
386 if (jump_func->bits)
387 {
388 fprintf (f, " value: ");
389 print_hex (jump_func->bits->value, f);
390 fprintf (f, ", mask: ");
391 print_hex (jump_func->bits->mask, f);
392 fprintf (f, "\n");
393 }
394 else
395 fprintf (f, " Unknown bits\n");
396
397 if (jump_func->m_vr)
398 {
399 fprintf (f, " VR ");
400 fprintf (f, "%s[",
401 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
402 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
403 fprintf (f, ", ");
404 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
405 fprintf (f, "]\n");
406 }
407 else
408 fprintf (f, " Unknown VR\n");
409 }
410 }
411
412
413 /* Print the jump functions of all arguments on all call graph edges going from
414 NODE to file F. */
415
416 void
417 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
418 {
419 struct cgraph_edge *cs;
420
421 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
422 for (cs = node->callees; cs; cs = cs->next_callee)
423 {
424 if (!ipa_edge_args_info_available_for_edge_p (cs))
425 continue;
426
427 fprintf (f, " callsite %s -> %s : \n",
428 node->dump_name (),
429 cs->callee->dump_name ());
430 ipa_print_node_jump_functions_for_edge (f, cs);
431 }
432
433 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
434 {
435 class cgraph_indirect_call_info *ii;
436 if (!ipa_edge_args_info_available_for_edge_p (cs))
437 continue;
438
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
443 ii->member_ptr ? "member ptr" : "aggregate",
444 ii->param_index, ii->offset,
 445 ii->by_ref ? "by reference" : "by value");
446 else
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
451
452 if (cs->call_stmt)
453 {
454 fprintf (f, ", for stmt ");
455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
456 }
457 else
458 fprintf (f, "\n");
459 if (ii->polymorphic)
460 ii->context.dump (f);
461 ipa_print_node_jump_functions_for_edge (f, cs);
462 }
463 }
464
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
466
467 void
468 ipa_print_all_jump_functions (FILE *f)
469 {
470 struct cgraph_node *node;
471
472 fprintf (f, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node)
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477 }
478
 479 /* Set JFUNC to be an unknown jump function that carries no information. */
480
481 static void
482 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483 {
484 jfunc->type = IPA_JF_UNKNOWN;
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
487 }
488
 489 /* Set DST to be a copy of another jump function SRC (to be used by the jump
 490 function combination code). The two functions will share their rdesc. */
491
492 static void
493 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496 {
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500 }
501
 502 /* Set JFUNC to be a constant jump function. */
503
504 static void
505 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
507 {
508 jfunc->type = IPA_JF_CONST;
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
515
516 rdesc = ipa_refdesc_pool.allocate ();
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
524 }
525
526 /* Set JFUNC to be a simple pass-through jump function. */
527 static void
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
529 bool agg_preserved)
530 {
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
535 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 }
537
 538 /* Set JFUNC to be a unary pass-through jump function. */
539
540 static void
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543 {
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549 }
550 /* Set JFUNC to be an arithmetic pass through jump function. */
551
552 static void
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555 {
556 jfunc->type = IPA_JF_PASS_THROUGH;
557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
560 jfunc->value.pass_through.agg_preserved = false;
561 }
562
563 /* Set JFUNC to be an ancestor jump function. */
564
565 static void
566 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
567 int formal_id, bool agg_preserved)
568 {
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
572 jfunc->value.ancestor.agg_preserved = agg_preserved;
573 }
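
/* To give a rough idea of what the setters above describe, the following
   (hypothetical) calls in a caller would typically be summarized as:

     bar (a);                  simple pass-through of parameter a
     bar (a + 4);              arithmetic pass-through (PLUS_EXPR, operand 4)
     bar (7);                  constant jump function
     A::bar (&this->D.1748);   ancestor jump function (offset of the A base)

   (an informal illustration; the precise conditions are checked in
   compute_complex_assign_jump_func and related code below).  */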
574
 575 /* Get IPA BB information about the given BB. FBI is the context of analysis
576 of this function body. */
577
578 static struct ipa_bb_info *
579 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
580 {
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583 }
584
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
587
588 struct prop_type_change_info
589 {
590 /* Offset into the object where there is the virtual method pointer we are
591 looking for. */
592 HOST_WIDE_INT offset;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
594 type change. */
595 tree object;
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed;
598 };
599
600 /* Return true if STMT can modify a virtual method table pointer.
601
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
 605 pointers and that constructors essentially proceed in the following way:
606
607 1) The very first thing they do is that they call constructors of ancestor
608 sub-objects that have them.
609
 610 2) Then the VMT pointers of this object and all its ancestors are set to new
 611 values corresponding to the type associated with the constructor.
612
 613 3) Only afterwards, other things such as constructors of member sub-objects
 614 and the code written by the user are run. Only this part may include calling
615 virtual functions, directly or indirectly.
616
617 There is no way to call a constructor of an ancestor sub-object in any
618 other way.
619
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
623
624 The most important fact to derive from the above is that if, for some
 625 statement in section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop, we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
635 */
636
637 static bool
638 stmt_may_be_vtbl_ptr_store (gimple *stmt)
639 {
640 if (is_gimple_call (stmt))
641 return false;
642 if (gimple_clobber_p (stmt))
643 return false;
644 else if (is_gimple_assign (stmt))
645 {
646 tree lhs = gimple_assign_lhs (stmt);
647
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
649 {
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
652 return false;
653
654 if (TREE_CODE (lhs) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
656 return false;
657 /* In the future we might want to use get_ref_base_and_extent to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost like if it was a component ref. */
660 }
661 }
662 return true;
663 }
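
/* As a minimal illustration of the constructor shape assumed above, a
   hypothetical class

     struct B : A { B (); virtual void f (); };

   has a constructor that is emitted roughly as:

     B::B (struct B *this)
     {
       A::A (this);                      1) ancestor constructors first
       this->_vptr.B = &_ZTV1B + 16;     2) store the new VMT pointer(s)
       ... user code ...                 3) only now may virtuals be called
     }

   (a sketch for illustration only; exact offsets and symbol names vary).  */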
664
665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
 667 pointer. It stores its result into DATA, which points to a
668 prop_type_change_info structure. */
669
670 static bool
671 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672 {
673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683 }
684
 685 /* See if ARG is a PARM_DECL describing an instance passed by pointer or
 686 reference in FUNCTION. Return true if the dynamic type may change between
 687 the beginning of the function and the invocation of CALL; false otherwise.
 688
 689 Generally functions are not allowed to change the type of such instances,
 690 but they may call destructors. We assume that methods cannot destroy the
 691 THIS pointer. Also, as a special case, constructors and destructors may
 692 change the type of the THIS pointer. */
693
694 static bool
695 param_type_may_change_p (tree function, tree arg, gimple *call)
696 {
 697 /* Pure functions cannot make any changes to the dynamic type; that
 698 would require writing to memory. */
699 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
700 return false;
 701 /* We need to check whether we are within an inlined constructor
 702 or destructor (ideally we would have a way to check that the
 703 inlined cdtor is actually working on ARG, but we do not have an
 704 easy way to tie it to this, so we punt on all non-pure cdtors).
 705 We may also record the types of cdtors and, once we know the type
 706 of the instance, match them.
707
708 Also code unification optimizations may merge calls from
709 different blocks making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
712 return true;
713 if (TREE_CODE (arg) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg)
715 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
716 {
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
719 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
720 /* THIS pointer of an method - here we want to watch constructors
721 and destructors as those definitely may change the dynamic
722 type. */
723 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function)
725 && !DECL_CXX_DESTRUCTOR_P (function)
726 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
727 {
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
730 block = BLOCK_SUPERCONTEXT (block))
731 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
732 return true;
733 return false;
734 }
735 }
736 return true;
737 }
738
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
 741 it has, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
745
 746 This is a helper function for detect_type_change and detect_type_change_ssa
 747 that does the heavy work which is usually unnecessary. */
748
749 static bool
750 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
751 tree base, tree comp_type, gcall *call,
752 struct ipa_jump_func *jfunc,
753 HOST_WIDE_INT offset)
754 {
755 struct prop_type_change_info tci;
756 ao_ref ao;
757
758 gcc_checking_assert (DECL_P (arg)
759 || TREE_CODE (arg) == MEM_REF
760 || handled_component_p (arg));
761
762 comp_type = TYPE_MAIN_VARIANT (comp_type);
763
764 /* Const calls cannot call virtual methods through VMT and so type changes do
765 not matter. */
766 if (!flag_devirtualize || !gimple_vuse (call)
767 /* Be sure expected_type is polymorphic. */
768 || !comp_type
769 || TREE_CODE (comp_type) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
772 return true;
773
774 ao_ref_init (&ao, arg);
775 ao.base = base;
776 ao.offset = offset;
777 ao.size = POINTER_SIZE;
778 ao.max_size = ao.size;
779
780 tci.offset = offset;
781 tci.object = get_base_address (arg);
782 tci.type_maybe_changed = false;
783
784 int walked
785 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
786 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
787
788 if (walked >= 0 && !tci.type_maybe_changed)
789 return false;
790
791 ipa_set_jf_unknown (jfunc);
792 return true;
793 }
794
795 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
 796 If it has, return true and fill in the jump function JFUNC with relevant type
797 information or set it to unknown. ARG is the object itself (not a pointer
798 to it, unless dereferenced). BASE is the base of the memory access as
799 returned by get_ref_base_and_extent, as is the offset. */
800
801 static bool
802 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
803 tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
804 HOST_WIDE_INT offset)
805 {
806 if (!flag_devirtualize)
807 return false;
808
809 if (TREE_CODE (base) == MEM_REF
810 && !param_type_may_change_p (current_function_decl,
811 TREE_OPERAND (base, 0),
812 call))
813 return false;
814 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
815 call, jfunc, offset);
816 }
817
818 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
819 SSA name (its dereference will become the base and the offset is assumed to
820 be zero). */
821
822 static bool
823 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
824 gcall *call, struct ipa_jump_func *jfunc)
825 {
826 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
827 if (!flag_devirtualize
828 || !POINTER_TYPE_P (TREE_TYPE (arg)))
829 return false;
830
831 if (!param_type_may_change_p (current_function_decl, arg, call))
832 return false;
833
834 arg = build2 (MEM_REF, ptr_type_node, arg,
835 build_int_cst (ptr_type_node, 0));
836
837 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
838 call, jfunc, 0);
839 }
840
841 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
842 boolean variable pointed to by DATA. */
843
844 static bool
845 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
846 void *data)
847 {
848 bool *b = (bool *) data;
849 *b = true;
850 return true;
851 }
852
853 /* Find the nearest valid aa status for parameter specified by INDEX that
854 dominates BB. */
855
856 static struct ipa_param_aa_status *
857 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
858 int index)
859 {
860 while (true)
861 {
862 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
863 if (!bb)
864 return NULL;
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (!bi->param_aa_statuses.is_empty ()
867 && bi->param_aa_statuses[index].valid)
868 return &bi->param_aa_statuses[index];
869 }
870 }
871
872 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
 873 structures and/or initialize the result with a dominating description as
874 necessary. */
875
876 static struct ipa_param_aa_status *
877 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
878 int index)
879 {
880 gcc_checking_assert (fbi);
881 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
882 if (bi->param_aa_statuses.is_empty ())
883 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
884 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
885 if (!paa->valid)
886 {
887 gcc_checking_assert (!paa->parm_modified
888 && !paa->ref_modified
889 && !paa->pt_modified);
890 struct ipa_param_aa_status *dom_paa;
891 dom_paa = find_dominating_aa_status (fbi, bb, index);
892 if (dom_paa)
893 *paa = *dom_paa;
894 else
895 paa->valid = true;
896 }
897
898 return paa;
899 }
900
901 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
902 a value known not to be modified in this function before reaching the
903 statement STMT. FBI holds information about the function we have so far
 904 gathered but which does not survive the summary building stage. */
905
906 static bool
907 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
908 gimple *stmt, tree parm_load)
909 {
910 struct ipa_param_aa_status *paa;
911 bool modified = false;
912 ao_ref refd;
913
914 tree base = get_base_address (parm_load);
915 gcc_assert (TREE_CODE (base) == PARM_DECL);
916 if (TREE_READONLY (base))
917 return true;
918
919 gcc_checking_assert (fbi);
920 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
921 if (paa->parm_modified)
922 return false;
923
924 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
925 ao_ref_init (&refd, parm_load);
926 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
927 &modified, NULL, NULL,
928 fbi->aa_walk_budget + 1);
929 if (walked < 0)
930 {
931 modified = true;
932 if (fbi)
933 fbi->aa_walk_budget = 0;
934 }
935 else if (fbi)
936 fbi->aa_walk_budget -= walked;
937 if (paa && modified)
938 paa->parm_modified = true;
939 return !modified;
940 }
941
 942 /* If STMT is an assignment that loads a value from a parameter declaration,
 943 return the index of the parameter in ipa_node_params, provided the parameter
 944 has not been modified. Otherwise return -1. */
945
946 static int
947 load_from_unmodified_param (struct ipa_func_body_info *fbi,
948 vec<ipa_param_descriptor, va_gc> *descriptors,
949 gimple *stmt)
950 {
951 int index;
952 tree op1;
953
954 if (!gimple_assign_single_p (stmt))
955 return -1;
956
957 op1 = gimple_assign_rhs1 (stmt);
958 if (TREE_CODE (op1) != PARM_DECL)
959 return -1;
960
961 index = ipa_get_param_decl_index_1 (descriptors, op1);
962 if (index < 0
963 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
964 return -1;
965
966 return index;
967 }
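
/* For example, when a parameter A is addressable and therefore not a gimple
   register, gimplification produces a load such as

     a.0_2 = a;

   and the function above maps that statement back to the index of A, provided
   the walk over virtual definitions shows A has not been written to before the
   statement (see also the examples before compute_complex_assign_jump_func).  */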
968
969 /* Return true if memory reference REF (which must be a load through parameter
970 with INDEX) loads data that are known to be unmodified in this function
971 before reaching statement STMT. */
972
973 static bool
974 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
975 int index, gimple *stmt, tree ref)
976 {
977 struct ipa_param_aa_status *paa;
978 bool modified = false;
979 ao_ref refd;
980
981 gcc_checking_assert (fbi);
982 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
983 if (paa->ref_modified)
984 return false;
985
986 gcc_checking_assert (gimple_vuse (stmt));
987 ao_ref_init (&refd, ref);
988 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
989 &modified, NULL, NULL,
990 fbi->aa_walk_budget + 1);
991 if (walked < 0)
992 {
993 modified = true;
994 fbi->aa_walk_budget = 0;
995 }
996 else
997 fbi->aa_walk_budget -= walked;
998 if (modified)
999 paa->ref_modified = true;
1000 return !modified;
1001 }
1002
1003 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1004 is known to be unmodified in this function before reaching call statement
1005 CALL into which it is passed. FBI describes the function body. */
1006
1007 static bool
1008 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1009 gimple *call, tree parm)
1010 {
1011 bool modified = false;
1012 ao_ref refd;
1013
 1014 /* It's unnecessary to calculate anything about memory contents for a const
 1015 function because it is not going to use it. But do not cache the result
1016 either. Also, no such calculations for non-pointers. */
1017 if (!gimple_vuse (call)
1018 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1019 return false;
1020
1021 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1022 gimple_bb (call),
1023 index);
1024 if (paa->pt_modified)
1025 return false;
1026
1027 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1028 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1029 &modified, NULL, NULL,
1030 fbi->aa_walk_budget + 1);
1031 if (walked < 0)
1032 {
1033 fbi->aa_walk_budget = 0;
1034 modified = true;
1035 }
1036 else
1037 fbi->aa_walk_budget -= walked;
1038 if (modified)
1039 paa->pt_modified = true;
1040 return !modified;
1041 }
1042
1043 /* Return true if we can prove that OP is a memory reference loading
1044 data from an aggregate passed as a parameter.
1045
 1046 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1047 false if it cannot prove that the value has not been modified before the
1048 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1049 if it cannot prove the value has not been modified, in that case it will
1050 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1051
1052 INFO and PARMS_AINFO describe parameters of the current function (but the
1053 latter can be NULL), STMT is the load statement. If function returns true,
 1054 *INDEX_P, *OFFSET_P and *BY_REF are filled with the parameter index, offset
1055 within the aggregate and whether it is a load from a value passed by
1056 reference respectively. */
1057
1058 bool
1059 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1060 vec<ipa_param_descriptor, va_gc> *descriptors,
1061 gimple *stmt, tree op, int *index_p,
1062 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1063 bool *by_ref_p, bool *guaranteed_unmodified)
1064 {
1065 int index;
1066 HOST_WIDE_INT size;
1067 bool reverse;
1068 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1069
1070 if (!base)
1071 return false;
1072
1073 if (DECL_P (base))
1074 {
1075 int index = ipa_get_param_decl_index_1 (descriptors, base);
1076 if (index >= 0
1077 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1078 {
1079 *index_p = index;
1080 *by_ref_p = false;
1081 if (size_p)
1082 *size_p = size;
1083 if (guaranteed_unmodified)
1084 *guaranteed_unmodified = true;
1085 return true;
1086 }
1087 return false;
1088 }
1089
1090 if (TREE_CODE (base) != MEM_REF
1091 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1092 || !integer_zerop (TREE_OPERAND (base, 1)))
1093 return false;
1094
1095 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1096 {
1097 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1098 index = ipa_get_param_decl_index_1 (descriptors, parm);
1099 }
1100 else
1101 {
1102 /* This branch catches situations where a pointer parameter is not a
1103 gimple register, for example:
1104
1105 void hip7(S*) (struct S * p)
1106 {
1107 void (*<T2e4>) (struct S *) D.1867;
1108 struct S * p.1;
1109
1110 <bb 2>:
1111 p.1_1 = p;
1112 D.1867_2 = p.1_1->f;
1113 D.1867_2 ();
1114 gdp = &p;
1115 */
1116
1117 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1118 index = load_from_unmodified_param (fbi, descriptors, def);
1119 }
1120
1121 if (index >= 0)
1122 {
1123 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1124 if (!data_preserved && !guaranteed_unmodified)
1125 return false;
1126
1127 *index_p = index;
1128 *by_ref_p = true;
1129 if (size_p)
1130 *size_p = size;
1131 if (guaranteed_unmodified)
1132 *guaranteed_unmodified = data_preserved;
1133 return true;
1134 }
1135 return false;
1136 }
1137
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1142
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1147
1148 foo (int a)
1149 {
1150 int a.0;
1151
1152 a.0_2 = a;
1153 bar (a.0_2);
1154
1155 2) The passed value can be described by a simple arithmetic pass-through
1156 jump function. E.g.
1157
1158 foo (int a)
1159 {
1160 int D.2064;
1161
1162 D.2064_4 = a.1(D) + 4;
1163 bar (D.2064_4);
1164
1165 This case can also occur in combination of the previous one, e.g.:
1166
1167 foo (int a, int z)
1168 {
1169 int a.0;
1170 int D.2064;
1171
1172 a.0_3 = a;
1173 D.2064_4 = a.0_3 + 4;
1174 foo (D.2064_4);
1175
1176 3) The passed value is an address of an object within another one (which
 1177 is also passed by reference). Such situations are described by an ancestor
1178 jump function and describe situations such as:
1179
1180 B::foo() (struct B * const this)
1181 {
1182 struct A * D.1845;
1183
1184 D.1845_2 = &this_1(D)->D.1748;
1185 A::bar (D.1845_2);
1186
 1187 INFO is the structure describing individual parameters, used across different
 1188 stages of IPA optimizations. PARMS_AINFO contains the information that is
1189 only needed for intraprocedural analysis. */
1190
1191 static void
1192 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1193 class ipa_node_params *info,
1194 struct ipa_jump_func *jfunc,
1195 gcall *call, gimple *stmt, tree name,
1196 tree param_type)
1197 {
1198 HOST_WIDE_INT offset, size;
1199 tree op1, tc_ssa, base, ssa;
1200 bool reverse;
1201 int index;
1202
1203 op1 = gimple_assign_rhs1 (stmt);
1204
1205 if (TREE_CODE (op1) == SSA_NAME)
1206 {
1207 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1208 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1209 else
1210 index = load_from_unmodified_param (fbi, info->descriptors,
1211 SSA_NAME_DEF_STMT (op1));
1212 tc_ssa = op1;
1213 }
1214 else
1215 {
1216 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1217 tc_ssa = gimple_assign_lhs (stmt);
1218 }
1219
1220 if (index >= 0)
1221 {
1222 switch (gimple_assign_rhs_class (stmt))
1223 {
1224 case GIMPLE_BINARY_RHS:
1225 {
1226 tree op2 = gimple_assign_rhs2 (stmt);
1227 if (!is_gimple_ip_invariant (op2)
1228 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1229 != tcc_comparison)
1230 && !useless_type_conversion_p (TREE_TYPE (name),
1231 TREE_TYPE (op1))))
1232 return;
1233
1234 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1235 gimple_assign_rhs_code (stmt));
1236 break;
1237 }
1238 case GIMPLE_SINGLE_RHS:
1239 {
1240 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1241 tc_ssa);
1242 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1243 break;
1244 }
1245 case GIMPLE_UNARY_RHS:
1246 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1247 ipa_set_jf_unary_pass_through (jfunc, index,
1248 gimple_assign_rhs_code (stmt));
1249 default:;
1250 }
1251 return;
1252 }
1253
1254 if (TREE_CODE (op1) != ADDR_EXPR)
1255 return;
1256 op1 = TREE_OPERAND (op1, 0);
1257 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1258 return;
1259 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1260 offset_int mem_offset;
1261 if (!base
1262 || TREE_CODE (base) != MEM_REF
1263 || !mem_ref_offset (base).is_constant (&mem_offset))
1264 return;
1265 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1266 ssa = TREE_OPERAND (base, 0);
1267 if (TREE_CODE (ssa) != SSA_NAME
1268 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1269 || offset < 0)
1270 return;
1271
1272 /* Dynamic types are changed in constructors and destructors. */
1273 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1274 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1275 ipa_set_ancestor_jf (jfunc, offset, index,
1276 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1277 }
1278
1279 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1280 it looks like:
1281
1282 iftmp.1_3 = &obj_2(D)->D.1762;
1283
1284 The base of the MEM_REF must be a default definition SSA NAME of a
 1285 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1286 whole MEM_REF expression is returned and the offset calculated from any
1287 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1288 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1289
1290 static tree
1291 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1292 {
1293 HOST_WIDE_INT size;
1294 tree expr, parm, obj;
1295 bool reverse;
1296
1297 if (!gimple_assign_single_p (assign))
1298 return NULL_TREE;
1299 expr = gimple_assign_rhs1 (assign);
1300
1301 if (TREE_CODE (expr) != ADDR_EXPR)
1302 return NULL_TREE;
1303 expr = TREE_OPERAND (expr, 0);
1304 obj = expr;
1305 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1306
1307 offset_int mem_offset;
1308 if (!expr
1309 || TREE_CODE (expr) != MEM_REF
1310 || !mem_ref_offset (expr).is_constant (&mem_offset))
1311 return NULL_TREE;
1312 parm = TREE_OPERAND (expr, 0);
1313 if (TREE_CODE (parm) != SSA_NAME
1314 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1315 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1316 return NULL_TREE;
1317
1318 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1319 *obj_p = obj;
1320 return expr;
1321 }
1322
1323
1324 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1325 statement PHI, try to find out whether NAME is in fact a
1326 multiple-inheritance typecast from a descendant into an ancestor of a formal
1327 parameter and thus can be described by an ancestor jump function and if so,
1328 write the appropriate function into JFUNC.
1329
1330 Essentially we want to match the following pattern:
1331
1332 if (obj_2(D) != 0B)
1333 goto <bb 3>;
1334 else
1335 goto <bb 4>;
1336
1337 <bb 3>:
1338 iftmp.1_3 = &obj_2(D)->D.1762;
1339
1340 <bb 4>:
1341 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1342 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1343 return D.1879_6; */
1344
1345 static void
1346 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1347 class ipa_node_params *info,
1348 struct ipa_jump_func *jfunc,
1349 gcall *call, gphi *phi)
1350 {
1351 HOST_WIDE_INT offset;
1352 gimple *assign, *cond;
1353 basic_block phi_bb, assign_bb, cond_bb;
1354 tree tmp, parm, expr, obj;
1355 int index, i;
1356
1357 if (gimple_phi_num_args (phi) != 2)
1358 return;
1359
1360 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1361 tmp = PHI_ARG_DEF (phi, 0);
1362 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1363 tmp = PHI_ARG_DEF (phi, 1);
1364 else
1365 return;
1366 if (TREE_CODE (tmp) != SSA_NAME
1367 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1368 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1369 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1370 return;
1371
1372 assign = SSA_NAME_DEF_STMT (tmp);
1373 assign_bb = gimple_bb (assign);
1374 if (!single_pred_p (assign_bb))
1375 return;
1376 expr = get_ancestor_addr_info (assign, &obj, &offset);
1377 if (!expr)
1378 return;
1379 parm = TREE_OPERAND (expr, 0);
1380 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1381 if (index < 0)
1382 return;
1383
1384 cond_bb = single_pred (assign_bb);
1385 cond = last_stmt (cond_bb);
1386 if (!cond
1387 || gimple_code (cond) != GIMPLE_COND
1388 || gimple_cond_code (cond) != NE_EXPR
1389 || gimple_cond_lhs (cond) != parm
1390 || !integer_zerop (gimple_cond_rhs (cond)))
1391 return;
1392
1393 phi_bb = gimple_bb (phi);
1394 for (i = 0; i < 2; i++)
1395 {
1396 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1397 if (pred != assign_bb && pred != cond_bb)
1398 return;
1399 }
1400
1401 ipa_set_ancestor_jf (jfunc, offset, index,
1402 parm_ref_data_pass_through_p (fbi, index, call, parm));
1403 }
1404
1405 /* Inspect the given TYPE and return true iff it has the same structure (the
1406 same number of fields of the same types) as a C++ member pointer. If
1407 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1408 corresponding fields there. */
1409
1410 static bool
1411 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1412 {
1413 tree fld;
1414
1415 if (TREE_CODE (type) != RECORD_TYPE)
1416 return false;
1417
1418 fld = TYPE_FIELDS (type);
1419 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1420 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1421 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1422 return false;
1423
1424 if (method_ptr)
1425 *method_ptr = fld;
1426
1427 fld = DECL_CHAIN (fld);
1428 if (!fld || INTEGRAL_TYPE_P (fld)
1429 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1430 return false;
1431 if (delta)
1432 *delta = fld;
1433
1434 if (DECL_CHAIN (fld))
1435 return false;
1436
1437 return true;
1438 }
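
/* For reference, the layout matched above corresponds to the usual
   representation of a C++ pointer to member function, approximately

     struct {
       void (*__pfn) ();      pointer to the method (or vtable index)
       ptrdiff_t __delta;     adjustment to apply to the this pointer
     };

   (field names follow the common C++ ABI convention and are shown purely as
   an illustration).  */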
1439
1440 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1441 return the rhs of its defining statement. Otherwise return RHS as it
1442 is. */
1443
1444 static inline tree
1445 get_ssa_def_if_simple_copy (tree rhs)
1446 {
1447 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1448 {
1449 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1450
1451 if (gimple_assign_single_p (def_stmt))
1452 rhs = gimple_assign_rhs1 (def_stmt);
1453 else
1454 break;
1455 }
1456 return rhs;
1457 }
1458
1459 /* Simple linked list, describing known contents of an aggregate before
1460 call. */
1461
1462 struct ipa_known_agg_contents_list
1463 {
1464 /* Offset and size of the described part of the aggregate. */
1465 HOST_WIDE_INT offset, size;
 1466 /* Known constant value, or NULL if the contents are known to be unknown. */
1467 tree constant;
1468 /* Pointer to the next structure in the list. */
1469 struct ipa_known_agg_contents_list *next;
1470 };
1471
 1472 /* Add a known content item into a linked list of ipa_known_agg_contents_list
 1473 structures, in which all elements are sorted in ascending order of offset. */
1474
1475 static inline void
1476 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1477 struct ipa_known_agg_contents_list *item)
1478 {
1479 struct ipa_known_agg_contents_list *list = *plist;
1480
1481 for (; list; list = list->next)
1482 {
1483 if (list->offset >= item->offset)
1484 break;
1485
1486 plist = &list->next;
1487 }
1488
1489 item->next = list;
1490 *plist = item;
1491 }
1492
 1493 /* Check whether a given known content is clobbered by some element in
1494 a linked list of ipa_known_agg_contents_list. */
1495
1496 static inline bool
1497 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1498 struct ipa_known_agg_contents_list *item)
1499 {
1500 for (; list; list = list->next)
1501 {
1502 if (list->offset >= item->offset)
1503 return list->offset < item->offset + item->size;
1504
1505 if (list->offset + list->size > item->offset)
1506 return true;
1507 }
1508
1509 return false;
1510 }
1511
1512 /* Build aggregate jump function from LIST, assuming there are exactly
 1513 CONST_COUNT constant entries there and that the offset of the passed argument
 1514 is ARG_OFFSET, and store the result into JFUNC. */
1515
1516 static void
1517 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1518 int const_count, HOST_WIDE_INT arg_offset,
1519 struct ipa_jump_func *jfunc)
1520 {
1521 vec_alloc (jfunc->agg.items, const_count);
1522 while (list)
1523 {
1524 if (list->constant)
1525 {
1526 struct ipa_agg_jf_item item;
1527 item.offset = list->offset - arg_offset;
1528 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1529 item.value = unshare_expr_without_location (list->constant);
1530 jfunc->agg.items->quick_push (item);
1531 }
1532 list = list->next;
1533 }
1534 }
1535
1536 /* If STMT is a memory store to the object whose address is BASE, extract
1537 information (offset, size, and value) into CONTENT, and return true,
1538 otherwise we conservatively assume the whole object is modified with
 1539 unknown content, and return false. CHECK_REF means that the access to the
 1540 object is expected to be in the form of a MEM_REF expression. */
1541
1542 static bool
1543 extract_mem_content (gimple *stmt, tree base, bool check_ref,
1544 struct ipa_known_agg_contents_list *content)
1545 {
1546 HOST_WIDE_INT lhs_offset, lhs_size;
1547 tree lhs, rhs, lhs_base;
1548 bool reverse;
1549
1550 if (!gimple_assign_single_p (stmt))
1551 return false;
1552
1553 lhs = gimple_assign_lhs (stmt);
1554 rhs = gimple_assign_rhs1 (stmt);
1555
1556 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1557 || TREE_CODE (lhs) == BIT_FIELD_REF
1558 || contains_bitfld_component_ref_p (lhs))
1559 return false;
1560
1561 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1562 &lhs_size, &reverse);
1563 if (!lhs_base)
1564 return false;
1565
1566 if (check_ref)
1567 {
1568 if (TREE_CODE (lhs_base) != MEM_REF
1569 || TREE_OPERAND (lhs_base, 0) != base
1570 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1571 return false;
1572 }
1573 else if (lhs_base != base)
1574 return false;
1575
1576 rhs = get_ssa_def_if_simple_copy (rhs);
1577
1578 content->size = lhs_size;
1579 content->offset = lhs_offset;
1580 content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
1581 content->next = NULL;
1582
1583 return true;
1584 }
1585
1586 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1587 in ARG is filled in with constant values. ARG can either be an aggregate
1588 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1589 aggregate. JFUNC is the jump function into which the constants are
 1590 subsequently stored. AA_WALK_BUDGET_P points to the limit on the number of
1591 statements we allow get_continuation_for_phi to examine. */
1592
1593 static void
1594 determine_known_aggregate_parts (gcall *call, tree arg,
1595 tree arg_type,
1596 struct ipa_jump_func *jfunc,
1597 unsigned *aa_walk_budget_p)
1598 {
1599 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1600 bitmap visited = NULL;
1601 int item_count = 0, const_count = 0;
1602 int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
1603 HOST_WIDE_INT arg_offset, arg_size;
1604 tree arg_base;
1605 bool check_ref, by_ref;
1606 ao_ref r;
1607
1608 if (ipa_max_agg_items == 0)
1609 return;
1610
1611 /* The function operates in three stages. First, we prepare check_ref, r,
1612 arg_base and arg_offset based on what is actually passed as an actual
1613 argument. */
1614
1615 if (POINTER_TYPE_P (arg_type))
1616 {
1617 by_ref = true;
1618 if (TREE_CODE (arg) == SSA_NAME)
1619 {
1620 tree type_size;
1621 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1622 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1623 return;
1624 check_ref = true;
1625 arg_base = arg;
1626 arg_offset = 0;
1627 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1628 arg_size = tree_to_uhwi (type_size);
1629 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1630 }
1631 else if (TREE_CODE (arg) == ADDR_EXPR)
1632 {
1633 bool reverse;
1634
1635 arg = TREE_OPERAND (arg, 0);
1636 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1637 &arg_size, &reverse);
1638 if (!arg_base)
1639 return;
1640 if (DECL_P (arg_base))
1641 {
1642 check_ref = false;
1643 ao_ref_init (&r, arg_base);
1644 }
1645 else
1646 return;
1647 }
1648 else
1649 return;
1650 }
1651 else
1652 {
1653 bool reverse;
1654
1655 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1656
1657 by_ref = false;
1658 check_ref = false;
1659 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1660 &arg_size, &reverse);
1661 if (!arg_base)
1662 return;
1663
1664 ao_ref_init (&r, arg);
1665 }
1666
 1667 /* The second stage traverses the virtual SSA web backwards starting from the
 1668 call statement, looking only at individual dominating virtual operands (whose
 1669 definitions dominate the call). As long as it can tell that the contents of
 1670 the aggregate are affected by the definition of a virtual operand, it builds
 1671 a sorted linked list of ipa_known_agg_contents_list entries describing that. */
1672
1673 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1674 {
1675 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1676
1677 if (gimple_code (stmt) == GIMPLE_PHI)
1678 {
1679 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1680 *aa_walk_budget_p,
1681 &visited, false, NULL, NULL);
1682 continue;
1683 }
1684
1685 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1686 {
1687 struct ipa_known_agg_contents_list *content
1688 = XALLOCA (struct ipa_known_agg_contents_list);
1689
1690 if (!extract_mem_content (stmt, arg_base, check_ref, content))
1691 break;
1692
1693 /* Now we get a dominating virtual operand, and need to check
 1694 whether its value is clobbered by any other dominating one. */
1695 if (content->constant
1696 && !clobber_by_agg_contents_list_p (all_list, content))
1697 {
1698 struct ipa_known_agg_contents_list *copy
1699 = XALLOCA (struct ipa_known_agg_contents_list);
1700
1701 /* Add to the list consisting of only dominating virtual
1702 operands, whose definitions can finally reach the call. */
1703 add_to_agg_contents_list (&list, (*copy = *content, copy));
1704
1705 if (++const_count == ipa_max_agg_items)
1706 break;
1707 }
1708
1709 /* Add to the list consisting of all dominating virtual operands. */
1710 add_to_agg_contents_list (&all_list, content);
1711
1712 if (++item_count == 2 * ipa_max_agg_items)
1713 break;
1714 }
1715 dom_vuse = gimple_vuse (stmt);
1716 }
1717
1718 if (visited)
1719 BITMAP_FREE (visited);
1720
1721 /* Third stage just goes over the list and creates an appropriate vector of
1722 ipa_agg_jf_item structures out of it, of course only if there are
1723 any known constants to begin with. */
1724
1725 if (const_count)
1726 {
1727 jfunc->agg.by_ref = by_ref;
1728 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1729 }
1730 }
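
/* As a simplified example of what the traversal above recognizes, for

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the stores to s.a and s.b are found while walking the virtual SSA web
   backwards from the call, and build_agg_jump_func_from_list turns the
   constants 1 and 2, together with their offsets within S, into the aggregate
   jump function of the call (assuming nothing clobbers them in between).  */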
1731
1732
 1733 /* Return the type of the Ith formal parameter of the callee associated with
 1734 call graph edge E. */
1735
1736 tree
1737 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1738 {
1739 int n;
1740 tree type = (e->callee
1741 ? TREE_TYPE (e->callee->decl)
1742 : gimple_call_fntype (e->call_stmt));
1743 tree t = TYPE_ARG_TYPES (type);
1744
1745 for (n = 0; n < i; n++)
1746 {
1747 if (!t)
1748 break;
1749 t = TREE_CHAIN (t);
1750 }
1751 if (t)
1752 return TREE_VALUE (t);
1753 if (!e->callee)
1754 return NULL;
1755 t = DECL_ARGUMENTS (e->callee->decl);
1756 for (n = 0; n < i; n++)
1757 {
1758 if (!t)
1759 return NULL;
1760 t = TREE_CHAIN (t);
1761 }
1762 if (t)
1763 return TREE_TYPE (t);
1764 return NULL;
1765 }
1766
1767 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1768 allocated structure or a previously existing one shared with other jump
1769 functions and/or transformation summaries. */
1770
1771 ipa_bits *
1772 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1773 {
1774 ipa_bits tmp;
1775 tmp.value = value;
1776 tmp.mask = mask;
1777
1778 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1779 if (*slot)
1780 return *slot;
1781
1782 ipa_bits *res = ggc_alloc<ipa_bits> ();
1783 res->value = value;
1784 res->mask = mask;
1785 *slot = res;
1786
1787 return res;
1788 }
1789
 1790 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use a
 1791 hash table in order to avoid creating multiple identical ipa_bits structures. */
1792
1793 static void
1794 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1795 const widest_int &mask)
1796 {
1797 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1798 }
1799
1800 /* Return a pointer to a value_range just like *TMP, but either find it in
1801 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1802
1803 static value_range_base *
1804 ipa_get_value_range (value_range_base *tmp)
1805 {
1806 value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1807 if (*slot)
1808 return *slot;
1809
1810 value_range_base *vr = ggc_alloc<value_range_base> ();
1811 *vr = *tmp;
1812 *slot = vr;
1813
1814 return vr;
1815 }
1816
1817 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
 1818 equiv set. Use a hash table in order to avoid creating multiple identical
 1819 copies of value_ranges. */
1820
1821 static value_range_base *
1822 ipa_get_value_range (enum value_range_kind type, tree min, tree max)
1823 {
1824 value_range_base tmp (type, min, max);
1825 return ipa_get_value_range (&tmp);
1826 }
1827
1828 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
 1829 a NULL equiv bitmap. Use a hash table in order to avoid creating multiple
 1830 identical value_range structures. */
1831
1832 static void
1833 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
1834 tree min, tree max)
1835 {
1836 jf->m_vr = ipa_get_value_range (type, min, max);
1837 }
1838
1839 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
1840 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1841
1842 static void
1843 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
1844 {
1845 jf->m_vr = ipa_get_value_range (tmp);
1846 }
1847
1848 /* Compute jump functions for all arguments of callsite CS and insert the
1849 information in the jump_functions array in the ipa_edge_args corresponding
1850 to this callsite. */
1851
1852 static void
1853 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1854 struct cgraph_edge *cs)
1855 {
1856 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
1857 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
1858 gcall *call = cs->call_stmt;
1859 int n, arg_num = gimple_call_num_args (call);
1860 bool useful_context = false;
1861
1862 if (arg_num == 0 || args->jump_functions)
1863 return;
1864 vec_safe_grow_cleared (args->jump_functions, arg_num);
1865 if (flag_devirtualize)
1866 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1867
1868 if (gimple_call_internal_p (call))
1869 return;
1870 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1871 return;
1872
1873 for (n = 0; n < arg_num; n++)
1874 {
1875 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1876 tree arg = gimple_call_arg (call, n);
1877 tree param_type = ipa_get_callee_param_type (cs, n);
1878 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1879 {
1880 tree instance;
1881 class ipa_polymorphic_call_context context (cs->caller->decl,
1882 arg, cs->call_stmt,
1883 &instance);
1884 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
1885 &fbi->aa_walk_budget);
1886 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1887 if (!context.useless_p ())
1888 useful_context = true;
1889 }
1890
1891 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1892 {
1893 bool addr_nonzero = false;
1894 bool strict_overflow = false;
1895
1896 if (TREE_CODE (arg) == SSA_NAME
1897 && param_type
1898 && get_ptr_nonnull (arg))
1899 addr_nonzero = true;
1900 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1901 addr_nonzero = true;
1902
1903 if (addr_nonzero)
1904 {
1905 tree z = build_int_cst (TREE_TYPE (arg), 0);
1906 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1907 }
1908 else
1909 gcc_assert (!jfunc->m_vr);
1910 }
1911 else
1912 {
1913 wide_int min, max;
1914 value_range_kind type;
1915 if (TREE_CODE (arg) == SSA_NAME
1916 && param_type
1917 && (type = get_range_info (arg, &min, &max))
1918 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1919 {
1920 value_range_base resvr;
1921 value_range_base tmpvr (type,
1922 wide_int_to_tree (TREE_TYPE (arg), min),
1923 wide_int_to_tree (TREE_TYPE (arg), max));
1924 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
1925 &tmpvr, TREE_TYPE (arg));
1926 if (!resvr.undefined_p () && !resvr.varying_p ())
1927 ipa_set_jfunc_vr (jfunc, &resvr);
1928 else
1929 gcc_assert (!jfunc->m_vr);
1930 }
1931 else
1932 gcc_assert (!jfunc->m_vr);
1933 }
1934
1935 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1936 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1937 {
1938 if (TREE_CODE (arg) == SSA_NAME)
1939 ipa_set_jfunc_bits (jfunc, 0,
1940 widest_int::from (get_nonzero_bits (arg),
1941 TYPE_SIGN (TREE_TYPE (arg))));
1942 else
1943 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1944 }
1945 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1946 {
1947 unsigned HOST_WIDE_INT bitpos;
1948 unsigned align;
1949
1950 get_pointer_alignment_1 (arg, &align, &bitpos);
1951 widest_int mask = wi::bit_and_not
1952 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1953 align / BITS_PER_UNIT - 1);
1954 widest_int value = bitpos / BITS_PER_UNIT;
1955 ipa_set_jfunc_bits (jfunc, value, mask);
1956 }
1957 else
1958 gcc_assert (!jfunc->bits);
1959
1960 if (is_gimple_ip_invariant (arg)
1961 || (VAR_P (arg)
1962 && is_global_var (arg)
1963 && TREE_READONLY (arg)))
1964 ipa_set_jf_constant (jfunc, arg, cs);
1965 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1966 && TREE_CODE (arg) == PARM_DECL)
1967 {
1968 int index = ipa_get_param_decl_index (info, arg);
1969
1970 gcc_assert (index >= 0);
1971 /* Aggregate passed by value, check for pass-through, otherwise we
1972 will attempt to fill in aggregate contents later in this
1973 loop. */
1974 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1975 {
1976 ipa_set_jf_simple_pass_through (jfunc, index, false);
1977 continue;
1978 }
1979 }
1980 else if (TREE_CODE (arg) == SSA_NAME)
1981 {
1982 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1983 {
1984 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1985 if (index >= 0)
1986 {
1987 bool agg_p;
1988 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1989 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1990 }
1991 }
1992 else
1993 {
1994 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1995 if (is_gimple_assign (stmt))
1996 compute_complex_assign_jump_func (fbi, info, jfunc,
1997 call, stmt, arg, param_type);
1998 else if (gimple_code (stmt) == GIMPLE_PHI)
1999 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2000 call,
2001 as_a <gphi *> (stmt));
2002 }
2003 }
2004
2005 /* If ARG is a pointer, we cannot use its type to determine the type of the
2006 aggregate passed (because type conversions are ignored in gimple). Usually
2007 we can safely get the type from the function declaration, but in the case of
2008 K&R prototypes or variadic functions we can try our luck with the type of
2009 the pointer passed. TODO: Since we look for actual initialization of the
2010 memory object, we might do better to work out the type from the stores we find. */
2011 if (!param_type)
2012 param_type = TREE_TYPE (arg);
2013
2014 if ((jfunc->type != IPA_JF_PASS_THROUGH
2015 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2016 && (jfunc->type != IPA_JF_ANCESTOR
2017 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2018 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2019 || POINTER_TYPE_P (param_type)))
2020 determine_known_aggregate_parts (call, arg, param_type, jfunc,
2021 &fbi->aa_walk_budget);
2022 }
2023 if (!useful_context)
2024 vec_free (args->polymorphic_call_contexts);
2025 }
2026
2027 /* Compute jump functions for all edges - both direct and indirect - outgoing
2028 from BB. */
2029
2030 static void
2031 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2032 {
2033 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2034 int i;
2035 struct cgraph_edge *cs;
2036
2037 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2038 {
2039 struct cgraph_node *callee = cs->callee;
2040
2041 if (callee)
2042 {
2043 callee->ultimate_alias_target ();
2044 /* We do not need to bother analyzing calls to unknown functions
2045 unless they may become known during lto/whopr. */
2046 if (!callee->definition && !flag_lto)
2047 continue;
2048 }
2049 ipa_compute_jump_functions_for_edge (fbi, cs);
2050 }
2051 }
2052
2053 /* If STMT looks like a statement loading a value from a member pointer formal
2054 parameter, return that parameter and store the offset of the field to
2055 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2056 might be clobbered). If USE_DELTA, then we look for a use of the delta
2057 field rather than the pfn. */
2058
2059 static tree
2060 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2061 HOST_WIDE_INT *offset_p)
2062 {
2063 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2064
2065 if (!gimple_assign_single_p (stmt))
2066 return NULL_TREE;
2067
2068 rhs = gimple_assign_rhs1 (stmt);
2069 if (TREE_CODE (rhs) == COMPONENT_REF)
2070 {
2071 ref_field = TREE_OPERAND (rhs, 1);
2072 rhs = TREE_OPERAND (rhs, 0);
2073 }
2074 else
2075 ref_field = NULL_TREE;
2076 if (TREE_CODE (rhs) != MEM_REF)
2077 return NULL_TREE;
2078 rec = TREE_OPERAND (rhs, 0);
2079 if (TREE_CODE (rec) != ADDR_EXPR)
2080 return NULL_TREE;
2081 rec = TREE_OPERAND (rec, 0);
2082 if (TREE_CODE (rec) != PARM_DECL
2083 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2084 return NULL_TREE;
2085 ref_offset = TREE_OPERAND (rhs, 1);
2086
2087 if (use_delta)
2088 fld = delta_field;
2089 else
2090 fld = ptr_field;
2091 if (offset_p)
2092 *offset_p = int_bit_position (fld);
2093
2094 if (ref_field)
2095 {
2096 if (integer_nonzerop (ref_offset))
2097 return NULL_TREE;
2098 return ref_field == fld ? rec : NULL_TREE;
2099 }
2100 else
2101 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2102 : NULL_TREE;
2103 }
2104
2105 /* Returns true iff T is an SSA_NAME defined by a statement. */
2106
2107 static bool
2108 ipa_is_ssa_with_stmt_def (tree t)
2109 {
2110 if (TREE_CODE (t) == SSA_NAME
2111 && !SSA_NAME_IS_DEFAULT_DEF (t))
2112 return true;
2113 else
2114 return false;
2115 }
2116
2117 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2118 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2119 indirect call graph edge. */
2120
2121 static struct cgraph_edge *
2122 ipa_note_param_call (struct cgraph_node *node, int param_index,
2123 gcall *stmt)
2124 {
2125 struct cgraph_edge *cs;
2126
2127 cs = node->get_edge (stmt);
2128 cs->indirect_info->param_index = param_index;
2129 cs->indirect_info->agg_contents = 0;
2130 cs->indirect_info->member_ptr = 0;
2131 cs->indirect_info->guaranteed_unmodified = 0;
2132 return cs;
2133 }
2134
2135 /* Analyze the CALL and examine uses of formal parameters of the caller
2136 FBI->node, which is described by FBI->info and whose body is being
2137 analyzed. Currently it checks
2138 whether the call calls a pointer that is a formal parameter and if so, the
2139 parameter is marked with the called flag and an indirect call graph edge
2140 describing the call is created. This is very simple for ordinary pointers
2141 represented in SSA but not-so-nice when it comes to member pointers. The
2142 ugly part of this function does nothing more than trying to match the
2143 pattern of such a call. An example of such a pattern is the gimple dump
2144 below, the call is on the last line:
2145
2146 <bb 2>:
2147 f$__delta_5 = f.__delta;
2148 f$__pfn_24 = f.__pfn;
2149
2150 or
2151 <bb 2>:
2152 f$__delta_5 = MEM[(struct *)&f];
2153 f$__pfn_24 = MEM[(struct *)&f + 4B];
2154
2155 and a few lines below:
2156
2157 <bb 5>
2158 D.2496_3 = (int) f$__pfn_24;
2159 D.2497_4 = D.2496_3 & 1;
2160 if (D.2497_4 != 0)
2161 goto <bb 3>;
2162 else
2163 goto <bb 4>;
2164
2165 <bb 6>:
2166 D.2500_7 = (unsigned int) f$__delta_5;
2167 D.2501_8 = &S + D.2500_7;
2168 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2169 D.2503_10 = *D.2502_9;
2170 D.2504_12 = f$__pfn_24 + -1;
2171 D.2505_13 = (unsigned int) D.2504_12;
2172 D.2506_14 = D.2503_10 + D.2505_13;
2173 D.2507_15 = *D.2506_14;
2174 iftmp.11_16 = (String:: *) D.2507_15;
2175
2176 <bb 7>:
2177 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2178 D.2500_19 = (unsigned int) f$__delta_5;
2179 D.2508_20 = &S + D.2500_19;
2180 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2181
2182 Such patterns are results of simple calls to a member pointer:
2183
2184 int doprinting (int (MyString::* f)(int) const)
2185 {
2186 MyString S ("somestring");
2187
2188 return (S.*f)(4);
2189 }
2190
2191 Moreover, the function also looks for called pointers loaded from aggregates
2192 passed by value or reference. */
2193
2194 static void
2195 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2196 tree target)
2197 {
2198 class ipa_node_params *info = fbi->info;
2199 HOST_WIDE_INT offset;
2200 bool by_ref;
2201
2202 if (SSA_NAME_IS_DEFAULT_DEF (target))
2203 {
2204 tree var = SSA_NAME_VAR (target);
2205 int index = ipa_get_param_decl_index (info, var);
2206 if (index >= 0)
2207 ipa_note_param_call (fbi->node, index, call);
2208 return;
2209 }
2210
2211 int index;
2212 gimple *def = SSA_NAME_DEF_STMT (target);
2213 bool guaranteed_unmodified;
2214 if (gimple_assign_single_p (def)
2215 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2216 gimple_assign_rhs1 (def), &index, &offset,
2217 NULL, &by_ref, &guaranteed_unmodified))
2218 {
2219 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2220 cs->indirect_info->offset = offset;
2221 cs->indirect_info->agg_contents = 1;
2222 cs->indirect_info->by_ref = by_ref;
2223 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2224 return;
2225 }
2226
2227 /* Now we need to try to match the complex pattern of calling a member
2228 pointer. */
2229 if (gimple_code (def) != GIMPLE_PHI
2230 || gimple_phi_num_args (def) != 2
2231 || !POINTER_TYPE_P (TREE_TYPE (target))
2232 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2233 return;
2234
2235 /* First, we need to check whether one of these is a load from a member
2236 pointer that is a parameter to this function. */
2237 tree n1 = PHI_ARG_DEF (def, 0);
2238 tree n2 = PHI_ARG_DEF (def, 1);
2239 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2240 return;
2241 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2242 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2243
2244 tree rec;
2245 basic_block bb, virt_bb;
2246 basic_block join = gimple_bb (def);
2247 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2248 {
2249 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2250 return;
2251
2252 bb = EDGE_PRED (join, 0)->src;
2253 virt_bb = gimple_bb (d2);
2254 }
2255 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2256 {
2257 bb = EDGE_PRED (join, 1)->src;
2258 virt_bb = gimple_bb (d1);
2259 }
2260 else
2261 return;
2262
2263 /* Second, we need to check that the basic blocks are laid out in the way
2264 corresponding to the pattern. */
2265
2266 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2267 || single_pred (virt_bb) != bb
2268 || single_succ (virt_bb) != join)
2269 return;
2270
2271 /* Third, let's see that the branching is done depending on the least
2272 significant bit of the pfn. */
2273
2274 gimple *branch = last_stmt (bb);
2275 if (!branch || gimple_code (branch) != GIMPLE_COND)
2276 return;
2277
2278 if ((gimple_cond_code (branch) != NE_EXPR
2279 && gimple_cond_code (branch) != EQ_EXPR)
2280 || !integer_zerop (gimple_cond_rhs (branch)))
2281 return;
2282
2283 tree cond = gimple_cond_lhs (branch);
2284 if (!ipa_is_ssa_with_stmt_def (cond))
2285 return;
2286
2287 def = SSA_NAME_DEF_STMT (cond);
2288 if (!is_gimple_assign (def)
2289 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2290 || !integer_onep (gimple_assign_rhs2 (def)))
2291 return;
2292
2293 cond = gimple_assign_rhs1 (def);
2294 if (!ipa_is_ssa_with_stmt_def (cond))
2295 return;
2296
2297 def = SSA_NAME_DEF_STMT (cond);
2298
2299 if (is_gimple_assign (def)
2300 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2301 {
2302 cond = gimple_assign_rhs1 (def);
2303 if (!ipa_is_ssa_with_stmt_def (cond))
2304 return;
2305 def = SSA_NAME_DEF_STMT (cond);
2306 }
2307
2308 tree rec2;
2309 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2310 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2311 == ptrmemfunc_vbit_in_delta),
2312 NULL);
2313 if (rec != rec2)
2314 return;
2315
2316 index = ipa_get_param_decl_index (info, rec);
2317 if (index >= 0
2318 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2319 {
2320 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2321 cs->indirect_info->offset = offset;
2322 cs->indirect_info->agg_contents = 1;
2323 cs->indirect_info->member_ptr = 1;
2324 cs->indirect_info->guaranteed_unmodified = 1;
2325 }
2326
2327 return;
2328 }
2329
2330 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2331 object referenced in the expression is a formal parameter of the caller
2332 FBI->node (described by FBI->info), create a call note for the
2333 statement. */
2334
2335 static void
2336 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2337 gcall *call, tree target)
2338 {
2339 tree obj = OBJ_TYPE_REF_OBJECT (target);
2340 int index;
2341 HOST_WIDE_INT anc_offset;
2342
2343 if (!flag_devirtualize)
2344 return;
2345
2346 if (TREE_CODE (obj) != SSA_NAME)
2347 return;
2348
2349 class ipa_node_params *info = fbi->info;
2350 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2351 {
2352 struct ipa_jump_func jfunc;
2353 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2354 return;
2355
2356 anc_offset = 0;
2357 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2358 gcc_assert (index >= 0);
2359 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2360 call, &jfunc))
2361 return;
2362 }
2363 else
2364 {
2365 struct ipa_jump_func jfunc;
2366 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2367 tree expr;
2368
2369 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2370 if (!expr)
2371 return;
2372 index = ipa_get_param_decl_index (info,
2373 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2374 gcc_assert (index >= 0);
2375 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2376 call, &jfunc, anc_offset))
2377 return;
2378 }
2379
2380 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2381 class cgraph_indirect_call_info *ii = cs->indirect_info;
2382 ii->offset = anc_offset;
2383 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2384 ii->otr_type = obj_type_ref_class (target);
2385 ii->polymorphic = 1;
2386 }
2387
2388 /* Analyze a call statement CALL to determine whether and how it utilizes formal
2389 parameters of the caller described by FBI->info. FBI holds intermediate
2390 information gathered about the function whose body is being analyzed. */
2391
2392 static void
2393 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2394 {
2395 tree target = gimple_call_fn (call);
2396
2397 if (!target
2398 || (TREE_CODE (target) != SSA_NAME
2399 && !virtual_method_call_p (target)))
2400 return;
2401
2402 struct cgraph_edge *cs = fbi->node->get_edge (call);
2403 /* If we previously turned the call into a direct call, there is
2404 no need to analyze. */
2405 if (cs && !cs->indirect_unknown_callee)
2406 return;
2407
2408 if (cs->indirect_info->polymorphic && flag_devirtualize)
2409 {
2410 tree instance;
2411 tree target = gimple_call_fn (call);
2412 ipa_polymorphic_call_context context (current_function_decl,
2413 target, call, &instance);
2414
2415 gcc_checking_assert (cs->indirect_info->otr_type
2416 == obj_type_ref_class (target));
2417 gcc_checking_assert (cs->indirect_info->otr_token
2418 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2419
2420 cs->indirect_info->vptr_changed
2421 = !context.get_dynamic_type (instance,
2422 OBJ_TYPE_REF_OBJECT (target),
2423 obj_type_ref_class (target), call,
2424 &fbi->aa_walk_budget);
2425 cs->indirect_info->context = context;
2426 }
2427
2428 if (TREE_CODE (target) == SSA_NAME)
2429 ipa_analyze_indirect_call_uses (fbi, call, target);
2430 else if (virtual_method_call_p (target))
2431 ipa_analyze_virtual_call_uses (fbi, call, target);
2432 }
2433
2434
2435 /* Analyze the call statement STMT with respect to formal parameters (described
2436 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2437 formal parameters are called. */
2438
2439 static void
2440 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2441 {
2442 if (is_gimple_call (stmt))
2443 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2444 }
2445
2446 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2447 If OP is a parameter declaration, mark it as used in the info structure
2448 passed in DATA. */
2449
2450 static bool
2451 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2452 {
2453 class ipa_node_params *info = (class ipa_node_params *) data;
2454
2455 op = get_base_address (op);
2456 if (op
2457 && TREE_CODE (op) == PARM_DECL)
2458 {
2459 int index = ipa_get_param_decl_index (info, op);
2460 gcc_assert (index >= 0);
2461 ipa_set_param_used (info, index, true);
2462 }
2463
2464 return false;
2465 }
2466
2467 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2468 the findings in various structures of the associated ipa_node_params
2469 structure, such as parameter flags, notes etc. FBI holds various data about
2470 the function being analyzed. */
2471
2472 static void
2473 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2474 {
2475 gimple_stmt_iterator gsi;
2476 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2477 {
2478 gimple *stmt = gsi_stmt (gsi);
2479
2480 if (is_gimple_debug (stmt))
2481 continue;
2482
2483 ipa_analyze_stmt_uses (fbi, stmt);
2484 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2485 visit_ref_for_mod_analysis,
2486 visit_ref_for_mod_analysis,
2487 visit_ref_for_mod_analysis);
2488 }
2489 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2490 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2491 visit_ref_for_mod_analysis,
2492 visit_ref_for_mod_analysis,
2493 visit_ref_for_mod_analysis);
2494 }
2495
2496 /* Calculate controlled uses of parameters of NODE. */
2497
2498 static void
2499 ipa_analyze_controlled_uses (struct cgraph_node *node)
2500 {
2501 class ipa_node_params *info = IPA_NODE_REF (node);
2502
2503 for (int i = 0; i < ipa_get_param_count (info); i++)
2504 {
2505 tree parm = ipa_get_param (info, i);
2506 int controlled_uses = 0;
2507
2508 /* For SSA regs see if parameter is used. For non-SSA we compute
2509 the flag during modification analysis. */
2510 if (is_gimple_reg (parm))
2511 {
2512 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2513 parm);
2514 if (ddef && !has_zero_uses (ddef))
2515 {
2516 imm_use_iterator imm_iter;
2517 use_operand_p use_p;
2518
2519 ipa_set_param_used (info, i, true);
2520 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2521 if (!is_gimple_call (USE_STMT (use_p)))
2522 {
2523 if (!is_gimple_debug (USE_STMT (use_p)))
2524 {
2525 controlled_uses = IPA_UNDESCRIBED_USE;
2526 break;
2527 }
2528 }
2529 else
2530 controlled_uses++;
2531 }
2532 else
2533 controlled_uses = 0;
2534 }
2535 else
2536 controlled_uses = IPA_UNDESCRIBED_USE;
2537 ipa_set_controlled_uses (info, i, controlled_uses);
2538 }
2539 }
2540
2541 /* Free stuff in BI. */
2542
2543 static void
2544 free_ipa_bb_info (struct ipa_bb_info *bi)
2545 {
2546 bi->cg_edges.release ();
2547 bi->param_aa_statuses.release ();
2548 }
2549
2550 /* Dominator walker driving the analysis. */
2551
2552 class analysis_dom_walker : public dom_walker
2553 {
2554 public:
2555 analysis_dom_walker (struct ipa_func_body_info *fbi)
2556 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2557
2558 virtual edge before_dom_children (basic_block);
2559
2560 private:
2561 struct ipa_func_body_info *m_fbi;
2562 };
2563
2564 edge
2565 analysis_dom_walker::before_dom_children (basic_block bb)
2566 {
2567 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2568 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2569 return NULL;
2570 }
2571
2572 /* Release body info FBI. */
2573
2574 void
2575 ipa_release_body_info (struct ipa_func_body_info *fbi)
2576 {
2577 int i;
2578 struct ipa_bb_info *bi;
2579
2580 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi->bb_infos.release ();
2583 }
2584
2585 /* Initialize the array describing properties of formal parameters
2586 of NODE, analyze their uses and compute jump functions associated
2587 with actual arguments of calls from within NODE. */
2588
2589 void
2590 ipa_analyze_node (struct cgraph_node *node)
2591 {
2592 struct ipa_func_body_info fbi;
2593 class ipa_node_params *info;
2594
2595 ipa_check_create_node_params ();
2596 ipa_check_create_edge_args ();
2597 info = IPA_NODE_REF (node);
2598
2599 if (info->analysis_done)
2600 return;
2601 info->analysis_done = 1;
2602
2603 if (ipa_func_spec_opts_forbid_analysis_p (node))
2604 {
2605 for (int i = 0; i < ipa_get_param_count (info); i++)
2606 {
2607 ipa_set_param_used (info, i, true);
2608 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2609 }
2610 return;
2611 }
2612
2613 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2614 push_cfun (func);
2615 calculate_dominance_info (CDI_DOMINATORS);
2616 ipa_initialize_node_params (node);
2617 ipa_analyze_controlled_uses (node);
2618
2619 fbi.node = node;
2620 fbi.info = IPA_NODE_REF (node);
2621 fbi.bb_infos = vNULL;
2622 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2623 fbi.param_count = ipa_get_param_count (info);
2624 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2625
2626 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2627 {
2628 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2629 bi->cg_edges.safe_push (cs);
2630 }
2631
2632 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2633 {
2634 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2635 bi->cg_edges.safe_push (cs);
2636 }
2637
2638 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2639
2640 ipa_release_body_info (&fbi);
2641 free_dominance_info (CDI_DOMINATORS);
2642 pop_cfun ();
2643 }
2644
2645 /* Update the jump functions associated with call graph edge E when the call
2646 graph edge CS is being inlined, assuming that E->caller is already (possibly
2647 indirectly) inlined into CS->callee and that E has not been inlined. */
2648
2649 static void
2650 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2651 struct cgraph_edge *e)
2652 {
2653 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2654 class ipa_edge_args *args = IPA_EDGE_REF (e);
2655 if (!args)
2656 return;
2657 int count = ipa_get_cs_argument_count (args);
2658 int i;
2659
2660 for (i = 0; i < count; i++)
2661 {
2662 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2663 class ipa_polymorphic_call_context *dst_ctx
2664 = ipa_get_ith_polymorhic_call_context (args, i);
2665
2666 if (dst->type == IPA_JF_ANCESTOR)
2667 {
2668 struct ipa_jump_func *src;
2669 int dst_fid = dst->value.ancestor.formal_id;
2670 class ipa_polymorphic_call_context *src_ctx
2671 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2672
2673 /* Variable number of arguments can cause havoc if we try to access
2674 one that does not exist in the inlined edge. So make sure we
2675 don't. */
2676 if (dst_fid >= ipa_get_cs_argument_count (top))
2677 {
2678 ipa_set_jf_unknown (dst);
2679 continue;
2680 }
2681
2682 src = ipa_get_ith_jump_func (top, dst_fid);
2683
2684 if (src_ctx && !src_ctx->useless_p ())
2685 {
2686 class ipa_polymorphic_call_context ctx = *src_ctx;
2687
2688 /* TODO: Make type preserved safe WRT contexts. */
2689 if (!ipa_get_jf_ancestor_type_preserved (dst))
2690 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2691 ctx.offset_by (dst->value.ancestor.offset);
2692 if (!ctx.useless_p ())
2693 {
2694 if (!dst_ctx)
2695 {
2696 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2697 count);
2698 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2699 }
2700
2701 dst_ctx->combine_with (ctx);
2702 }
2703 }
2704
2705 if (src->agg.items
2706 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2707 {
2708 struct ipa_agg_jf_item *item;
2709 int j;
2710
2711 /* Currently we do not produce clobber aggregate jump functions,
2712 replace with merging when we do. */
2713 gcc_assert (!dst->agg.items);
2714
2715 dst->agg.items = vec_safe_copy (src->agg.items);
2716 dst->agg.by_ref = src->agg.by_ref;
2717 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2718 item->offset -= dst->value.ancestor.offset;
2719 }
2720
2721 if (src->type == IPA_JF_PASS_THROUGH
2722 && src->value.pass_through.operation == NOP_EXPR)
2723 {
2724 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2725 dst->value.ancestor.agg_preserved &=
2726 src->value.pass_through.agg_preserved;
2727 }
2728 else if (src->type == IPA_JF_ANCESTOR)
2729 {
2730 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2731 dst->value.ancestor.offset += src->value.ancestor.offset;
2732 dst->value.ancestor.agg_preserved &=
2733 src->value.ancestor.agg_preserved;
2734 }
2735 else
2736 ipa_set_jf_unknown (dst);
2737 }
2738 else if (dst->type == IPA_JF_PASS_THROUGH)
2739 {
2740 struct ipa_jump_func *src;
2741 /* We must check range due to calls with variable number of arguments
2742 and we cannot combine jump functions with operations. */
2743 if (dst->value.pass_through.operation == NOP_EXPR
2744 && (top && dst->value.pass_through.formal_id
2745 < ipa_get_cs_argument_count (top)))
2746 {
2747 int dst_fid = dst->value.pass_through.formal_id;
2748 src = ipa_get_ith_jump_func (top, dst_fid);
2749 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2750 class ipa_polymorphic_call_context *src_ctx
2751 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2752
2753 if (src_ctx && !src_ctx->useless_p ())
2754 {
2755 class ipa_polymorphic_call_context ctx = *src_ctx;
2756
2757 /* TODO: Make type preserved safe WRT contexts. */
2758 if (!ipa_get_jf_pass_through_type_preserved (dst))
2759 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2760 if (!ctx.useless_p ())
2761 {
2762 if (!dst_ctx)
2763 {
2764 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2765 count);
2766 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2767 }
2768 dst_ctx->combine_with (ctx);
2769 }
2770 }
2771 switch (src->type)
2772 {
2773 case IPA_JF_UNKNOWN:
2774 ipa_set_jf_unknown (dst);
2775 break;
2776 case IPA_JF_CONST:
2777 ipa_set_jf_cst_copy (dst, src);
2778 break;
2779
2780 case IPA_JF_PASS_THROUGH:
2781 {
2782 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2783 enum tree_code operation;
2784 operation = ipa_get_jf_pass_through_operation (src);
2785
2786 if (operation == NOP_EXPR)
2787 {
2788 bool agg_p;
2789 agg_p = dst_agg_p
2790 && ipa_get_jf_pass_through_agg_preserved (src);
2791 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2792 }
2793 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2794 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2795 else
2796 {
2797 tree operand = ipa_get_jf_pass_through_operand (src);
2798 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2799 operation);
2800 }
2801 break;
2802 }
2803 case IPA_JF_ANCESTOR:
2804 {
2805 bool agg_p;
2806 agg_p = dst_agg_p
2807 && ipa_get_jf_ancestor_agg_preserved (src);
2808 ipa_set_ancestor_jf (dst,
2809 ipa_get_jf_ancestor_offset (src),
2810 ipa_get_jf_ancestor_formal_id (src),
2811 agg_p);
2812 break;
2813 }
2814 default:
2815 gcc_unreachable ();
2816 }
2817
2818 if (src->agg.items
2819 && (dst_agg_p || !src->agg.by_ref))
2820 {
2821 /* Currently we do not produce clobber aggregate jump
2822 functions, replace with merging when we do. */
2823 gcc_assert (!dst->agg.items);
2824
2825 dst->agg.by_ref = src->agg.by_ref;
2826 dst->agg.items = vec_safe_copy (src->agg.items);
2827 }
2828 }
2829 else
2830 ipa_set_jf_unknown (dst);
2831 }
2832 }
2833 }
2834
2835 /* If TARGET is an addr_expr of a function declaration, make it the
2836 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2837 Otherwise, return NULL. */
2838
2839 struct cgraph_edge *
2840 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2841 bool speculative)
2842 {
2843 struct cgraph_node *callee;
2844 bool unreachable = false;
2845
2846 if (TREE_CODE (target) == ADDR_EXPR)
2847 target = TREE_OPERAND (target, 0);
2848 if (TREE_CODE (target) != FUNCTION_DECL)
2849 {
2850 target = canonicalize_constructor_val (target, NULL);
2851 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2852 {
2853 /* Member pointer call that goes through a VMT lookup. */
2854 if (ie->indirect_info->member_ptr
2855 /* Or if target is not an invariant expression and we do not
2856 know if it will evaluate to a function at runtime.
2857 This can happen when folding through &VAR, where &VAR
2858 is IP invariant, but VAR itself is not.
2859
2860 TODO: Revisit this when GCC 5 is branched. It seems that
2861 member_ptr check is not needed and that we may try to fold
2862 the expression and see if VAR is readonly. */
2863 || !is_gimple_ip_invariant (target))
2864 {
2865 if (dump_enabled_p ())
2866 {
2867 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2868 "discovered direct call non-invariant %s\n",
2869 ie->caller->dump_name ());
2870 }
2871 return NULL;
2872 }
2873
2874
2875 if (dump_enabled_p ())
2876 {
2877 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2878 "discovered direct call to non-function in %s, "
2879 "making it __builtin_unreachable\n",
2880 ie->caller->dump_name ());
2881 }
2882
2883 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2884 callee = cgraph_node::get_create (target);
2885 unreachable = true;
2886 }
2887 else
2888 callee = cgraph_node::get (target);
2889 }
2890 else
2891 callee = cgraph_node::get (target);
2892
2893 /* Because may-edges are not explicitly represented and the vtable may be
2894 external, we may create the first reference to the object in the unit. */
2895 if (!callee || callee->global.inlined_to)
2896 {
2897
2898 /* We had better ensure we can refer to it.
2899 In the case of static functions we are out of luck, since we have
2900 already removed their bodies. In the case of public functions we may
2901 or may not introduce the reference. */
2902 if (!canonicalize_constructor_val (target, NULL)
2903 || !TREE_PUBLIC (target))
2904 {
2905 if (dump_file)
2906 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2907 "(%s -> %s) but cannot refer to it. Giving up.\n",
2908 ie->caller->dump_name (),
2909 ie->callee->dump_name ());
2910 return NULL;
2911 }
2912 callee = cgraph_node::get_create (target);
2913 }
2914
2915 /* If the edge is already speculative, check whether the speculation agrees. */
2916 if (speculative && ie->speculative)
2917 {
2918 struct cgraph_edge *e2;
2919 struct ipa_ref *ref;
2920 ie->speculative_call_info (e2, ie, ref);
2921 if (e2->callee->ultimate_alias_target ()
2922 != callee->ultimate_alias_target ())
2923 {
2924 if (dump_file)
2925 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2926 "target (%s -> %s) but the call is already "
2927 "speculated to %s. Giving up.\n",
2928 ie->caller->dump_name (), callee->dump_name (),
2929 e2->callee->dump_name ());
2930 }
2931 else
2932 {
2933 if (dump_file)
2934 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2935 "(%s -> %s) this agree with previous speculation.\n",
2936 ie->caller->dump_name (), callee->dump_name ());
2937 }
2938 return NULL;
2939 }
2940
2941 if (!dbg_cnt (devirt))
2942 return NULL;
2943
2944 ipa_check_create_node_params ();
2945
2946 /* We cannot make edges to inline clones. It is a bug that someone removed
2947 the cgraph node too early. */
2948 gcc_assert (!callee->global.inlined_to);
2949
2950 if (dump_file && !unreachable)
2951 {
2952 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2953 "(%s -> %s), for stmt ",
2954 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2955 speculative ? "speculative" : "known",
2956 ie->caller->dump_name (),
2957 callee->dump_name ());
2958 if (ie->call_stmt)
2959 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2960 else
2961 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2962 }
2963 if (dump_enabled_p ())
2964 {
2965 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2966 "converting indirect call in %s to direct call to %s\n",
2967 ie->caller->name (), callee->name ());
2968 }
2969 if (!speculative)
2970 {
2971 struct cgraph_edge *orig = ie;
2972 ie = ie->make_direct (callee);
2973 /* If we resolved a speculative edge, the cost is already up to date
2974 for the direct call (adjusted by inline_edge_duplication_hook). */
2975 if (ie == orig)
2976 {
2977 ipa_call_summary *es = ipa_call_summaries->get (ie);
2978 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2979 - eni_size_weights.call_cost);
2980 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2981 - eni_time_weights.call_cost);
2982 }
2983 }
2984 else
2985 {
2986 if (!callee->can_be_discarded_p ())
2987 {
2988 cgraph_node *alias;
2989 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2990 if (alias)
2991 callee = alias;
2992 }
2993 /* make_speculative will update ie's cost to direct call cost. */
2994 ie = ie->make_speculative
2995 (callee, ie->count.apply_scale (8, 10));
2996 }
2997
2998 return ie;
2999 }
3000
3001 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3002 CONSTRUCTOR and return it. Return NULL if the search fails for some
3003 reason. */
3004
3005 static tree
3006 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3007 {
3008 tree type = TREE_TYPE (constructor);
3009 if (TREE_CODE (type) != ARRAY_TYPE
3010 && TREE_CODE (type) != RECORD_TYPE)
3011 return NULL;
3012
3013 unsigned ix;
3014 tree index, val;
3015 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3016 {
3017 HOST_WIDE_INT elt_offset;
3018 if (TREE_CODE (type) == ARRAY_TYPE)
3019 {
3020 offset_int off;
3021 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3022 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3023
3024 if (index)
3025 {
3026 if (TREE_CODE (index) == RANGE_EXPR)
3027 off = wi::to_offset (TREE_OPERAND (index, 0));
3028 else
3029 off = wi::to_offset (index);
3030 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3031 {
3032 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3033 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3034 off = wi::sext (off - wi::to_offset (low_bound),
3035 TYPE_PRECISION (TREE_TYPE (index)));
3036 }
3037 off *= wi::to_offset (unit_size);
3038 /* ??? Handle more than just the first index of a
3039 RANGE_EXPR. */
3040 }
3041 else
3042 off = wi::to_offset (unit_size) * ix;
3043
3044 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3045 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3046 continue;
3047 elt_offset = off.to_shwi ();
3048 }
3049 else if (TREE_CODE (type) == RECORD_TYPE)
3050 {
3051 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3052 if (DECL_BIT_FIELD (index))
3053 continue;
3054 elt_offset = int_bit_position (index);
3055 }
3056 else
3057 gcc_unreachable ();
3058
3059 if (elt_offset > req_offset)
3060 return NULL;
3061
3062 if (TREE_CODE (val) == CONSTRUCTOR)
3063 return find_constructor_constant_at_offset (val,
3064 req_offset - elt_offset);
3065
3066 if (elt_offset == req_offset
3067 && is_gimple_reg_type (TREE_TYPE (val))
3068 && is_gimple_ip_invariant (val))
3069 return val;
3070 }
3071 return NULL;
3072 }
3073
3074 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3075 invariant from a static constructor and if so, return it. Otherwise return
3076 NULL. */
3077
3078 static tree
3079 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3080 {
3081 if (by_ref)
3082 {
3083 if (TREE_CODE (scalar) != ADDR_EXPR)
3084 return NULL;
3085 scalar = TREE_OPERAND (scalar, 0);
3086 }
3087
3088 if (!VAR_P (scalar)
3089 || !is_global_var (scalar)
3090 || !TREE_READONLY (scalar)
3091 || !DECL_INITIAL (scalar)
3092 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3093 return NULL;
3094
3095 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3096 }
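
/* Editorial illustration, not part of the original source; the identifiers
   below are made up.  Given

     static const struct ops { int tag; int (*hook) (int); } my_ops = { 1, foo };

   and a call that passes &my_ops, SCALAR is the ADDR_EXPR &my_ops, BY_REF is
   true, and the constructor walk above returns the address of foo when OFFSET
   equals the bit position of the hook field.  */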
3097
3098 /* Retrieve value from aggregate jump function AGG or static initializer of
3099 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3100 none. BY_REF specifies whether the value has to be passed by reference or
3101 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3102 to is set to true if the value comes from an initializer of a constant. */
3103
3104 tree
3105 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3106 HOST_WIDE_INT offset, bool by_ref,
3107 bool *from_global_constant)
3108 {
3109 struct ipa_agg_jf_item *item;
3110 int i;
3111
3112 if (scalar)
3113 {
3114 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3115 if (res)
3116 {
3117 if (from_global_constant)
3118 *from_global_constant = true;
3119 return res;
3120 }
3121 }
3122
3123 if (!agg
3124 || by_ref != agg->by_ref)
3125 return NULL;
3126
3127 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3128 if (item->offset == offset)
3129 {
3130 /* Currently we do not have clobber values, return NULL for them once
3131 we do. */
3132 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3133 if (from_global_constant)
3134 *from_global_constant = false;
3135 return item->value;
3136 }
3137 return NULL;
3138 }
3139
3140 /* Remove a reference to SYMBOL from the list of references of a node given by
3141 reference description RDESC. Return true if the reference has been
3142 successfully found and removed. */
3143
3144 static bool
3145 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3146 {
3147 struct ipa_ref *to_del;
3148 struct cgraph_edge *origin;
3149
3150 origin = rdesc->cs;
3151 if (!origin)
3152 return false;
3153 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3154 origin->lto_stmt_uid);
3155 if (!to_del)
3156 return false;
3157
3158 to_del->remove_reference ();
3159 if (dump_file)
3160 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3161 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3162 return true;
3163 }
3164
3165 /* If JFUNC has a reference description with refcount different from
3166 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3167 NULL. JFUNC must be a constant jump function. */
3168
3169 static struct ipa_cst_ref_desc *
3170 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3171 {
3172 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3173 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3174 return rdesc;
3175 else
3176 return NULL;
3177 }
3178
3179 /* If the value of constant jump function JFUNC is an address of a function
3180 declaration, return the associated call graph node. Otherwise return
3181 NULL. */
3182
3183 static cgraph_node *
3184 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3185 {
3186 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3187 tree cst = ipa_get_jf_constant (jfunc);
3188 if (TREE_CODE (cst) != ADDR_EXPR
3189 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3190 return NULL;
3191
3192 return cgraph_node::get (TREE_OPERAND (cst, 0));
3193 }
3194
3195
3196 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3197 refcount and if it hits zero, remove the reference to the referred-to symbol
3198 from the caller of the edge specified in the rdesc. Return false if either
3199 the symbol or the reference could not be found, otherwise return true. */
3200
3201 static bool
3202 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3203 {
3204 struct ipa_cst_ref_desc *rdesc;
3205 if (jfunc->type == IPA_JF_CONST
3206 && (rdesc = jfunc_rdesc_usable (jfunc))
3207 && --rdesc->refcount == 0)
3208 {
3209 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3210 if (!symbol)
3211 return false;
3212
3213 return remove_described_reference (symbol, rdesc);
3214 }
3215 return true;
3216 }
3217
3218 /* Try to find a destination for indirect edge IE that corresponds to a simple
3219 call or a call of a member function pointer and where the destination is a
3220 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3221 the type of the parameter to which the result of JFUNC is passed. If it can
3222 be determined, return the newly direct edge, otherwise return NULL.
3223 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3224
3225 static struct cgraph_edge *
3226 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3227 struct ipa_jump_func *jfunc, tree target_type,
3228 class ipa_node_params *new_root_info)
3229 {
3230 struct cgraph_edge *cs;
3231 tree target;
3232 bool agg_contents = ie->indirect_info->agg_contents;
3233 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3234 if (agg_contents)
3235 {
3236 bool from_global_constant;
3237 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3238 ie->indirect_info->offset,
3239 ie->indirect_info->by_ref,
3240 &from_global_constant);
3241 if (target
3242 && !from_global_constant
3243 && !ie->indirect_info->guaranteed_unmodified)
3244 return NULL;
3245 }
3246 else
3247 target = scalar;
3248 if (!target)
3249 return NULL;
3250 cs = ipa_make_edge_direct_to_target (ie, target);
3251
3252 if (cs && !agg_contents)
3253 {
3254 bool ok;
3255 gcc_checking_assert (cs->callee
3256 && (cs != ie
3257 || jfunc->type != IPA_JF_CONST
3258 || !cgraph_node_for_jfunc (jfunc)
3259 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3260 ok = try_decrement_rdesc_refcount (jfunc);
3261 gcc_checking_assert (ok);
3262 }
3263
3264 return cs;
3265 }
3266
3267 /* Return the target to be used in cases of impossible devirtualization. IE
3268 and target (the latter can be NULL) are dumped when dumping is enabled. */
3269
3270 tree
3271 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3272 {
3273 if (dump_file)
3274 {
3275 if (target)
3276 fprintf (dump_file,
3277 "Type inconsistent devirtualization: %s->%s\n",
3278 ie->caller->dump_name (),
3279 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3280 else
3281 fprintf (dump_file,
3282 "No devirtualization target in %s\n",
3283 ie->caller->dump_name ());
3284 }
3285 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3286 cgraph_node::get_create (new_target);
3287 return new_target;
3288 }
3289
3290 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3291 call based on a formal parameter which is described by jump function JFUNC
3292 and if it can be determined, make it direct and return the direct edge.
3293 Otherwise, return NULL. CTX describes the polymorphic context carried by
3294 the parameter on which the call is based. */
3295
3296 static struct cgraph_edge *
3297 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3298 struct ipa_jump_func *jfunc,
3299 class ipa_polymorphic_call_context ctx)
3300 {
3301 tree target = NULL;
3302 bool speculative = false;
3303
3304 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3305 return NULL;
3306
3307 gcc_assert (!ie->indirect_info->by_ref);
3308
3309 /* Try to do lookup via known virtual table pointer value. */
3310 if (!ie->indirect_info->vptr_changed
3311 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3312 {
3313 tree vtable;
3314 unsigned HOST_WIDE_INT offset;
3315 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3316 : NULL;
3317 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3318 ie->indirect_info->offset,
3319 true);
3320 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3321 {
3322 bool can_refer;
3323 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3324 vtable, offset, &can_refer);
3325 if (can_refer)
3326 {
3327 if (!t
3328 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3329 || !possible_polymorphic_call_target_p
3330 (ie, cgraph_node::get (t)))
3331 {
3332 /* Do not speculate builtin_unreachable, it is stupid! */
3333 if (!ie->indirect_info->vptr_changed)
3334 target = ipa_impossible_devirt_target (ie, target);
3335 else
3336 target = NULL;
3337 }
3338 else
3339 {
3340 target = t;
3341 speculative = ie->indirect_info->vptr_changed;
3342 }
3343 }
3344 }
3345 }
3346
3347 ipa_polymorphic_call_context ie_context (ie);
3348 vec <cgraph_node *>targets;
3349 bool final;
3350
3351 ctx.offset_by (ie->indirect_info->offset);
3352 if (ie->indirect_info->vptr_changed)
3353 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3354 ie->indirect_info->otr_type);
3355 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3356 targets = possible_polymorphic_call_targets
3357 (ie->indirect_info->otr_type,
3358 ie->indirect_info->otr_token,
3359 ctx, &final);
3360 if (final && targets.length () <= 1)
3361 {
3362 speculative = false;
3363 if (targets.length () == 1)
3364 target = targets[0]->decl;
3365 else
3366 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3367 }
3368 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3369 && !ie->speculative && ie->maybe_hot_p ())
3370 {
3371 cgraph_node *n;
3372 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3373 ie->indirect_info->otr_token,
3374 ie->indirect_info->context);
3375 if (n)
3376 {
3377 target = n->decl;
3378 speculative = true;
3379 }
3380 }
3381
3382 if (target)
3383 {
3384 if (!possible_polymorphic_call_target_p
3385 (ie, cgraph_node::get_create (target)))
3386 {
3387 if (speculative)
3388 return NULL;
3389 target = ipa_impossible_devirt_target (ie, target);
3390 }
3391 return ipa_make_edge_direct_to_target (ie, target, speculative);
3392 }
3393 else
3394 return NULL;
3395 }
3396
3397 /* Update the param called notes associated with NODE when CS is being inlined,
3398 assuming NODE is (potentially indirectly) inlined into CS->callee.
3399 Moreover, if the callee is discovered to be constant, create a new cgraph
3400 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3401 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3402
3403 static bool
3404 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3405 struct cgraph_node *node,
3406 vec<cgraph_edge *> *new_edges)
3407 {
3408 class ipa_edge_args *top;
3409 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3410 class ipa_node_params *new_root_info, *inlined_node_info;
3411 bool res = false;
3412
3413 ipa_check_create_edge_args ();
3414 top = IPA_EDGE_REF (cs);
3415 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3416 ? cs->caller->global.inlined_to
3417 : cs->caller);
3418 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3419
3420 for (ie = node->indirect_calls; ie; ie = next_ie)
3421 {
3422 class cgraph_indirect_call_info *ici = ie->indirect_info;
3423 struct ipa_jump_func *jfunc;
3424 int param_index;
3425 cgraph_node *spec_target = NULL;
3426
3427 next_ie = ie->next_callee;
3428
3429 if (ici->param_index == -1)
3430 continue;
3431
3432 /* We must check range due to calls with variable number of arguments: */
3433 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3434 {
3435 ici->param_index = -1;
3436 continue;
3437 }
3438
3439 param_index = ici->param_index;
3440 jfunc = ipa_get_ith_jump_func (top, param_index);
3441
3442 if (ie->speculative)
3443 {
3444 struct cgraph_edge *de;
3445 struct ipa_ref *ref;
3446 ie->speculative_call_info (de, ie, ref);
3447 spec_target = de->callee;
3448 }
3449
3450 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3451 new_direct_edge = NULL;
3452 else if (ici->polymorphic)
3453 {
3454 ipa_polymorphic_call_context ctx;
3455 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3456 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3457 }
3458 else
3459 {
3460 tree target_type = ipa_get_type (inlined_node_info, param_index);
3461 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3462 target_type,
3463 new_root_info);
3464 }
3465
3466 /* If speculation was removed, then we need to do nothing. */
3467 if (new_direct_edge && new_direct_edge != ie
3468 && new_direct_edge->callee == spec_target)
3469 {
3470 new_direct_edge->indirect_inlining_edge = 1;
3471 top = IPA_EDGE_REF (cs);
3472 res = true;
3473 if (!new_direct_edge->speculative)
3474 continue;
3475 }
3476 else if (new_direct_edge)
3477 {
3478 new_direct_edge->indirect_inlining_edge = 1;
3479 if (new_direct_edge->call_stmt)
3480 new_direct_edge->call_stmt_cannot_inline_p
3481 = !gimple_check_call_matching_types (
3482 new_direct_edge->call_stmt,
3483 new_direct_edge->callee->decl, false);
3484 if (new_edges)
3485 {
3486 new_edges->safe_push (new_direct_edge);
3487 res = true;
3488 }
3489 top = IPA_EDGE_REF (cs);
3490 /* If a speculative edge was introduced, we still need to update the
3491 call info of the indirect edge. */
3492 if (!new_direct_edge->speculative)
3493 continue;
3494 }
3495 if (jfunc->type == IPA_JF_PASS_THROUGH
3496 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3497 {
3498 if (ici->agg_contents
3499 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3500 && !ici->polymorphic)
3501 ici->param_index = -1;
3502 else
3503 {
3504 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3505 if (ici->polymorphic
3506 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3507 ici->vptr_changed = true;
3508 }
3509 }
3510 else if (jfunc->type == IPA_JF_ANCESTOR)
3511 {
3512 if (ici->agg_contents
3513 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3514 && !ici->polymorphic)
3515 ici->param_index = -1;
3516 else
3517 {
3518 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3519 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3520 if (ici->polymorphic
3521 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3522 ici->vptr_changed = true;
3523 }
3524 }
3525 else
3526 /* Either we can find a destination for this edge now or never. */
3527 ici->param_index = -1;
3528 }
3529
3530 return res;
3531 }
3532
3533 /* Recursively traverse subtree of NODE (including node) made of inlined
3534 cgraph_edges when CS has been inlined and invoke
3535 update_indirect_edges_after_inlining on all nodes and
3536 update_jump_functions_after_inlining on all non-inlined edges that lead out
3537 of this subtree. Newly discovered indirect edges will be added to
3538 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3539 created. */
3540
3541 static bool
3542 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3543 struct cgraph_node *node,
3544 vec<cgraph_edge *> *new_edges)
3545 {
3546 struct cgraph_edge *e;
3547 bool res;
3548
3549 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3550
3551 for (e = node->callees; e; e = e->next_callee)
3552 if (!e->inline_failed)
3553 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3554 else
3555 update_jump_functions_after_inlining (cs, e);
3556 for (e = node->indirect_calls; e; e = e->next_callee)
3557 update_jump_functions_after_inlining (cs, e);
3558
3559 return res;
3560 }
3561
3562 /* Combine two controlled uses counts as done during inlining. */
3563
3564 static int
3565 combine_controlled_uses_counters (int c, int d)
3566 {
3567 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3568 return IPA_UNDESCRIBED_USE;
3569 else
3570 return c + d - 1;
3571 }
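
/* Editorial illustration, not part of the original source: if the caller-side
   parameter has c == 3 described uses (one of them being the argument of the
   inlined call) and the callee-side parameter had d == 2 described uses, the
   combined count is 3 + 2 - 1 == 4; the subtraction removes the call-site use
   itself, which after inlining is replaced by the callee's uses.  */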
3572
3573 /* Propagate the number of controlled users from CS->callee to the new root of the
3574 tree of inlined nodes. */
3575
3576 static void
3577 propagate_controlled_uses (struct cgraph_edge *cs)
3578 {
3579 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3580 if (!args)
3581 return;
3582 struct cgraph_node *new_root = cs->caller->global.inlined_to
3583 ? cs->caller->global.inlined_to : cs->caller;
3584 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3585 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3586 int count, i;
3587
3588 count = MIN (ipa_get_cs_argument_count (args),
3589 ipa_get_param_count (old_root_info));
3590 for (i = 0; i < count; i++)
3591 {
3592 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3593 struct ipa_cst_ref_desc *rdesc;
3594
3595 if (jf->type == IPA_JF_PASS_THROUGH)
3596 {
3597 int src_idx, c, d;
3598 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3599 c = ipa_get_controlled_uses (new_root_info, src_idx);
3600 d = ipa_get_controlled_uses (old_root_info, i);
3601
3602 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3603 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3604 c = combine_controlled_uses_counters (c, d);
3605 ipa_set_controlled_uses (new_root_info, src_idx, c);
3606 if (c == 0 && new_root_info->ipcp_orig_node)
3607 {
3608 struct cgraph_node *n;
3609 struct ipa_ref *ref;
3610 tree t = new_root_info->known_csts[src_idx];
3611
3612 if (t && TREE_CODE (t) == ADDR_EXPR
3613 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3614 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3615 && (ref = new_root->find_reference (n, NULL, 0)))
3616 {
3617 if (dump_file)
3618 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3619 "reference from %s to %s.\n",
3620 new_root->dump_name (),
3621 n->dump_name ());
3622 ref->remove_reference ();
3623 }
3624 }
3625 }
3626 else if (jf->type == IPA_JF_CONST
3627 && (rdesc = jfunc_rdesc_usable (jf)))
3628 {
3629 int d = ipa_get_controlled_uses (old_root_info, i);
3630 int c = rdesc->refcount;
3631 rdesc->refcount = combine_controlled_uses_counters (c, d);
3632 if (rdesc->refcount == 0)
3633 {
3634 tree cst = ipa_get_jf_constant (jf);
3635 struct cgraph_node *n;
3636 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3637 && TREE_CODE (TREE_OPERAND (cst, 0))
3638 == FUNCTION_DECL);
3639 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3640 if (n)
3641 {
3642 struct cgraph_node *clone;
3643 bool ok;
3644 ok = remove_described_reference (n, rdesc);
3645 gcc_checking_assert (ok);
3646
3647 clone = cs->caller;
3648 while (clone->global.inlined_to
3649 && clone != rdesc->cs->caller
3650 && IPA_NODE_REF (clone)->ipcp_orig_node)
3651 {
3652 struct ipa_ref *ref;
3653 ref = clone->find_reference (n, NULL, 0);
3654 if (ref)
3655 {
3656 if (dump_file)
3657 fprintf (dump_file, "ipa-prop: Removing "
3658 "cloning-created reference "
3659 "from %s to %s.\n",
3660 clone->dump_name (),
3661 n->dump_name ());
3662 ref->remove_reference ();
3663 }
3664 clone = clone->callers->caller;
3665 }
3666 }
3667 }
3668 }
3669 }
3670
3671 for (i = ipa_get_param_count (old_root_info);
3672 i < ipa_get_cs_argument_count (args);
3673 i++)
3674 {
3675 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3676
3677 if (jf->type == IPA_JF_CONST)
3678 {
3679 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3680 if (rdesc)
3681 rdesc->refcount = IPA_UNDESCRIBED_USE;
3682 }
3683 else if (jf->type == IPA_JF_PASS_THROUGH)
3684 ipa_set_controlled_uses (new_root_info,
3685 jf->value.pass_through.formal_id,
3686 IPA_UNDESCRIBED_USE);
3687 }
3688 }
3689
3690 /* Update jump functions and call note functions on inlining the call site CS.
3691 CS is expected to lead to a node already cloned by
3692 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3693 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3694 created. */
3695
3696 bool
3697 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3698 vec<cgraph_edge *> *new_edges)
3699 {
3700 bool changed;
3701 /* Do nothing if the preparation phase has not been carried out yet
3702 (i.e. during early inlining). */
3703 if (!ipa_node_params_sum)
3704 return false;
3705 gcc_assert (ipa_edge_args_sum);
3706
3707 propagate_controlled_uses (cs);
3708 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3709
3710 return changed;
3711 }
3712
3713 /* Ensure that the array of edge argument infos is big enough to accommodate a
3714 structure for all edges, and reallocate it if not. Also allocate the
3715 associated hash tables if they do not already exist. */
3716
3717 void
3718 ipa_check_create_edge_args (void)
3719 {
3720 if (!ipa_edge_args_sum)
3721 ipa_edge_args_sum
3722 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3723 ipa_edge_args_sum_t (symtab, true));
3724 if (!ipa_bits_hash_table)
3725 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3726 if (!ipa_vr_hash_table)
3727 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3728 }
3729
3730 /* Free all ipa_edge structures. */
3731
3732 void
3733 ipa_free_all_edge_args (void)
3734 {
3735 if (!ipa_edge_args_sum)
3736 return;
3737
3738 ipa_edge_args_sum->release ();
3739 ipa_edge_args_sum = NULL;
3740 }
3741
3742 /* Free all ipa_node_params structures. */
3743
3744 void
3745 ipa_free_all_node_params (void)
3746 {
3747 ipa_node_params_sum->release ();
3748 ipa_node_params_sum = NULL;
3749 }
3750
3751 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3752 tables if they do not already exist. */
3753
3754 void
3755 ipcp_transformation_initialize (void)
3756 {
3757 if (!ipa_bits_hash_table)
3758 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3759 if (!ipa_vr_hash_table)
3760 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3761 if (ipcp_transformation_sum == NULL)
3762 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3763 }
3764
3765 /* Release the IPA CP transformation summary. */
3766
3767 void
3768 ipcp_free_transformation_sum (void)
3769 {
3770 if (!ipcp_transformation_sum)
3771 return;
3772
3773 ipcp_transformation_sum->release ();
3774 ipcp_transformation_sum = NULL;
3775 }
3776
3777 /* Set the aggregate replacements of NODE to be AGGVALS. */
3778
3779 void
3780 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3781 struct ipa_agg_replacement_value *aggvals)
3782 {
3783 ipcp_transformation_initialize ();
3784 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3785 s->agg_values = aggvals;
3786 }
3787
3788 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3789 count data structures accordingly. */
3790
3791 void
3792 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3793 {
3794 if (args->jump_functions)
3795 {
3796 struct ipa_jump_func *jf;
3797 int i;
3798 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3799 {
3800 struct ipa_cst_ref_desc *rdesc;
3801 try_decrement_rdesc_refcount (jf);
3802 if (jf->type == IPA_JF_CONST
3803 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3804 && rdesc->cs == cs)
3805 rdesc->cs = NULL;
3806 }
3807 }
3808 }
3809
3810 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3811 reference count data structures accordingly. */
3812
3813 void
3814 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3815 ipa_edge_args *old_args, ipa_edge_args *new_args)
3816 {
3817 unsigned int i;
3818
3819 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3820 if (old_args->polymorphic_call_contexts)
3821 new_args->polymorphic_call_contexts
3822 = vec_safe_copy (old_args->polymorphic_call_contexts);
3823
3824 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3825 {
3826 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3827 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3828
3829 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3830
3831 if (src_jf->type == IPA_JF_CONST)
3832 {
3833 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3834
3835 if (!src_rdesc)
3836 dst_jf->value.constant.rdesc = NULL;
3837 else if (src->caller == dst->caller)
3838 {
3839 struct ipa_ref *ref;
3840 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3841 gcc_checking_assert (n);
3842 ref = src->caller->find_reference (n, src->call_stmt,
3843 src->lto_stmt_uid);
3844 gcc_checking_assert (ref);
3845 dst->caller->clone_reference (ref, ref->stmt);
3846
3847 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3848 dst_rdesc->cs = dst;
3849 dst_rdesc->refcount = src_rdesc->refcount;
3850 dst_rdesc->next_duplicate = NULL;
3851 dst_jf->value.constant.rdesc = dst_rdesc;
3852 }
3853 else if (src_rdesc->cs == src)
3854 {
3855 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3856 dst_rdesc->cs = dst;
3857 dst_rdesc->refcount = src_rdesc->refcount;
3858 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3859 src_rdesc->next_duplicate = dst_rdesc;
3860 dst_jf->value.constant.rdesc = dst_rdesc;
3861 }
3862 else
3863 {
3864 struct ipa_cst_ref_desc *dst_rdesc;
3865 /* This can happen during inlining, when a JFUNC can refer to a
3866 reference taken in a function up in the tree of inline clones.
3867 We need to find the duplicate that refers to our tree of
3868 inline clones. */
3869
3870 gcc_assert (dst->caller->global.inlined_to);
3871 for (dst_rdesc = src_rdesc->next_duplicate;
3872 dst_rdesc;
3873 dst_rdesc = dst_rdesc->next_duplicate)
3874 {
3875 struct cgraph_node *top;
3876 top = dst_rdesc->cs->caller->global.inlined_to
3877 ? dst_rdesc->cs->caller->global.inlined_to
3878 : dst_rdesc->cs->caller;
3879 if (dst->caller->global.inlined_to == top)
3880 break;
3881 }
3882 gcc_assert (dst_rdesc);
3883 dst_jf->value.constant.rdesc = dst_rdesc;
3884 }
3885 }
3886 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3887 && src->caller == dst->caller)
3888 {
3889 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3890 ? dst->caller->global.inlined_to : dst->caller;
3891 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3892 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3893
3894 int c = ipa_get_controlled_uses (root_info, idx);
3895 if (c != IPA_UNDESCRIBED_USE)
3896 {
3897 c++;
3898 ipa_set_controlled_uses (root_info, idx, c);
3899 }
3900 }
3901 }
3902 }
3903
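/* In summary, when a usable source rdesc exists, the constant handling above
   distinguishes three situations: when SRC and DST share a caller, a fresh
   rdesc is created and the underlying ipa_ref is cloned; when the source
   rdesc still belongs to the edge being duplicated, the fresh rdesc is linked
   into its next_duplicate chain; otherwise the duplication happens within a
   tree of inline clones and the matching duplicate is looked up on that chain
   by comparing the roots of the inline trees.  */
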
3904 /* Analyze a function newly added to the callgraph. */
3905
3906 static void
3907 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3908 {
3909 if (node->has_gimple_body_p ())
3910 ipa_analyze_node (node);
3911 }
3912
3913 /* Hook that is called by summary when a node is duplicated. */
3914
3915 void
3916 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3917 ipa_node_params *old_info,
3918 ipa_node_params *new_info)
3919 {
3920 ipa_agg_replacement_value *old_av, *new_av;
3921
3922 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3923 new_info->lattices = NULL;
3924 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3925 new_info->known_csts = old_info->known_csts.copy ();
3926 new_info->known_contexts = old_info->known_contexts.copy ();
3927
3928 new_info->analysis_done = old_info->analysis_done;
3929 new_info->node_enqueued = old_info->node_enqueued;
3930 new_info->versionable = old_info->versionable;
3931
3932 old_av = ipa_get_agg_replacements_for_node (src);
3933 if (old_av)
3934 {
3935 new_av = NULL;
3936 while (old_av)
3937 {
3938 struct ipa_agg_replacement_value *v;
3939
3940 v = ggc_alloc<ipa_agg_replacement_value> ();
3941 memcpy (v, old_av, sizeof (*v));
3942 v->next = new_av;
3943 new_av = v;
3944 old_av = old_av->next;
3945 }
3946 ipa_set_node_agg_value_chain (dst, new_av);
3947 }
3948
3949 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3950
3951 if (src_trans)
3952 {
3953 ipcp_transformation_initialize ();
3954 src_trans = ipcp_transformation_sum->get_create (src);
3955 ipcp_transformation *dst_trans
3956 = ipcp_transformation_sum->get_create (dst);
3957
3958 dst_trans->bits = vec_safe_copy (src_trans->bits);
3959
3960 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3961 vec<ipa_vr, va_gc> *&dst_vr
3962 = ipcp_get_transformation_summary (dst)->m_vr;
3963 if (vec_safe_length (src_trans->m_vr) > 0)
3964 {
3965 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3966 for (unsigned i = 0; i < src_vr->length (); ++i)
3967 dst_vr->quick_push ((*src_vr)[i]);
3968 }
3969 }
3970 }
3971
3972 /* Register our cgraph hooks if they are not already there. */
3973
3974 void
3975 ipa_register_cgraph_hooks (void)
3976 {
3977 ipa_check_create_node_params ();
3978 ipa_check_create_edge_args ();
3979
3980 function_insertion_hook_holder =
3981 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3982 }
3983
3984 /* Unregister our cgraph hooks. */
3985
3986 static void
3987 ipa_unregister_cgraph_hooks (void)
3988 {
3989 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3990 function_insertion_hook_holder = NULL;
3991 }
3992
3993 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3994 longer needed after ipa-cp. */
3995
3996 void
3997 ipa_free_all_structures_after_ipa_cp (void)
3998 {
3999 if (!optimize && !in_lto_p)
4000 {
4001 ipa_free_all_edge_args ();
4002 ipa_free_all_node_params ();
4003 ipcp_sources_pool.release ();
4004 ipcp_cst_values_pool.release ();
4005 ipcp_poly_ctx_values_pool.release ();
4006 ipcp_agg_lattice_pool.release ();
4007 ipa_unregister_cgraph_hooks ();
4008 ipa_refdesc_pool.release ();
4009 }
4010 }
4011
4012 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4013 longer needed after indirect inlining. */
4014
4015 void
4016 ipa_free_all_structures_after_iinln (void)
4017 {
4018 ipa_free_all_edge_args ();
4019 ipa_free_all_node_params ();
4020 ipa_unregister_cgraph_hooks ();
4021 ipcp_sources_pool.release ();
4022 ipcp_cst_values_pool.release ();
4023 ipcp_poly_ctx_values_pool.release ();
4024 ipcp_agg_lattice_pool.release ();
4025 ipa_refdesc_pool.release ();
4026 }
4027
4028 /* Print ipa_node_params data (parameter descriptors) of function NODE
4029 to F. */
4030
4031 void
4032 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4033 {
4034 int i, count;
4035 class ipa_node_params *info;
4036
4037 if (!node->definition)
4038 return;
4039 info = IPA_NODE_REF (node);
4040 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4041 count = ipa_get_param_count (info);
4042 for (i = 0; i < count; i++)
4043 {
4044 int c;
4045
4046 fprintf (f, " ");
4047 ipa_dump_param (f, info, i);
4048 if (ipa_is_param_used (info, i))
4049 fprintf (f, " used");
4050 c = ipa_get_controlled_uses (info, i);
4051 if (c == IPA_UNDESCRIBED_USE)
4052 fprintf (f, " undescribed_use");
4053 else
4054 fprintf (f, " controlled_uses=%i", c);
4055 fprintf (f, "\n");
4056 }
4057 }
4058
4059 /* Print ipa_node_params data structures of all functions in the
4060 callgraph to F. */
4061
4062 void
4063 ipa_print_all_params (FILE * f)
4064 {
4065 struct cgraph_node *node;
4066
4067 fprintf (f, "\nFunction parameters:\n");
4068 FOR_EACH_FUNCTION (node)
4069 ipa_print_node_params (f, node);
4070 }
4071
4072 /* Dump the AV linked list. */
4073
4074 void
4075 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4076 {
4077 bool comma = false;
4078 fprintf (f, " Aggregate replacements:");
4079 for (; av; av = av->next)
4080 {
4081 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4082 av->index, av->offset);
4083 print_generic_expr (f, av->value);
4084 comma = true;
4085 }
4086 fprintf (f, "\n");
4087 }
4088
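/* For orientation, the output produced above looks roughly like
     Aggregate replacements: 1[32]=4, 2[0]=some_value
   i.e. comma separated "index[offset]=value" entries, where the rendering of
   each value is whatever print_generic_expr produces for it.  */
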
4089 /* Stream out jump function JUMP_FUNC to OB. */
4090
4091 static void
4092 ipa_write_jump_function (struct output_block *ob,
4093 struct ipa_jump_func *jump_func)
4094 {
4095 struct ipa_agg_jf_item *item;
4096 struct bitpack_d bp;
4097 int i, count;
4098 int flag = 0;
4099
4100 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4101 as well as WPA memory by handling them specially. */
4102 if (jump_func->type == IPA_JF_CONST
4103 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4104 flag = 1;
4105
4106 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4107 switch (jump_func->type)
4108 {
4109 case IPA_JF_UNKNOWN:
4110 break;
4111 case IPA_JF_CONST:
4112 gcc_assert (
4113 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4114 stream_write_tree (ob,
4115 flag
4116 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4117 : jump_func->value.constant.value, true);
4118 break;
4119 case IPA_JF_PASS_THROUGH:
4120 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4121 if (jump_func->value.pass_through.operation == NOP_EXPR)
4122 {
4123 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4124 bp = bitpack_create (ob->main_stream);
4125 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4126 streamer_write_bitpack (&bp);
4127 }
4128 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4129 == tcc_unary)
4130 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4131 else
4132 {
4133 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4134 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4135 }
4136 break;
4137 case IPA_JF_ANCESTOR:
4138 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4139 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4140 bp = bitpack_create (ob->main_stream);
4141 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4142 streamer_write_bitpack (&bp);
4143 break;
4144 }
4145
4146 count = vec_safe_length (jump_func->agg.items);
4147 streamer_write_uhwi (ob, count);
4148 if (count)
4149 {
4150 bp = bitpack_create (ob->main_stream);
4151 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4152 streamer_write_bitpack (&bp);
4153 }
4154
4155 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4156 {
4157 streamer_write_uhwi (ob, item->offset);
4158 stream_write_tree (ob, item->value, true);
4159 }
4160
4161 bp = bitpack_create (ob->main_stream);
4162 bp_pack_value (&bp, !!jump_func->bits, 1);
4163 streamer_write_bitpack (&bp);
4164 if (jump_func->bits)
4165 {
4166 streamer_write_widest_int (ob, jump_func->bits->value);
4167 streamer_write_widest_int (ob, jump_func->bits->mask);
4168 }
4169 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4170 streamer_write_bitpack (&bp);
4171 if (jump_func->m_vr)
4172 {
4173 streamer_write_enum (ob->main_stream, value_rang_type,
4174 VR_LAST, jump_func->m_vr->kind ());
4175 stream_write_tree (ob, jump_func->m_vr->min (), true);
4176 stream_write_tree (ob, jump_func->m_vr->max (), true);
4177 }
4178 }
4179
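/* A small worked example of the tag written first by the function above: the
   streamed value is jump_func->type * 2 + flag, with FLAG set for an
   IPA_JF_CONST whose value is an ADDR_EXPR (only the operand is streamed in
   that case).  Assuming IPA_JF_CONST has the enumeration value 1, a jump
   function describing &foo is therefore tagged 3, and ipa_read_jump_function
   below recovers flag = val & 1 and type = val / 2.  */
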
4180 /* Read in jump function JUMP_FUNC from IB. */
4181
4182 static void
4183 ipa_read_jump_function (class lto_input_block *ib,
4184 struct ipa_jump_func *jump_func,
4185 struct cgraph_edge *cs,
4186 class data_in *data_in,
4187 bool prevails)
4188 {
4189 enum jump_func_type jftype;
4190 enum tree_code operation;
4191 int i, count;
4192 int val = streamer_read_uhwi (ib);
4193 bool flag = val & 1;
4194
4195 jftype = (enum jump_func_type) (val / 2);
4196 switch (jftype)
4197 {
4198 case IPA_JF_UNKNOWN:
4199 ipa_set_jf_unknown (jump_func);
4200 break;
4201 case IPA_JF_CONST:
4202 {
4203 tree t = stream_read_tree (ib, data_in);
4204 if (flag && prevails)
4205 t = build_fold_addr_expr (t);
4206 ipa_set_jf_constant (jump_func, t, cs);
4207 }
4208 break;
4209 case IPA_JF_PASS_THROUGH:
4210 operation = (enum tree_code) streamer_read_uhwi (ib);
4211 if (operation == NOP_EXPR)
4212 {
4213 int formal_id = streamer_read_uhwi (ib);
4214 struct bitpack_d bp = streamer_read_bitpack (ib);
4215 bool agg_preserved = bp_unpack_value (&bp, 1);
4216 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4217 }
4218 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4219 {
4220 int formal_id = streamer_read_uhwi (ib);
4221 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4222 }
4223 else
4224 {
4225 tree operand = stream_read_tree (ib, data_in);
4226 int formal_id = streamer_read_uhwi (ib);
4227 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4228 operation);
4229 }
4230 break;
4231 case IPA_JF_ANCESTOR:
4232 {
4233 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4234 int formal_id = streamer_read_uhwi (ib);
4235 struct bitpack_d bp = streamer_read_bitpack (ib);
4236 bool agg_preserved = bp_unpack_value (&bp, 1);
4237 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4238 break;
4239 }
4240 default:
4241 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4242 }
4243
4244 count = streamer_read_uhwi (ib);
4245 if (prevails)
4246 vec_alloc (jump_func->agg.items, count);
4247 if (count)
4248 {
4249 struct bitpack_d bp = streamer_read_bitpack (ib);
4250 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4251 }
4252 for (i = 0; i < count; i++)
4253 {
4254 struct ipa_agg_jf_item item;
4255 item.offset = streamer_read_uhwi (ib);
4256 item.value = stream_read_tree (ib, data_in);
4257 if (prevails)
4258 jump_func->agg.items->quick_push (item);
4259 }
4260
4261 struct bitpack_d bp = streamer_read_bitpack (ib);
4262 bool bits_known = bp_unpack_value (&bp, 1);
4263 if (bits_known)
4264 {
4265 widest_int value = streamer_read_widest_int (ib);
4266 widest_int mask = streamer_read_widest_int (ib);
4267 if (prevails)
4268 ipa_set_jfunc_bits (jump_func, value, mask);
4269 }
4270 else
4271 jump_func->bits = NULL;
4272
4273 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4274 bool vr_known = bp_unpack_value (&vr_bp, 1);
4275 if (vr_known)
4276 {
4277 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4278 VR_LAST);
4279 tree min = stream_read_tree (ib, data_in);
4280 tree max = stream_read_tree (ib, data_in);
4281 if (prevails)
4282 ipa_set_jfunc_vr (jump_func, type, min, max);
4283 }
4284 else
4285 jump_func->m_vr = NULL;
4286 }
4287
4288 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4289 relevant to indirect inlining to OB. */
4290
4291 static void
4292 ipa_write_indirect_edge_info (struct output_block *ob,
4293 struct cgraph_edge *cs)
4294 {
4295 class cgraph_indirect_call_info *ii = cs->indirect_info;
4296 struct bitpack_d bp;
4297
4298 streamer_write_hwi (ob, ii->param_index);
4299 bp = bitpack_create (ob->main_stream);
4300 bp_pack_value (&bp, ii->polymorphic, 1);
4301 bp_pack_value (&bp, ii->agg_contents, 1);
4302 bp_pack_value (&bp, ii->member_ptr, 1);
4303 bp_pack_value (&bp, ii->by_ref, 1);
4304 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4305 bp_pack_value (&bp, ii->vptr_changed, 1);
4306 streamer_write_bitpack (&bp);
4307 if (ii->agg_contents || ii->polymorphic)
4308 streamer_write_hwi (ob, ii->offset);
4309 else
4310 gcc_assert (ii->offset == 0);
4311
4312 if (ii->polymorphic)
4313 {
4314 streamer_write_hwi (ob, ii->otr_token);
4315 stream_write_tree (ob, ii->otr_type, true);
4316 ii->context.stream_out (ob);
4317 }
4318 }
4319
4320 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4321 relevant to indirect inlining from IB. */
4322
4323 static void
4324 ipa_read_indirect_edge_info (class lto_input_block *ib,
4325 class data_in *data_in,
4326 struct cgraph_edge *cs)
4327 {
4328 class cgraph_indirect_call_info *ii = cs->indirect_info;
4329 struct bitpack_d bp;
4330
4331 ii->param_index = (int) streamer_read_hwi (ib);
4332 bp = streamer_read_bitpack (ib);
4333 ii->polymorphic = bp_unpack_value (&bp, 1);
4334 ii->agg_contents = bp_unpack_value (&bp, 1);
4335 ii->member_ptr = bp_unpack_value (&bp, 1);
4336 ii->by_ref = bp_unpack_value (&bp, 1);
4337 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4338 ii->vptr_changed = bp_unpack_value (&bp, 1);
4339 if (ii->agg_contents || ii->polymorphic)
4340 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4341 else
4342 ii->offset = 0;
4343 if (ii->polymorphic)
4344 {
4345 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4346 ii->otr_type = stream_read_tree (ib, data_in);
4347 ii->context.stream_in (ib, data_in);
4348 }
4349 }
4350
4351 /* Stream out NODE info to OB. */
4352
4353 static void
4354 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4355 {
4356 int node_ref;
4357 lto_symtab_encoder_t encoder;
4358 class ipa_node_params *info = IPA_NODE_REF (node);
4359 int j;
4360 struct cgraph_edge *e;
4361 struct bitpack_d bp;
4362
4363 encoder = ob->decl_state->symtab_node_encoder;
4364 node_ref = lto_symtab_encoder_encode (encoder, node);
4365 streamer_write_uhwi (ob, node_ref);
4366
4367 streamer_write_uhwi (ob, ipa_get_param_count (info));
4368 for (j = 0; j < ipa_get_param_count (info); j++)
4369 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4370 bp = bitpack_create (ob->main_stream);
4371 gcc_assert (info->analysis_done
4372 || ipa_get_param_count (info) == 0);
4373 gcc_assert (!info->node_enqueued);
4374 gcc_assert (!info->ipcp_orig_node);
4375 for (j = 0; j < ipa_get_param_count (info); j++)
4376 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4377 streamer_write_bitpack (&bp);
4378 for (j = 0; j < ipa_get_param_count (info); j++)
4379 {
4380 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4381 stream_write_tree (ob, ipa_get_type (info, j), true);
4382 }
4383 for (e = node->callees; e; e = e->next_callee)
4384 {
4385 class ipa_edge_args *args = IPA_EDGE_REF (e);
4386
4387 if (!args)
4388 {
4389 streamer_write_uhwi (ob, 0);
4390 continue;
4391 }
4392
4393 streamer_write_uhwi (ob,
4394 ipa_get_cs_argument_count (args) * 2
4395 + (args->polymorphic_call_contexts != NULL));
4396 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4397 {
4398 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4399 if (args->polymorphic_call_contexts != NULL)
4400 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4401 }
4402 }
4403 for (e = node->indirect_calls; e; e = e->next_callee)
4404 {
4405 class ipa_edge_args *args = IPA_EDGE_REF (e);
4406 if (!args)
4407 streamer_write_uhwi (ob, 0);
4408 else
4409 {
4410 streamer_write_uhwi (ob,
4411 ipa_get_cs_argument_count (args) * 2
4412 + (args->polymorphic_call_contexts != NULL));
4413 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4414 {
4415 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4416 if (args->polymorphic_call_contexts != NULL)
4417 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4418 }
4419 }
4420 ipa_write_indirect_edge_info (ob, e);
4421 }
4422 }
4423
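/* The per-edge counts written above use the same parity encoding as the jump
   function tags: the streamed value is
     ipa_get_cs_argument_count (args) * 2 + (polymorphic_call_contexts != NULL)
   so, as an illustration, an edge with three described arguments and computed
   contexts is streamed as 7.  ipa_read_edge_info below takes the context flag
   from bit zero and halves the value to get the argument count back.  */
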
4424 /* Stream in the jump functions and polymorphic contexts of edge E from IB. */
4425
4426 static void
4427 ipa_read_edge_info (class lto_input_block *ib,
4428 class data_in *data_in,
4429 struct cgraph_edge *e, bool prevails)
4430 {
4431 int count = streamer_read_uhwi (ib);
4432 bool contexts_computed = count & 1;
4433
4434 count /= 2;
4435 if (!count)
4436 return;
4437 if (prevails && e->possibly_call_in_translation_unit_p ())
4438 {
4439 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4440 vec_safe_grow_cleared (args->jump_functions, count);
4441 if (contexts_computed)
4442 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4443 for (int k = 0; k < count; k++)
4444 {
4445 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4446 data_in, prevails);
4447 if (contexts_computed)
4448 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4449 (ib, data_in);
4450 }
4451 }
4452 else
4453 {
4454 for (int k = 0; k < count; k++)
4455 {
4456 struct ipa_jump_func dummy;
4457 ipa_read_jump_function (ib, &dummy, e,
4458 data_in, prevails);
4459 if (contexts_computed)
4460 {
4461 class ipa_polymorphic_call_context ctx;
4462 ctx.stream_in (ib, data_in);
4463 }
4464 }
4465 }
4466 }
4467
4468 /* Stream in NODE info from IB. */
4469
4470 static void
4471 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4472 class data_in *data_in)
4473 {
4474 int k;
4475 struct cgraph_edge *e;
4476 struct bitpack_d bp;
4477 bool prevails = node->prevailing_p ();
4478 class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
4479
4480 int param_count = streamer_read_uhwi (ib);
4481 if (prevails)
4482 {
4483 ipa_alloc_node_params (node, param_count);
4484 for (k = 0; k < param_count; k++)
4485 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4486 if (ipa_get_param_count (info) != 0)
4487 info->analysis_done = true;
4488 info->node_enqueued = false;
4489 }
4490 else
4491 for (k = 0; k < param_count; k++)
4492 streamer_read_uhwi (ib);
4493
4494 bp = streamer_read_bitpack (ib);
4495 for (k = 0; k < param_count; k++)
4496 {
4497 bool used = bp_unpack_value (&bp, 1);
4498
4499 if (prevails)
4500 ipa_set_param_used (info, k, used);
4501 }
4502 for (k = 0; k < param_count; k++)
4503 {
4504 int nuses = streamer_read_hwi (ib);
4505 tree type = stream_read_tree (ib, data_in);
4506
4507 if (prevails)
4508 {
4509 ipa_set_controlled_uses (info, k, nuses);
4510 (*info->descriptors)[k].decl_or_type = type;
4511 }
4512 }
4513 for (e = node->callees; e; e = e->next_callee)
4514 ipa_read_edge_info (ib, data_in, e, prevails);
4515 for (e = node->indirect_calls; e; e = e->next_callee)
4516 {
4517 ipa_read_edge_info (ib, data_in, e, prevails);
4518 ipa_read_indirect_edge_info (ib, data_in, e);
4519 }
4520 }
4521
4522 /* Write jump functions of all functions in the current LTO partition. */
4523
4524 void
4525 ipa_prop_write_jump_functions (void)
4526 {
4527 struct cgraph_node *node;
4528 struct output_block *ob;
4529 unsigned int count = 0;
4530 lto_symtab_encoder_iterator lsei;
4531 lto_symtab_encoder_t encoder;
4532
4533 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4534 return;
4535
4536 ob = create_output_block (LTO_section_jump_functions);
4537 encoder = ob->decl_state->symtab_node_encoder;
4538 ob->symbol = NULL;
4539 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4540 lsei_next_function_in_partition (&lsei))
4541 {
4542 node = lsei_cgraph_node (lsei);
4543 if (node->has_gimple_body_p ()
4544 && IPA_NODE_REF (node) != NULL)
4545 count++;
4546 }
4547
4548 streamer_write_uhwi (ob, count);
4549
4550 /* Process all of the functions. */
4551 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4552 lsei_next_function_in_partition (&lsei))
4553 {
4554 node = lsei_cgraph_node (lsei);
4555 if (node->has_gimple_body_p ()
4556 && IPA_NODE_REF (node) != NULL)
4557 ipa_write_node_info (ob, node);
4558 }
4559 streamer_write_char_stream (ob->main_stream, 0);
4560 produce_asm (ob, NULL);
4561 destroy_output_block (ob);
4562 }
4563
4564 /* Read section in file FILE_DATA of length LEN with data DATA. */
4565
4566 static void
4567 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4568 size_t len)
4569 {
4570 const struct lto_function_header *header =
4571 (const struct lto_function_header *) data;
4572 const int cfg_offset = sizeof (struct lto_function_header);
4573 const int main_offset = cfg_offset + header->cfg_size;
4574 const int string_offset = main_offset + header->main_size;
4575 class data_in *data_in;
4576 unsigned int i;
4577 unsigned int count;
4578
4579 lto_input_block ib_main ((const char *) data + main_offset,
4580 header->main_size, file_data->mode_table);
4581
4582 data_in =
4583 lto_data_in_create (file_data, (const char *) data + string_offset,
4584 header->string_size, vNULL);
4585 count = streamer_read_uhwi (&ib_main);
4586
4587 for (i = 0; i < count; i++)
4588 {
4589 unsigned int index;
4590 struct cgraph_node *node;
4591 lto_symtab_encoder_t encoder;
4592
4593 index = streamer_read_uhwi (&ib_main);
4594 encoder = file_data->symtab_node_encoder;
4595 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4596 index));
4597 gcc_assert (node->definition);
4598 ipa_read_node_info (&ib_main, node, data_in);
4599 }
4600 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4601 len);
4602 lto_data_in_delete (data_in);
4603 }
4604
4605 /* Read ipcp jump functions. */
4606
4607 void
4608 ipa_prop_read_jump_functions (void)
4609 {
4610 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4611 struct lto_file_decl_data *file_data;
4612 unsigned int j = 0;
4613
4614 ipa_check_create_node_params ();
4615 ipa_check_create_edge_args ();
4616 ipa_register_cgraph_hooks ();
4617
4618 while ((file_data = file_data_vec[j++]))
4619 {
4620 size_t len;
4621 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4622
4623 if (data)
4624 ipa_prop_read_section (file_data, data, len);
4625 }
4626 }
4627
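/* Stream out the IPA-CP transformation summary of NODE to OB: the chain of
   aggregate value replacements followed by the known value ranges and known
   bits of its parameters.  */
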
4628 void
4629 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4630 {
4631 int node_ref;
4632 unsigned int count = 0;
4633 lto_symtab_encoder_t encoder;
4634 struct ipa_agg_replacement_value *aggvals, *av;
4635
4636 aggvals = ipa_get_agg_replacements_for_node (node);
4637 encoder = ob->decl_state->symtab_node_encoder;
4638 node_ref = lto_symtab_encoder_encode (encoder, node);
4639 streamer_write_uhwi (ob, node_ref);
4640
4641 for (av = aggvals; av; av = av->next)
4642 count++;
4643 streamer_write_uhwi (ob, count);
4644
4645 for (av = aggvals; av; av = av->next)
4646 {
4647 struct bitpack_d bp;
4648
4649 streamer_write_uhwi (ob, av->offset);
4650 streamer_write_uhwi (ob, av->index);
4651 stream_write_tree (ob, av->value, true);
4652
4653 bp = bitpack_create (ob->main_stream);
4654 bp_pack_value (&bp, av->by_ref, 1);
4655 streamer_write_bitpack (&bp);
4656 }
4657
4658 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4659 if (ts && vec_safe_length (ts->m_vr) > 0)
4660 {
4661 count = ts->m_vr->length ();
4662 streamer_write_uhwi (ob, count);
4663 for (unsigned i = 0; i < count; ++i)
4664 {
4665 struct bitpack_d bp;
4666 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4667 bp = bitpack_create (ob->main_stream);
4668 bp_pack_value (&bp, parm_vr->known, 1);
4669 streamer_write_bitpack (&bp);
4670 if (parm_vr->known)
4671 {
4672 streamer_write_enum (ob->main_stream, value_rang_type,
4673 VR_LAST, parm_vr->type);
4674 streamer_write_wide_int (ob, parm_vr->min);
4675 streamer_write_wide_int (ob, parm_vr->max);
4676 }
4677 }
4678 }
4679 else
4680 streamer_write_uhwi (ob, 0);
4681
4682 if (ts && vec_safe_length (ts->bits) > 0)
4683 {
4684 count = ts->bits->length ();
4685 streamer_write_uhwi (ob, count);
4686
4687 for (unsigned i = 0; i < count; ++i)
4688 {
4689 const ipa_bits *bits_jfunc = (*ts->bits)[i];
4690 struct bitpack_d bp = bitpack_create (ob->main_stream);
4691 bp_pack_value (&bp, !!bits_jfunc, 1);
4692 streamer_write_bitpack (&bp);
4693 if (bits_jfunc)
4694 {
4695 streamer_write_widest_int (ob, bits_jfunc->value);
4696 streamer_write_widest_int (ob, bits_jfunc->mask);
4697 }
4698 }
4699 }
4700 else
4701 streamer_write_uhwi (ob, 0);
4702 }
4703
4704 /* Stream in the IPA-CP transformation summary for NODE from IB. */
4705
4706 static void
4707 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4708 data_in *data_in)
4709 {
4710 struct ipa_agg_replacement_value *aggvals = NULL;
4711 unsigned int count, i;
4712
4713 count = streamer_read_uhwi (ib);
4714 for (i = 0; i < count; i++)
4715 {
4716 struct ipa_agg_replacement_value *av;
4717 struct bitpack_d bp;
4718
4719 av = ggc_alloc<ipa_agg_replacement_value> ();
4720 av->offset = streamer_read_uhwi (ib);
4721 av->index = streamer_read_uhwi (ib);
4722 av->value = stream_read_tree (ib, data_in);
4723 bp = streamer_read_bitpack (ib);
4724 av->by_ref = bp_unpack_value (&bp, 1);
4725 av->next = aggvals;
4726 aggvals = av;
4727 }
4728 ipa_set_node_agg_value_chain (node, aggvals);
4729
4730 count = streamer_read_uhwi (ib);
4731 if (count > 0)
4732 {
4733 ipcp_transformation_initialize ();
4734 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4735 vec_safe_grow_cleared (ts->m_vr, count);
4736 for (i = 0; i < count; i++)
4737 {
4738 ipa_vr *parm_vr;
4739 parm_vr = &(*ts->m_vr)[i];
4740 struct bitpack_d bp;
4741 bp = streamer_read_bitpack (ib);
4742 parm_vr->known = bp_unpack_value (&bp, 1);
4743 if (parm_vr->known)
4744 {
4745 parm_vr->type = streamer_read_enum (ib, value_range_kind,
4746 VR_LAST);
4747 parm_vr->min = streamer_read_wide_int (ib);
4748 parm_vr->max = streamer_read_wide_int (ib);
4749 }
4750 }
4751 }
4752 count = streamer_read_uhwi (ib);
4753 if (count > 0)
4754 {
4755 ipcp_transformation_initialize ();
4756 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4757 vec_safe_grow_cleared (ts->bits, count);
4758
4759 for (i = 0; i < count; i++)
4760 {
4761 struct bitpack_d bp = streamer_read_bitpack (ib);
4762 bool known = bp_unpack_value (&bp, 1);
4763 if (known)
4764 {
4765 ipa_bits *bits
4766 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4767 streamer_read_widest_int (ib));
4768 (*ts->bits)[i] = bits;
4769 }
4770 }
4771 }
4772 }
4773
4774 /* Write IPA-CP transformation summaries of all functions in the partition. */
4775
4776 void
4777 ipcp_write_transformation_summaries (void)
4778 {
4779 struct cgraph_node *node;
4780 struct output_block *ob;
4781 unsigned int count = 0;
4782 lto_symtab_encoder_iterator lsei;
4783 lto_symtab_encoder_t encoder;
4784
4785 ob = create_output_block (LTO_section_ipcp_transform);
4786 encoder = ob->decl_state->symtab_node_encoder;
4787 ob->symbol = NULL;
4788 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4789 lsei_next_function_in_partition (&lsei))
4790 {
4791 node = lsei_cgraph_node (lsei);
4792 if (node->has_gimple_body_p ())
4793 count++;
4794 }
4795
4796 streamer_write_uhwi (ob, count);
4797
4798 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4799 lsei_next_function_in_partition (&lsei))
4800 {
4801 node = lsei_cgraph_node (lsei);
4802 if (node->has_gimple_body_p ())
4803 write_ipcp_transformation_info (ob, node);
4804 }
4805 streamer_write_char_stream (ob->main_stream, 0);
4806 produce_asm (ob, NULL);
4807 destroy_output_block (ob);
4808 }
4809
4810 /* Read replacements section in file FILE_DATA of length LEN with data
4811 DATA. */
4812
4813 static void
4814 read_replacements_section (struct lto_file_decl_data *file_data,
4815 const char *data,
4816 size_t len)
4817 {
4818 const struct lto_function_header *header =
4819 (const struct lto_function_header *) data;
4820 const int cfg_offset = sizeof (struct lto_function_header);
4821 const int main_offset = cfg_offset + header->cfg_size;
4822 const int string_offset = main_offset + header->main_size;
4823 class data_in *data_in;
4824 unsigned int i;
4825 unsigned int count;
4826
4827 lto_input_block ib_main ((const char *) data + main_offset,
4828 header->main_size, file_data->mode_table);
4829
4830 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4831 header->string_size, vNULL);
4832 count = streamer_read_uhwi (&ib_main);
4833
4834 for (i = 0; i < count; i++)
4835 {
4836 unsigned int index;
4837 struct cgraph_node *node;
4838 lto_symtab_encoder_t encoder;
4839
4840 index = streamer_read_uhwi (&ib_main);
4841 encoder = file_data->symtab_node_encoder;
4842 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4843 index));
4844 gcc_assert (node->definition);
4845 read_ipcp_transformation_info (&ib_main, node, data_in);
4846 }
4847 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4848 len);
4849 lto_data_in_delete (data_in);
4850 }
4851
4852 /* Read IPA-CP aggregate replacements. */
4853
4854 void
4855 ipcp_read_transformation_summaries (void)
4856 {
4857 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4858 struct lto_file_decl_data *file_data;
4859 unsigned int j = 0;
4860
4861 while ((file_data = file_data_vec[j++]))
4862 {
4863 size_t len;
4864 const char *data = lto_get_section_data (file_data,
4865 LTO_section_ipcp_transform,
4866 NULL, &len);
4867 if (data)
4868 read_replacements_section (file_data, data, len);
4869 }
4870 }
4871
4872 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4873 NODE. */
4874
4875 static void
4876 adjust_agg_replacement_values (struct cgraph_node *node,
4877 struct ipa_agg_replacement_value *aggval)
4878 {
4879 struct ipa_agg_replacement_value *v;
4880
4881 if (!node->clone.param_adjustments)
4882 return;
4883
4884 auto_vec<int, 16> new_indices;
4885 node->clone.param_adjustments->get_updated_indices (&new_indices);
4886 for (v = aggval; v; v = v->next)
4887 {
4888 gcc_checking_assert (v->index >= 0);
4889
4890 if ((unsigned) v->index < new_indices.length ())
4891 v->index = new_indices[v->index];
4892 else
4893 /* This can happen if we know about a constant passed by reference by
4894 an argument which is never actually used for anything, let alone
4895 loading that constant. */
4896 v->index = -1;
4897 }
4898 }
4899
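/* An illustration of the remapping above with hypothetical numbers: if
   get_updated_indices were to fill NEW_INDICES with { 0, -1, 1 } for a clone
   from which the second original parameter was dropped, a replacement
   recorded for original parameter 2 would be rewritten to index 1, whereas a
   replacement for the dropped parameter, or for any index beyond the vector,
   would end up as -1 and never match anything in the modification phase
   below.  */
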
4900 /* Dominator walker driving the ipcp modification phase. */
4901
4902 class ipcp_modif_dom_walker : public dom_walker
4903 {
4904 public:
4905 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4906 vec<ipa_param_descriptor, va_gc> *descs,
4907 struct ipa_agg_replacement_value *av,
4908 bool *sc, bool *cc)
4909 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4910 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4911
4912 virtual edge before_dom_children (basic_block);
4913
4914 private:
4915 struct ipa_func_body_info *m_fbi;
4916 vec<ipa_param_descriptor, va_gc> *m_descriptors;
4917 struct ipa_agg_replacement_value *m_aggval;
4918 bool *m_something_changed, *m_cfg_changed;
4919 };
4920
4921 edge
4922 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4923 {
4924 gimple_stmt_iterator gsi;
4925 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4926 {
4927 struct ipa_agg_replacement_value *v;
4928 gimple *stmt = gsi_stmt (gsi);
4929 tree rhs, val, t;
4930 HOST_WIDE_INT offset;
4931 poly_int64 size;
4932 int index;
4933 bool by_ref, vce;
4934
4935 if (!gimple_assign_load_p (stmt))
4936 continue;
4937 rhs = gimple_assign_rhs1 (stmt);
4938 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4939 continue;
4940
4941 vce = false;
4942 t = rhs;
4943 while (handled_component_p (t))
4944 {
4945 /* V_C_E can do things like convert an array of integers to one
4946 bigger integer and similar things we do not handle below. */
4947 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4948 {
4949 vce = true;
4950 break;
4951 }
4952 t = TREE_OPERAND (t, 0);
4953 }
4954 if (vce)
4955 continue;
4956
4957 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4958 &offset, &size, &by_ref))
4959 continue;
4960 for (v = m_aggval; v; v = v->next)
4961 if (v->index == index
4962 && v->offset == offset)
4963 break;
4964 if (!v
4965 || v->by_ref != by_ref
4966 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
4967 size))
4968 continue;
4969
4970 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4971 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4972 {
4973 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4974 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4975 else if (TYPE_SIZE (TREE_TYPE (rhs))
4976 == TYPE_SIZE (TREE_TYPE (v->value)))
4977 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4978 else
4979 {
4980 if (dump_file)
4981 {
4982 fprintf (dump_file, " const ");
4983 print_generic_expr (dump_file, v->value);
4984 fprintf (dump_file, " can't be converted to type of ");
4985 print_generic_expr (dump_file, rhs);
4986 fprintf (dump_file, "\n");
4987 }
4988 continue;
4989 }
4990 }
4991 else
4992 val = v->value;
4993
4994 if (dump_file && (dump_flags & TDF_DETAILS))
4995 {
4996 fprintf (dump_file, "Modifying stmt:\n ");
4997 print_gimple_stmt (dump_file, stmt, 0);
4998 }
4999 gimple_assign_set_rhs_from_tree (&gsi, val);
5000 update_stmt (stmt);
5001
5002 if (dump_file && (dump_flags & TDF_DETAILS))
5003 {
5004 fprintf (dump_file, "into:\n ");
5005 print_gimple_stmt (dump_file, stmt, 0);
5006 fprintf (dump_file, "\n");
5007 }
5008
5009 *m_something_changed = true;
5010 if (maybe_clean_eh_stmt (stmt)
5011 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5012 *m_cfg_changed = true;
5013 }
5014 return NULL;
5015 }
5016
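/* A sketch of the effect of the walker above on user code, with hypothetical
   names and values:

     struct S { int a; int b; };
     static int use (struct S *p) { return p->b; }

   If IPA-CP recorded the aggregate replacement "0[32]=7" for a clone of USE,
   meaning that every caller passes a structure whose field at bit offset 32
   is known to hold 7, then the load of p->b in the clone is rewritten so that
   it simply yields the constant 7.  */
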
5017 /* Update bits info of formal parameters as described in
5018 ipcp_transformation. */
5019
5020 static void
5021 ipcp_update_bits (struct cgraph_node *node)
5022 {
5023 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5024
5025 if (!ts || vec_safe_length (ts->bits) == 0)
5026 return;
5027 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5028 unsigned count = bits.length ();
5029 if (!count)
5030 return;
5031
5032 auto_vec<int, 16> new_indices;
5033 bool need_remapping = false;
5034 if (node->clone.param_adjustments)
5035 {
5036 node->clone.param_adjustments->get_updated_indices (&new_indices);
5037 need_remapping = true;
5038 }
5039 auto_vec <tree, 16> parm_decls;
5040 push_function_arg_decls (&parm_decls, node->decl);
5041
5042 for (unsigned i = 0; i < count; ++i)
5043 {
5044 tree parm;
5045 if (need_remapping)
5046 {
5047 if (i >= new_indices.length ())
5048 continue;
5049 int idx = new_indices[i];
5050 if (idx < 0)
5051 continue;
5052 parm = parm_decls[idx];
5053 }
5054 else
5055 parm = parm_decls[i];
5056 gcc_checking_assert (parm);
5057
5058
5059 if (!bits[i]
5060 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5061 || POINTER_TYPE_P (TREE_TYPE (parm)))
5062 || !is_gimple_reg (parm))
5063 continue;
5064
5065 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5066 if (!ddef)
5067 continue;
5068
5069 if (dump_file)
5070 {
5071 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5072 print_hex (bits[i]->mask, dump_file);
5073 fprintf (dump_file, "\n");
5074 }
5075
5076 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5077 {
5078 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5079 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5080
5081 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5082 | wide_int::from (bits[i]->value, prec, sgn);
5083 set_nonzero_bits (ddef, nonzero_bits);
5084 }
5085 else
5086 {
5087 unsigned tem = bits[i]->mask.to_uhwi ();
5088 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5089 unsigned align = tem & -tem;
5090 unsigned misalign = bitpos & (align - 1);
5091
5092 if (align > 1)
5093 {
5094 if (dump_file)
5095 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5096
5097 unsigned old_align, old_misalign;
5098 struct ptr_info_def *pi = get_ptr_info (ddef);
5099 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5100
5101 if (old_known
5102 && old_align > align)
5103 {
5104 if (dump_file)
5105 {
5106 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5107 if ((old_misalign & (align - 1)) != misalign)
5108 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5109 old_misalign, misalign);
5110 }
5111 continue;
5112 }
5113
5114 if (old_known
5115 && ((misalign & (old_align - 1)) != old_misalign)
5116 && dump_file)
5117 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5118 old_misalign, misalign);
5119
5120 set_ptr_info_alignment (pi, align, misalign);
5121 }
5122 }
5123 }
5124 }
5125
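/* A worked example of the pointer case above (illustrative numbers): if the
   lowest set bit of the mask is 1 << 4, i.e. the low four bits of the pointer
   are known, and the low bits of the value are 0b0100, then
     align    = mask & -mask        = 16
     misalign = value & (align - 1) = 4
   and the default-definition SSA name is annotated as being congruent to 4
   modulo 16.  */
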
5126 bool
5127 ipa_vr::nonzero_p (tree expr_type) const
5128 {
5129 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5130 return true;
5131
5132 unsigned prec = TYPE_PRECISION (expr_type);
5133 return (type == VR_RANGE
5134 && TYPE_UNSIGNED (expr_type)
5135 && wi::eq_p (min, wi::one (prec))
5136 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5137 }
5138
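/* Two concrete ranges accepted by the predicate above: the anti-range
   ~[0, 0], which excludes zero for any type, and, for an unsigned EXPR_TYPE
   of precision PREC, the range [1, 2^PREC - 1], which covers every value
   except zero.  Everything else yields false.  */
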
5139 /* Update value range of formal parameters as described in
5140 ipcp_transformation. */
5141
5142 static void
5143 ipcp_update_vr (struct cgraph_node *node)
5144 {
5145 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5146 if (!ts || vec_safe_length (ts->m_vr) == 0)
5147 return;
5148 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5149 unsigned count = vr.length ();
5150 if (!count)
5151 return;
5152
5153 auto_vec<int, 16> new_indices;
5154 bool need_remapping = false;
5155 if (node->clone.param_adjustments)
5156 {
5157 node->clone.param_adjustments->get_updated_indices (&new_indices);
5158 need_remapping = true;
5159 }
5160 auto_vec <tree, 16> parm_decls;
5161 push_function_arg_decls (&parm_decls, node->decl);
5162
5163 for (unsigned i = 0; i < count; ++i)
5164 {
5165 tree parm;
5166 int remapped_idx;
5167 if (need_remapping)
5168 {
5169 if (i >= new_indices.length ())
5170 continue;
5171 remapped_idx = new_indices[i];
5172 if (remapped_idx < 0)
5173 continue;
5174 }
5175 else
5176 remapped_idx = i;
5177
5178 parm = parm_decls[remapped_idx];
5179
5180 gcc_checking_assert (parm);
5181 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5182
5183 if (!ddef || !is_gimple_reg (parm))
5184 continue;
5185
5186 if (vr[i].known
5187 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5188 {
5189 tree type = TREE_TYPE (ddef);
5190 unsigned prec = TYPE_PRECISION (type);
5191 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5192 {
5193 if (dump_file)
5194 {
5195 fprintf (dump_file, "Setting value range of param %u "
5196 "(now %i) ", i, remapped_idx);
5197 fprintf (dump_file, "%s[",
5198 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5199 print_decs (vr[i].min, dump_file);
5200 fprintf (dump_file, ", ");
5201 print_decs (vr[i].max, dump_file);
5202 fprintf (dump_file, "]\n");
5203 }
5204 set_range_info (ddef, vr[i].type,
5205 wide_int_storage::from (vr[i].min, prec,
5206 TYPE_SIGN (type)),
5207 wide_int_storage::from (vr[i].max, prec,
5208 TYPE_SIGN (type)));
5209 }
5210 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5211 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5212 {
5213 if (dump_file)
5214 fprintf (dump_file, "Setting nonnull for %u\n", i);
5215 set_ptr_nonnull (ddef);
5216 }
5217 }
5218 }
5219 }
5220
5221 /* IPCP transformation phase applying recorded aggregate values, bits and VRs. */
5222
5223 unsigned int
5224 ipcp_transform_function (struct cgraph_node *node)
5225 {
5226 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5227 struct ipa_func_body_info fbi;
5228 struct ipa_agg_replacement_value *aggval;
5229 int param_count;
5230 bool cfg_changed = false, something_changed = false;
5231
5232 gcc_checking_assert (cfun);
5233 gcc_checking_assert (current_function_decl);
5234
5235 if (dump_file)
5236 fprintf (dump_file, "Modification phase of node %s\n",
5237 node->dump_name ());
5238
5239 ipcp_update_bits (node);
5240 ipcp_update_vr (node);
5241 aggval = ipa_get_agg_replacements_for_node (node);
5242 if (!aggval)
5243 return 0;
5244 param_count = count_formal_params (node->decl);
5245 if (param_count == 0)
5246 return 0;
5247 adjust_agg_replacement_values (node, aggval);
5248 if (dump_file)
5249 ipa_dump_agg_replacement_values (dump_file, aggval);
5250
5251 fbi.node = node;
5252 fbi.info = NULL;
5253 fbi.bb_infos = vNULL;
5254 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5255 fbi.param_count = param_count;
5256 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
5257
5258 vec_safe_grow_cleared (descriptors, param_count);
5259 ipa_populate_param_decls (node, *descriptors);
5260 calculate_dominance_info (CDI_DOMINATORS);
5261 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5262 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5263
5264 int i;
5265 struct ipa_bb_info *bi;
5266 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5267 free_ipa_bb_info (bi);
5268 fbi.bb_infos.release ();
5269 free_dominance_info (CDI_DOMINATORS);
5270
5271 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5272 s->agg_values = NULL;
5273 s->bits = NULL;
5274 s->m_vr = NULL;
5275
5276 vec_free (descriptors);
5277
5278 if (!something_changed)
5279 return 0;
5280
5281 if (cfg_changed)
5282 delete_unreachable_blocks_update_callgraph (node, false);
5283
5284 return TODO_update_ssa_only_virtuals;
5285 }
5286
5287 #include "gt-ipa-prop.h"