1 /* Interprocedural analyses.
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-inline.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone. */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Edge summary for IPA-CP edge information. */
61 ipa_edge_args_sum_t *ipa_edge_args_sum;
62
63 /* Traits for a hash table for reusing already existing ipa_bits. */
64
65 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
66 {
67 typedef ipa_bits *value_type;
68 typedef ipa_bits *compare_type;
69 static hashval_t
70 hash (const ipa_bits *p)
71 {
72 hashval_t t = (hashval_t) p->value.to_shwi ();
73 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
74 }
75 static bool
76 equal (const ipa_bits *a, const ipa_bits *b)
77 {
78 return a->value == b->value && a->mask == b->mask;
79 }
80 static void
81 mark_empty (ipa_bits *&p)
82 {
83 p = NULL;
84 }
85 static bool
86 is_empty (const ipa_bits *p)
87 {
88 return p == NULL;
89 }
90 static bool
91 is_deleted (const ipa_bits *p)
92 {
93 return p == reinterpret_cast<const ipa_bits *> (1);
94 }
95 static void
96 mark_deleted (ipa_bits *&p)
97 {
98 p = reinterpret_cast<ipa_bits *> (1);
99 }
100 };
101
102 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
103 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
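/* An illustrative example of what gets shared through this table (not taken
   from any particular dump): the known-bits information for a pointer that is
   known to be 8-byte aligned can be represented by an ipa_bits with
   value == 0 and a mask whose low three bits are clear (those bits are known
   to be zero) and whose remaining bits are set (unknown).  All jump functions
   and transformation summaries describing such a value then point to the same
   GC-allocated structure.  */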
104
105 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
106 the equiv bitmap is not hashed and is expected to be NULL. */
107
108 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
109 {
110 typedef value_range *value_type;
111 typedef value_range *compare_type;
112 static hashval_t
113 hash (const value_range *p)
114 {
115 gcc_checking_assert (!p->equiv);
116 hashval_t t = (hashval_t) p->type;
117 t = iterative_hash_expr (p->min, t);
118 return iterative_hash_expr (p->max, t);
119 }
120 static bool
121 equal (const value_range *a, const value_range *b)
122 {
123 return a->type == b->type && a->min == b->min && a->max == b->max;
124 }
125 static void
126 mark_empty (value_range *&p)
127 {
128 p = NULL;
129 }
130 static bool
131 is_empty (const value_range *p)
132 {
133 return p == NULL;
134 }
135 static bool
136 is_deleted (const value_range *p)
137 {
138 return p == reinterpret_cast<const value_range *> (1);
139 }
140 static void
141 mark_deleted (value_range *&p)
142 {
143 p = reinterpret_cast<value_range *> (1);
144 }
145 };
146
147 /* Hash table for avoiding repeated allocations of equal value_ranges. */
148 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
149
150 /* Holders of ipa cgraph hooks: */
151 static struct cgraph_node_hook_list *function_insertion_hook_holder;
152
153 /* Description of a reference to an IPA constant. */
154 struct ipa_cst_ref_desc
155 {
156 /* Edge that corresponds to the statement which took the reference. */
157 struct cgraph_edge *cs;
158 /* Linked list of duplicates created when call graph edges are cloned. */
159 struct ipa_cst_ref_desc *next_duplicate;
160 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
161 is out of control. */
162 int refcount;
163 };
164
165 /* Allocation pool for reference descriptions. */
166
167 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
168 ("IPA-PROP ref descriptions");
169
170 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
171 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
172
173 static bool
174 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
175 {
176 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
177
178 if (!fs_opts)
179 return false;
180 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
181 }
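/* For example (illustrative only), a function declared as

     __attribute__ ((optimize ("O0"))) int f (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize == 0, so the
   check above makes IPA-CP leave it alone; the same would hold for a
   function compiled with optimize ("no-ipa-cp").  */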
182
183 /* Return index of the formal whose tree is PTREE in the function whose
184 parameters are described by DESCRIPTORS. */
185
186 static int
187 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
188 tree ptree)
189 {
190 int i, count;
191
192 count = vec_safe_length (descriptors);
193 for (i = 0; i < count; i++)
194 if ((*descriptors)[i].decl_or_type == ptree)
195 return i;
196
197 return -1;
198 }
199
200 /* Return the index of the formal parameter whose tree is PTREE in the
201 function that corresponds to INFO. */
202
203 int
204 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
205 {
206 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
207 }
208
209 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
210 to NODE. */
211
212 static void
213 ipa_populate_param_decls (struct cgraph_node *node,
214 vec<ipa_param_descriptor, va_gc> &descriptors)
215 {
216 tree fndecl;
217 tree fnargs;
218 tree parm;
219 int param_num;
220
221 fndecl = node->decl;
222 gcc_assert (gimple_has_body_p (fndecl));
223 fnargs = DECL_ARGUMENTS (fndecl);
224 param_num = 0;
225 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
226 {
227 descriptors[param_num].decl_or_type = parm;
228 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
229 true);
230 param_num++;
231 }
232 }
233
234 /* Return how many formal parameters FNDECL has. */
235
236 int
237 count_formal_params (tree fndecl)
238 {
239 tree parm;
240 int count = 0;
241 gcc_assert (gimple_has_body_p (fndecl));
242
243 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
244 count++;
245
246 return count;
247 }
248
249 /* Dump a human-readable representation of the formal parameter with index I
250 of the function described by INFO to FILE, i.e. its number and, if it is
251 known, its declaration. */
252
253 void
254 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
255 {
256 fprintf (file, "param #%i", i);
257 if ((*info->descriptors)[i].decl_or_type)
258 {
259 fprintf (file, " ");
260 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
261 }
262 }
263
264 /* If necessary, allocate vector of parameter descriptors in info of NODE.
265 Return true if they were allocated, false if not. */
266
267 static bool
268 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
269 {
270 struct ipa_node_params *info = IPA_NODE_REF (node);
271
272 if (!info->descriptors && param_count)
273 {
274 vec_safe_grow_cleared (info->descriptors, param_count);
275 return true;
276 }
277 else
278 return false;
279 }
280
281 /* Initialize the ipa_node_params structure associated with NODE by counting
282 the function parameters, creating the descriptors and populating their
283 param_decls. */
284
285 void
286 ipa_initialize_node_params (struct cgraph_node *node)
287 {
288 struct ipa_node_params *info = IPA_NODE_REF (node);
289
290 if (!info->descriptors
291 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
292 ipa_populate_param_decls (node, *info->descriptors);
293 }
294
295 /* Print the jump functions associated with call graph edge CS to file F. */
296
297 static void
298 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
299 {
300 int i, count;
301
302 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
303 for (i = 0; i < count; i++)
304 {
305 struct ipa_jump_func *jump_func;
306 enum jump_func_type type;
307
308 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
309 type = jump_func->type;
310
311 fprintf (f, " param %d: ", i);
312 if (type == IPA_JF_UNKNOWN)
313 fprintf (f, "UNKNOWN\n");
314 else if (type == IPA_JF_CONST)
315 {
316 tree val = jump_func->value.constant.value;
317 fprintf (f, "CONST: ");
318 print_generic_expr (f, val);
319 if (TREE_CODE (val) == ADDR_EXPR
320 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
321 {
322 fprintf (f, " -> ");
323 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
324 }
325 fprintf (f, "\n");
326 }
327 else if (type == IPA_JF_PASS_THROUGH)
328 {
329 fprintf (f, "PASS THROUGH: ");
330 fprintf (f, "%d, op %s",
331 jump_func->value.pass_through.formal_id,
332 get_tree_code_name(jump_func->value.pass_through.operation));
333 if (jump_func->value.pass_through.operation != NOP_EXPR)
334 {
335 fprintf (f, " ");
336 print_generic_expr (f, jump_func->value.pass_through.operand);
337 }
338 if (jump_func->value.pass_through.agg_preserved)
339 fprintf (f, ", agg_preserved");
340 fprintf (f, "\n");
341 }
342 else if (type == IPA_JF_ANCESTOR)
343 {
344 fprintf (f, "ANCESTOR: ");
345 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
346 jump_func->value.ancestor.formal_id,
347 jump_func->value.ancestor.offset);
348 if (jump_func->value.ancestor.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
351 }
352
353 if (jump_func->agg.items)
354 {
355 struct ipa_agg_jf_item *item;
356 int j;
357
358 fprintf (f, " Aggregate passed by %s:\n",
359 jump_func->agg.by_ref ? "reference" : "value");
360 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
361 {
362 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
363 item->offset);
364 if (TYPE_P (item->value))
365 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
366 tree_to_uhwi (TYPE_SIZE (item->value)));
367 else
368 {
369 fprintf (f, "cst: ");
370 print_generic_expr (f, item->value);
371 }
372 fprintf (f, "\n");
373 }
374 }
375
376 struct ipa_polymorphic_call_context *ctx
377 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
378 if (ctx && !ctx->useless_p ())
379 {
380 fprintf (f, " Context: ");
381 ctx->dump (dump_file);
382 }
383
384 if (jump_func->bits)
385 {
386 fprintf (f, " value: ");
387 print_hex (jump_func->bits->value, f);
388 fprintf (f, ", mask: ");
389 print_hex (jump_func->bits->mask, f);
390 fprintf (f, "\n");
391 }
392 else
393 fprintf (f, " Unknown bits\n");
394
395 if (jump_func->m_vr)
396 {
397 fprintf (f, " VR ");
398 fprintf (f, "%s[",
399 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
400 print_decs (jump_func->m_vr->min, f);
401 fprintf (f, ", ");
402 print_decs (jump_func->m_vr->max, f);
403 fprintf (f, "]\n");
404 }
405 else
406 fprintf (f, " Unknown VR\n");
407 }
408 }
409
410
411 /* Print the jump functions of all arguments on all call graph edges going from
412 NODE to file F. */
413
414 void
415 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
416 {
417 struct cgraph_edge *cs;
418
419 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
420 node->order);
421 for (cs = node->callees; cs; cs = cs->next_callee)
422 {
423 if (!ipa_edge_args_info_available_for_edge_p (cs))
424 continue;
425
426 fprintf (f, " callsite %s/%i -> %s/%i : \n",
427 xstrdup_for_dump (node->name ()), node->order,
428 xstrdup_for_dump (cs->callee->name ()),
429 cs->callee->order);
430 ipa_print_node_jump_functions_for_edge (f, cs);
431 }
432
433 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
434 {
435 struct cgraph_indirect_call_info *ii;
436 if (!ipa_edge_args_info_available_for_edge_p (cs))
437 continue;
438
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
441 fprintf (f, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
443 ii->member_ptr ? "member ptr" : "aggregate",
444 ii->param_index, ii->offset,
445 ii->by_ref ? "by reference" : "by_value");
446 else
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
451
452 if (cs->call_stmt)
453 {
454 fprintf (f, ", for stmt ");
455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
456 }
457 else
458 fprintf (f, "\n");
459 if (ii->polymorphic)
460 ii->context.dump (f);
461 ipa_print_node_jump_functions_for_edge (f, cs);
462 }
463 }
464
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
466
467 void
468 ipa_print_all_jump_functions (FILE *f)
469 {
470 struct cgraph_node *node;
471
472 fprintf (f, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node)
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477 }
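/* As a purely illustrative example of the output produced by the dumping
   routines above (function names and numbers are made up), a call bar (a, 7)
   whose first argument is forwarded unchanged and whose second argument is a
   constant might be printed roughly as:

     Jump functions of caller foo/12:
       callsite foo/12 -> bar/13 :
         param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
         param 1: CONST: 7

   followed by the known-bits and value-range lines for each parameter.  */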
478
479 /* Set JFUNC to be an unknown jump function, i.e. one carrying no useful information. */
480
481 static void
482 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483 {
484 jfunc->type = IPA_JF_UNKNOWN;
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
487 }
488
489 /* Set DST to be a copy of the jump function SRC (to be used by the jump
490 function combination code). The two functions will share their rdesc. */
491
492 static void
493 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496 {
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500 }
501
502 /* Set JFUNC to be a constant jump function. */
503
504 static void
505 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
507 {
508 jfunc->type = IPA_JF_CONST;
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
515
516 rdesc = ipa_refdesc_pool.allocate ();
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
524 }
525
526 /* Set JFUNC to be a simple pass-through jump function. */
527 static void
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
529 bool agg_preserved)
530 {
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
535 jfunc->value.pass_through.agg_preserved = agg_preserved;
536 }
537
538 /* Set JFUNC to be a unary pass-through jump function. */
539
540 static void
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543 {
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549 }
550 /* Set JFUNC to be an arithmetic pass-through jump function. */
551
552 static void
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555 {
556 jfunc->type = IPA_JF_PASS_THROUGH;
557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
560 jfunc->value.pass_through.agg_preserved = false;
561 }
562
563 /* Set JFUNC to be an ancestor jump function. */
564
565 static void
566 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
567 int formal_id, bool agg_preserved)
568 {
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
572 jfunc->value.ancestor.agg_preserved = agg_preserved;
573 }
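/* To illustrate how the setters above are typically used (an illustrative
   example, not from any particular testcase): for a call

     baz (i, i + 4, &this_1(D)->D.1845);

   the first argument would get a simple pass-through jump function, the
   second an arithmetic pass-through with operation PLUS_EXPR and operand 4,
   and the third an ancestor jump function whose offset is the bit offset of
   the D.1845 field within *this.  */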
574
575 /* Get IPA BB information about the given BB. FBI is the context of the
576 analysis of this function body. */
577
578 static struct ipa_bb_info *
579 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
580 {
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583 }
584
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
587
588 struct prop_type_change_info
589 {
590 /* Offset into the object where there is the virtual method pointer we are
591 looking for. */
592 HOST_WIDE_INT offset;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
594 type change. */
595 tree object;
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed;
598 };
599
600 /* Return true if STMT can modify a virtual method table pointer.
601
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
605 pointers and that constructors essentially look in the following way:
606
607 1) The very first thing they do is that they call constructors of ancestor
608 sub-objects that have them.
609
610 2) Then the VMT pointers of this object and all its ancestors are set to
611 new values corresponding to the type of the constructor.
612
613 3) Only afterwards, other stuff such as constructors of member sub-objects
614 and the code written by the user is run. Only this may include calling
615 virtual functions, directly or indirectly.
616
617 There is no way to call a constructor of an ancestor sub-object in any
618 other way.
619
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
623
624 The most important fact to derive from the above is that if, for some
625 statement in section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop, we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
635 */
636
637 static bool
638 stmt_may_be_vtbl_ptr_store (gimple *stmt)
639 {
640 if (is_gimple_call (stmt))
641 return false;
642 if (gimple_clobber_p (stmt))
643 return false;
644 else if (is_gimple_assign (stmt))
645 {
646 tree lhs = gimple_assign_lhs (stmt);
647
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
649 {
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
652 return false;
653
654 if (TREE_CODE (lhs) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
656 return false;
657 /* In the future we might want to use get_base_ref_and_offset to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost as if it were a component ref. */
660 }
661 }
662 return true;
663 }
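/* An example of a statement that the predicate above treats as a potential
   VMT pointer store (illustrative GIMPLE, roughly as it appears in
   constructor bodies):

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   The LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field, so the function
   returns true; an ordinary scalar store such as this_2(D)->x = 1 would
   return false.  */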
664
665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointer. It stores its result into DATA, which points to a
668 prop_type_change_info structure. */
669
670 static bool
671 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672 {
673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683 }
684
685 /* See if ARG is a PARM_DECL describing an instance passed by pointer
686 or reference in FUNCTION. Return true if the dynamic type may change
687 between the beginning of the function and the point where CALL is invoked.
688
689 Generally functions are not allowed to change the type of such instances,
690 but they can call destructors. We assume that methods cannot destroy the
691 THIS pointer. Also, as a special case, constructors and destructors may
692 change the type of the THIS pointer. */
693
694 static bool
695 param_type_may_change_p (tree function, tree arg, gimple *call)
696 {
697 /* Pure functions cannot change the dynamic type;
698 that would require writing to memory. */
699 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
700 return false;
701 /* We need to check if we are within an inlined constructor
702 or destructor (ideally we would have a way to check that the
703 inlined cdtor is actually working on ARG, but we don't have
704 an easy way to tell, so punt on all non-pure cdtors).
705 We may also record the types of cdtors and, once we know the type
706 of the instance, match them.
707
708 Also, code unification optimizations may merge calls from
709 different blocks, making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
712 return true;
713 if (TREE_CODE (arg) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg)
715 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
716 {
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
719 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
720 /* THIS pointer of a method - here we want to watch constructors
721 and destructors as those definitely may change the dynamic
722 type. */
723 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function)
725 && !DECL_CXX_DESTRUCTOR_P (function)
726 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
727 {
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
730 block = BLOCK_SUPERCONTEXT (block))
731 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
732 return true;
733 return false;
734 }
735 }
736 return true;
737 }
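/* For instance (an illustrative example, not from any particular testcase): in

     void A::set (A *this, int v) { this->x = v; }

   the THIS argument is a default-definition PARM_DECL of a method that is
   neither a constructor nor a destructor, so as long as no inlined ctor or
   dtor shows up in the block tree of CALL, the function above returns false
   and the dynamic type of *this can be trusted not to change.  */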
738
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
741 so, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
745
746 This is a helper function for detect_type_change and detect_type_change_ssa
747 that does the heavy work, which is usually unnecessary. */
748
749 static bool
750 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
751 gcall *call, struct ipa_jump_func *jfunc,
752 HOST_WIDE_INT offset)
753 {
754 struct prop_type_change_info tci;
755 ao_ref ao;
756 bool entry_reached = false;
757
758 gcc_checking_assert (DECL_P (arg)
759 || TREE_CODE (arg) == MEM_REF
760 || handled_component_p (arg));
761
762 comp_type = TYPE_MAIN_VARIANT (comp_type);
763
764 /* Const calls cannot call virtual methods through VMT and so type changes do
765 not matter. */
766 if (!flag_devirtualize || !gimple_vuse (call)
767 /* Be sure expected_type is polymorphic. */
768 || !comp_type
769 || TREE_CODE (comp_type) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
772 return true;
773
774 ao_ref_init (&ao, arg);
775 ao.base = base;
776 ao.offset = offset;
777 ao.size = POINTER_SIZE;
778 ao.max_size = ao.size;
779
780 tci.offset = offset;
781 tci.object = get_base_address (arg);
782 tci.type_maybe_changed = false;
783
784 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
785 &tci, NULL, &entry_reached);
786 if (!tci.type_maybe_changed)
787 return false;
788
789 ipa_set_jf_unknown (jfunc);
790 return true;
791 }
792
793 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
794 If so, return true and fill in the jump function JFUNC with relevant type
795 information or set it to unknown. ARG is the object itself (not a pointer
796 to it, unless dereferenced). BASE is the base of the memory access as
797 returned by get_ref_base_and_extent, as is the offset. */
798
799 static bool
800 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
801 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
802 {
803 if (!flag_devirtualize)
804 return false;
805
806 if (TREE_CODE (base) == MEM_REF
807 && !param_type_may_change_p (current_function_decl,
808 TREE_OPERAND (base, 0),
809 call))
810 return false;
811 return detect_type_change_from_memory_writes (arg, base, comp_type,
812 call, jfunc, offset);
813 }
814
815 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
816 SSA name (its dereference will become the base and the offset is assumed to
817 be zero). */
818
819 static bool
820 detect_type_change_ssa (tree arg, tree comp_type,
821 gcall *call, struct ipa_jump_func *jfunc)
822 {
823 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
824 if (!flag_devirtualize
825 || !POINTER_TYPE_P (TREE_TYPE (arg)))
826 return false;
827
828 if (!param_type_may_change_p (current_function_decl, arg, call))
829 return false;
830
831 arg = build2 (MEM_REF, ptr_type_node, arg,
832 build_int_cst (ptr_type_node, 0));
833
834 return detect_type_change_from_memory_writes (arg, arg, comp_type,
835 call, jfunc, 0);
836 }
837
838 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
839 boolean variable pointed to by DATA. */
840
841 static bool
842 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
843 void *data)
844 {
845 bool *b = (bool *) data;
846 *b = true;
847 return true;
848 }
849
850 /* Return true if we have already walked so many statements in AA that we
851 should really just start giving up. */
852
853 static bool
854 aa_overwalked (struct ipa_func_body_info *fbi)
855 {
856 gcc_checking_assert (fbi);
857 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
858 }
859
860 /* Find the nearest valid aa status for parameter specified by INDEX that
861 dominates BB. */
862
863 static struct ipa_param_aa_status *
864 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
865 int index)
866 {
867 while (true)
868 {
869 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
870 if (!bb)
871 return NULL;
872 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
873 if (!bi->param_aa_statuses.is_empty ()
874 && bi->param_aa_statuses[index].valid)
875 return &bi->param_aa_statuses[index];
876 }
877 }
878
879 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
880 structures and/or initialize the result with a dominating description as
881 necessary. */
882
883 static struct ipa_param_aa_status *
884 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
885 int index)
886 {
887 gcc_checking_assert (fbi);
888 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
889 if (bi->param_aa_statuses.is_empty ())
890 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
891 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
892 if (!paa->valid)
893 {
894 gcc_checking_assert (!paa->parm_modified
895 && !paa->ref_modified
896 && !paa->pt_modified);
897 struct ipa_param_aa_status *dom_paa;
898 dom_paa = find_dominating_aa_status (fbi, bb, index);
899 if (dom_paa)
900 *paa = *dom_paa;
901 else
902 paa->valid = true;
903 }
904
905 return paa;
906 }
907
908 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
909 a value known not to be modified in this function before reaching the
910 statement STMT. FBI holds information about the function that we have
911 gathered so far but that does not survive the summary building stage. */
912
913 static bool
914 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
915 gimple *stmt, tree parm_load)
916 {
917 struct ipa_param_aa_status *paa;
918 bool modified = false;
919 ao_ref refd;
920
921 tree base = get_base_address (parm_load);
922 gcc_assert (TREE_CODE (base) == PARM_DECL);
923 if (TREE_READONLY (base))
924 return true;
925
926 /* FIXME: FBI can be NULL if we are being called from outside
927 ipa_node_analysis or ipcp_transform_function, which currently happens
928 during inlining analysis. It would be great to extend fbi's lifetime and
929 always have it. Currently, we are just not afraid of too much walking in
930 that case. */
931 if (fbi)
932 {
933 if (aa_overwalked (fbi))
934 return false;
935 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
936 if (paa->parm_modified)
937 return false;
938 }
939 else
940 paa = NULL;
941
942 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
943 ao_ref_init (&refd, parm_load);
944 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
945 &modified, NULL);
946 if (fbi)
947 fbi->aa_walked += walked;
948 if (paa && modified)
949 paa->parm_modified = true;
950 return !modified;
951 }
952
953 /* If STMT is an assignment that loads a value from a parameter declaration,
954 return the index of the parameter in ipa_node_params which has not been
955 modified. Otherwise return -1. */
956
957 static int
958 load_from_unmodified_param (struct ipa_func_body_info *fbi,
959 vec<ipa_param_descriptor, va_gc> *descriptors,
960 gimple *stmt)
961 {
962 int index;
963 tree op1;
964
965 if (!gimple_assign_single_p (stmt))
966 return -1;
967
968 op1 = gimple_assign_rhs1 (stmt);
969 if (TREE_CODE (op1) != PARM_DECL)
970 return -1;
971
972 index = ipa_get_param_decl_index_1 (descriptors, op1);
973 if (index < 0
974 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
975 return -1;
976
977 return index;
978 }
979
980 /* Return true if memory reference REF (which must be a load through parameter
981 with INDEX) loads data that are known to be unmodified in this function
982 before reaching statement STMT. */
983
984 static bool
985 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
986 int index, gimple *stmt, tree ref)
987 {
988 struct ipa_param_aa_status *paa;
989 bool modified = false;
990 ao_ref refd;
991
992 /* FIXME: FBI can be NULL if we are being called from outside
993 ipa_node_analysis or ipcp_transform_function, which currently happens
994 during inlining analysis. It would be great to extend fbi's lifetime and
995 always have it. Currently, we are just not afraid of too much walking in
996 that case. */
997 if (fbi)
998 {
999 if (aa_overwalked (fbi))
1000 return false;
1001 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1002 if (paa->ref_modified)
1003 return false;
1004 }
1005 else
1006 paa = NULL;
1007
1008 gcc_checking_assert (gimple_vuse (stmt));
1009 ao_ref_init (&refd, ref);
1010 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1011 &modified, NULL);
1012 if (fbi)
1013 fbi->aa_walked += walked;
1014 if (paa && modified)
1015 paa->ref_modified = true;
1016 return !modified;
1017 }
1018
1019 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1020 is known to be unmodified in this function before reaching call statement
1021 CALL into which it is passed. FBI describes the function body. */
1022
1023 static bool
1024 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1025 gimple *call, tree parm)
1026 {
1027 bool modified = false;
1028 ao_ref refd;
1029
1030 /* It's unnecessary to calculate anything about memory contents for a const
1031 function because it is not going to use it. But do not cache the result
1032 either. Also, no such calculations for non-pointers. */
1033 if (!gimple_vuse (call)
1034 || !POINTER_TYPE_P (TREE_TYPE (parm))
1035 || aa_overwalked (fbi))
1036 return false;
1037
1038 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1039 gimple_bb (call),
1040 index);
1041 if (paa->pt_modified)
1042 return false;
1043
1044 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1045 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1046 &modified, NULL);
1047 fbi->aa_walked += walked;
1048 if (modified)
1049 paa->pt_modified = true;
1050 return !modified;
1051 }
1052
1053 /* Return true if we can prove that OP is a memory reference loading
1054 data from an aggregate passed as a parameter.
1055
1056 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1057 false if it cannot prove that the value has not been modified before the
1058 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1059 if it cannot prove the value has not been modified; in that case it will
1060 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1061
1062 INFO and PARMS_AINFO describe parameters of the current function (but the
1063 latter can be NULL), STMT is the load statement. If function returns true,
1064 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1065 within the aggregate and whether it is a load from a value passed by
1066 reference respectively. */
1067
1068 bool
1069 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1070 vec<ipa_param_descriptor, va_gc> *descriptors,
1071 gimple *stmt, tree op, int *index_p,
1072 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1073 bool *by_ref_p, bool *guaranteed_unmodified)
1074 {
1075 int index;
1076 HOST_WIDE_INT size, max_size;
1077 bool reverse;
1078 tree base
1079 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
1080
1081 if (max_size == -1 || max_size != size || *offset_p < 0)
1082 return false;
1083
1084 if (DECL_P (base))
1085 {
1086 int index = ipa_get_param_decl_index_1 (descriptors, base);
1087 if (index >= 0
1088 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1089 {
1090 *index_p = index;
1091 *by_ref_p = false;
1092 if (size_p)
1093 *size_p = size;
1094 if (guaranteed_unmodified)
1095 *guaranteed_unmodified = true;
1096 return true;
1097 }
1098 return false;
1099 }
1100
1101 if (TREE_CODE (base) != MEM_REF
1102 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1103 || !integer_zerop (TREE_OPERAND (base, 1)))
1104 return false;
1105
1106 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1107 {
1108 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1109 index = ipa_get_param_decl_index_1 (descriptors, parm);
1110 }
1111 else
1112 {
1113 /* This branch catches situations where a pointer parameter is not a
1114 gimple register, for example:
1115
1116 void hip7(S*) (struct S * p)
1117 {
1118 void (*<T2e4>) (struct S *) D.1867;
1119 struct S * p.1;
1120
1121 <bb 2>:
1122 p.1_1 = p;
1123 D.1867_2 = p.1_1->f;
1124 D.1867_2 ();
1125 gdp = &p;
1126 */
1127
1128 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1129 index = load_from_unmodified_param (fbi, descriptors, def);
1130 }
1131
1132 if (index >= 0)
1133 {
1134 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1135 if (!data_preserved && !guaranteed_unmodified)
1136 return false;
1137
1138 *index_p = index;
1139 *by_ref_p = true;
1140 if (size_p)
1141 *size_p = size;
1142 if (guaranteed_unmodified)
1143 *guaranteed_unmodified = data_preserved;
1144 return true;
1145 }
1146 return false;
1147 }
1148
1149 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1150 of an assignment statement STMT, try to determine whether we are actually
1151 handling any of the following cases and construct an appropriate jump
1152 function into JFUNC if so:
1153
1154 1) The passed value is loaded from a formal parameter which is not a gimple
1155 register (most probably because it is addressable, the value has to be
1156 scalar) and we can guarantee the value has not changed. This case can
1157 therefore be described by a simple pass-through jump function. For example:
1158
1159 foo (int a)
1160 {
1161 int a.0;
1162
1163 a.0_2 = a;
1164 bar (a.0_2);
1165
1166 2) The passed value can be described by a simple arithmetic pass-through
1167 jump function. E.g.
1168
1169 foo (int a)
1170 {
1171 int D.2064;
1172
1173 D.2064_4 = a.1(D) + 4;
1174 bar (D.2064_4);
1175
1176 This case can also occur in combination of the previous one, e.g.:
1177
1178 foo (int a, int z)
1179 {
1180 int a.0;
1181 int D.2064;
1182
1183 a.0_3 = a;
1184 D.2064_4 = a.0_3 + 4;
1185 foo (D.2064_4);
1186
1187 3) The passed value is an address of an object within another one (which
1188 is also passed by reference). Such situations are described by an ancestor
1189 jump function, for example:
1190
1191 B::foo() (struct B * const this)
1192 {
1193 struct A * D.1845;
1194
1195 D.1845_2 = &this_1(D)->D.1748;
1196 A::bar (D.1845_2);
1197
1198 INFO is the structure describing individual parameters at different
1199 stages of IPA optimizations. PARMS_AINFO contains the information that is
1200 only needed for intraprocedural analysis. */
1201
1202 static void
1203 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1204 struct ipa_node_params *info,
1205 struct ipa_jump_func *jfunc,
1206 gcall *call, gimple *stmt, tree name,
1207 tree param_type)
1208 {
1209 HOST_WIDE_INT offset, size, max_size;
1210 tree op1, tc_ssa, base, ssa;
1211 bool reverse;
1212 int index;
1213
1214 op1 = gimple_assign_rhs1 (stmt);
1215
1216 if (TREE_CODE (op1) == SSA_NAME)
1217 {
1218 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1219 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1220 else
1221 index = load_from_unmodified_param (fbi, info->descriptors,
1222 SSA_NAME_DEF_STMT (op1));
1223 tc_ssa = op1;
1224 }
1225 else
1226 {
1227 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1228 tc_ssa = gimple_assign_lhs (stmt);
1229 }
1230
1231 if (index >= 0)
1232 {
1233 switch (gimple_assign_rhs_class (stmt))
1234 {
1235 case GIMPLE_BINARY_RHS:
1236 {
1237 tree op2 = gimple_assign_rhs2 (stmt);
1238 if (!is_gimple_ip_invariant (op2)
1239 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1240 != tcc_comparison)
1241 && !useless_type_conversion_p (TREE_TYPE (name),
1242 TREE_TYPE (op1))))
1243 return;
1244
1245 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1246 gimple_assign_rhs_code (stmt));
1247 break;
1248 }
1249 case GIMPLE_SINGLE_RHS:
1250 {
1251 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1252 tc_ssa);
1253 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1254 break;
1255 }
1256 case GIMPLE_UNARY_RHS:
1257 if (is_gimple_assign (stmt)
1258 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1259 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1260 ipa_set_jf_unary_pass_through (jfunc, index,
1261 gimple_assign_rhs_code (stmt));
1262 default:;
1263 }
1264 return;
1265 }
1266
1267 if (TREE_CODE (op1) != ADDR_EXPR)
1268 return;
1269 op1 = TREE_OPERAND (op1, 0);
1270 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1271 return;
1272 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
1273 if (TREE_CODE (base) != MEM_REF
1274 /* If this is a varying address, punt. */
1275 || max_size == -1
1276 || max_size != size)
1277 return;
1278 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1279 ssa = TREE_OPERAND (base, 0);
1280 if (TREE_CODE (ssa) != SSA_NAME
1281 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1282 || offset < 0)
1283 return;
1284
1285 /* Dynamic types are changed in constructors and destructors. */
1286 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1287 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1288 ipa_set_ancestor_jf (jfunc, offset, index,
1289 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1290 }
1291
1292 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1293 it looks like:
1294
1295 iftmp.1_3 = &obj_2(D)->D.1762;
1296
1297 The base of the MEM_REF must be a default definition SSA NAME of a
1298 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1299 whole MEM_REF expression is returned and the offset calculated from any
1300 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1301 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1302
1303 static tree
1304 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1305 {
1306 HOST_WIDE_INT size, max_size;
1307 tree expr, parm, obj;
1308 bool reverse;
1309
1310 if (!gimple_assign_single_p (assign))
1311 return NULL_TREE;
1312 expr = gimple_assign_rhs1 (assign);
1313
1314 if (TREE_CODE (expr) != ADDR_EXPR)
1315 return NULL_TREE;
1316 expr = TREE_OPERAND (expr, 0);
1317 obj = expr;
1318 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
1319
1320 if (TREE_CODE (expr) != MEM_REF
1321 /* If this is a varying address, punt. */
1322 || max_size == -1
1323 || max_size != size
1324 || *offset < 0)
1325 return NULL_TREE;
1326 parm = TREE_OPERAND (expr, 0);
1327 if (TREE_CODE (parm) != SSA_NAME
1328 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1329 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1330 return NULL_TREE;
1331
1332 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1333 *obj_p = obj;
1334 return expr;
1335 }
1336
1337
1338 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1339 statement PHI, try to find out whether NAME is in fact a
1340 multiple-inheritance typecast from a descendant into an ancestor of a formal
1341 parameter and thus can be described by an ancestor jump function and if so,
1342 write the appropriate function into JFUNC.
1343
1344 Essentially we want to match the following pattern:
1345
1346 if (obj_2(D) != 0B)
1347 goto <bb 3>;
1348 else
1349 goto <bb 4>;
1350
1351 <bb 3>:
1352 iftmp.1_3 = &obj_2(D)->D.1762;
1353
1354 <bb 4>:
1355 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1356 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1357 return D.1879_6; */
1358
1359 static void
1360 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1361 struct ipa_node_params *info,
1362 struct ipa_jump_func *jfunc,
1363 gcall *call, gphi *phi)
1364 {
1365 HOST_WIDE_INT offset;
1366 gimple *assign, *cond;
1367 basic_block phi_bb, assign_bb, cond_bb;
1368 tree tmp, parm, expr, obj;
1369 int index, i;
1370
1371 if (gimple_phi_num_args (phi) != 2)
1372 return;
1373
1374 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1375 tmp = PHI_ARG_DEF (phi, 0);
1376 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1377 tmp = PHI_ARG_DEF (phi, 1);
1378 else
1379 return;
1380 if (TREE_CODE (tmp) != SSA_NAME
1381 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1382 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1383 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1384 return;
1385
1386 assign = SSA_NAME_DEF_STMT (tmp);
1387 assign_bb = gimple_bb (assign);
1388 if (!single_pred_p (assign_bb))
1389 return;
1390 expr = get_ancestor_addr_info (assign, &obj, &offset);
1391 if (!expr)
1392 return;
1393 parm = TREE_OPERAND (expr, 0);
1394 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1395 if (index < 0)
1396 return;
1397
1398 cond_bb = single_pred (assign_bb);
1399 cond = last_stmt (cond_bb);
1400 if (!cond
1401 || gimple_code (cond) != GIMPLE_COND
1402 || gimple_cond_code (cond) != NE_EXPR
1403 || gimple_cond_lhs (cond) != parm
1404 || !integer_zerop (gimple_cond_rhs (cond)))
1405 return;
1406
1407 phi_bb = gimple_bb (phi);
1408 for (i = 0; i < 2; i++)
1409 {
1410 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1411 if (pred != assign_bb && pred != cond_bb)
1412 return;
1413 }
1414
1415 ipa_set_ancestor_jf (jfunc, offset, index,
1416 parm_ref_data_pass_through_p (fbi, index, call, parm));
1417 }
1418
1419 /* Inspect the given TYPE and return true iff it has the same structure (the
1420 same number of fields of the same types) as a C++ member pointer. If
1421 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1422 corresponding fields there. */
1423
1424 static bool
1425 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1426 {
1427 tree fld;
1428
1429 if (TREE_CODE (type) != RECORD_TYPE)
1430 return false;
1431
1432 fld = TYPE_FIELDS (type);
1433 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1434 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1435 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1436 return false;
1437
1438 if (method_ptr)
1439 *method_ptr = fld;
1440
1441 fld = DECL_CHAIN (fld);
1442 if (!fld || INTEGRAL_TYPE_P (fld)
1443 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1444 return false;
1445 if (delta)
1446 *delta = fld;
1447
1448 if (DECL_CHAIN (fld))
1449 return false;
1450
1451 return true;
1452 }
1453
1454 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1455 return the rhs of its defining statement. Otherwise return RHS as it
1456 is. */
1457
1458 static inline tree
1459 get_ssa_def_if_simple_copy (tree rhs)
1460 {
1461 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1462 {
1463 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1464
1465 if (gimple_assign_single_p (def_stmt))
1466 rhs = gimple_assign_rhs1 (def_stmt);
1467 else
1468 break;
1469 }
1470 return rhs;
1471 }
1472
1473 /* Simple linked list, describing known contents of an aggregate before a
1474 call. */
1475
1476 struct ipa_known_agg_contents_list
1477 {
1478 /* Offset and size of the described part of the aggregate. */
1479 HOST_WIDE_INT offset, size;
1480 /* Known constant value or NULL if the contents are known to be unknown. */
1481 tree constant;
1482 /* Pointer to the next structure in the list. */
1483 struct ipa_known_agg_contents_list *next;
1484 };
1485
1486 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1487 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
1488 Return NULL if there is a partial overlap with an existing element. If such
1489 an element is already there, set *ALREADY_THERE to true. */
1490
1491 static struct ipa_known_agg_contents_list **
1492 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1493 HOST_WIDE_INT lhs_offset,
1494 HOST_WIDE_INT lhs_size,
1495 bool *already_there)
1496 {
1497 struct ipa_known_agg_contents_list **p = list;
1498 while (*p && (*p)->offset < lhs_offset)
1499 {
1500 if ((*p)->offset + (*p)->size > lhs_offset)
1501 return NULL;
1502 p = &(*p)->next;
1503 }
1504
1505 if (*p && (*p)->offset < lhs_offset + lhs_size)
1506 {
1507 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1508 /* We already know this value is subsequently overwritten with
1509 something else. */
1510 *already_there = true;
1511 else
1512 /* Otherwise this is a partial overlap which we cannot
1513 represent. */
1514 return NULL;
1515 }
1516 return p;
1517 }
1518
1519 /* Build aggregate jump function from LIST, assuming there are exactly
1520 CONST_COUNT constant entries there and that the offset of the passed argument
1521 is ARG_OFFSET and store it into JFUNC. */
1522
1523 static void
1524 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1525 int const_count, HOST_WIDE_INT arg_offset,
1526 struct ipa_jump_func *jfunc)
1527 {
1528 vec_alloc (jfunc->agg.items, const_count);
1529 while (list)
1530 {
1531 if (list->constant)
1532 {
1533 struct ipa_agg_jf_item item;
1534 item.offset = list->offset - arg_offset;
1535 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1536 item.value = unshare_expr_without_location (list->constant);
1537 jfunc->agg.items->quick_push (item);
1538 }
1539 list = list->next;
1540 }
1541 }
1542
1543 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1544 in ARG is filled in with constant values. ARG can either be an aggregate
1545 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1546 aggregate. JFUNC is the jump function into which the constants are
1547 subsequently stored. */
1548
1549 static void
1550 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1551 tree arg_type,
1552 struct ipa_jump_func *jfunc)
1553 {
1554 struct ipa_known_agg_contents_list *list = NULL;
1555 int item_count = 0, const_count = 0;
1556 HOST_WIDE_INT arg_offset, arg_size;
1557 gimple_stmt_iterator gsi;
1558 tree arg_base;
1559 bool check_ref, by_ref;
1560 ao_ref r;
1561
1562 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1563 return;
1564
1565 /* The function operates in three stages. First, we prepare check_ref, r,
1566 arg_base and arg_offset based on what is actually passed as an actual
1567 argument. */
1568
1569 if (POINTER_TYPE_P (arg_type))
1570 {
1571 by_ref = true;
1572 if (TREE_CODE (arg) == SSA_NAME)
1573 {
1574 tree type_size;
1575 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1576 return;
1577 check_ref = true;
1578 arg_base = arg;
1579 arg_offset = 0;
1580 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1581 arg_size = tree_to_uhwi (type_size);
1582 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1583 }
1584 else if (TREE_CODE (arg) == ADDR_EXPR)
1585 {
1586 HOST_WIDE_INT arg_max_size;
1587 bool reverse;
1588
1589 arg = TREE_OPERAND (arg, 0);
1590 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1591 &arg_max_size, &reverse);
1592 if (arg_max_size == -1
1593 || arg_max_size != arg_size
1594 || arg_offset < 0)
1595 return;
1596 if (DECL_P (arg_base))
1597 {
1598 check_ref = false;
1599 ao_ref_init (&r, arg_base);
1600 }
1601 else
1602 return;
1603 }
1604 else
1605 return;
1606 }
1607 else
1608 {
1609 HOST_WIDE_INT arg_max_size;
1610 bool reverse;
1611
1612 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1613
1614 by_ref = false;
1615 check_ref = false;
1616 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1617 &arg_max_size, &reverse);
1618 if (arg_max_size == -1
1619 || arg_max_size != arg_size
1620 || arg_offset < 0)
1621 return;
1622
1623 ao_ref_init (&r, arg);
1624 }
1625
1626 /* Second stage walks back the BB, looks at individual statements and as long
1627 as it is confident of how the statements affect contents of the
1628 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1629 describing it. */
1630 gsi = gsi_for_stmt (call);
1631 gsi_prev (&gsi);
1632 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1633 {
1634 struct ipa_known_agg_contents_list *n, **p;
1635 gimple *stmt = gsi_stmt (gsi);
1636 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1637 tree lhs, rhs, lhs_base;
1638 bool reverse;
1639
1640 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1641 continue;
1642 if (!gimple_assign_single_p (stmt))
1643 break;
1644
1645 lhs = gimple_assign_lhs (stmt);
1646 rhs = gimple_assign_rhs1 (stmt);
1647 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1648 || TREE_CODE (lhs) == BIT_FIELD_REF
1649 || contains_bitfld_component_ref_p (lhs))
1650 break;
1651
1652 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1653 &lhs_max_size, &reverse);
1654 if (lhs_max_size == -1
1655 || lhs_max_size != lhs_size)
1656 break;
1657
1658 if (check_ref)
1659 {
1660 if (TREE_CODE (lhs_base) != MEM_REF
1661 || TREE_OPERAND (lhs_base, 0) != arg_base
1662 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1663 break;
1664 }
1665 else if (lhs_base != arg_base)
1666 {
1667 if (DECL_P (lhs_base))
1668 continue;
1669 else
1670 break;
1671 }
1672
1673 bool already_there = false;
1674 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1675 &already_there);
1676 if (!p)
1677 break;
1678 if (already_there)
1679 continue;
1680
1681 rhs = get_ssa_def_if_simple_copy (rhs);
1682 n = XALLOCA (struct ipa_known_agg_contents_list);
1683 n->size = lhs_size;
1684 n->offset = lhs_offset;
1685 if (is_gimple_ip_invariant (rhs))
1686 {
1687 n->constant = rhs;
1688 const_count++;
1689 }
1690 else
1691 n->constant = NULL_TREE;
1692 n->next = *p;
1693 *p = n;
1694
1695 item_count++;
1696 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1697 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1698 break;
1699 }
1700
1701 /* Third stage just goes over the list and creates an appropriate vector of
1702 ipa_agg_jf_item structures out of it, of course only if there are
1703 any known constants to begin with. */
1704
1705 if (const_count)
1706 {
1707 jfunc->agg.by_ref = by_ref;
1708 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1709 }
1710 }
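/* An illustrative example (with a made-up field layout) of what the function
   above recognizes:

     struct S { int a; int b; };

     struct S s;
     s.a = 1;
     s.b = 2;
     use_s (&s);

   Assuming 32-bit ints, the jump function for the argument would receive
   by_ref == true and two aggregate items: offset 0 with constant 1 and
   offset 32 with constant 2.  */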
1711
1712 /* Return the Ith param type of callee associated with call graph
1713 edge E. */
1714
1715 tree
1716 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1717 {
1718 int n;
1719 tree type = (e->callee
1720 ? TREE_TYPE (e->callee->decl)
1721 : gimple_call_fntype (e->call_stmt));
1722 tree t = TYPE_ARG_TYPES (type);
1723
1724 for (n = 0; n < i; n++)
1725 {
1726 if (!t)
1727 break;
1728 t = TREE_CHAIN (t);
1729 }
1730 if (t)
1731 return TREE_VALUE (t);
1732 if (!e->callee)
1733 return NULL;
1734 t = DECL_ARGUMENTS (e->callee->decl);
1735 for (n = 0; n < i; n++)
1736 {
1737 if (!t)
1738 return NULL;
1739 t = TREE_CHAIN (t);
1740 }
1741 if (t)
1742 return TREE_TYPE (t);
1743 return NULL;
1744 }
1745
1746 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1747 allocated structure or a previously existing one shared with other jump
1748 functions and/or transformation summaries. */
1749
1750 ipa_bits *
1751 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1752 {
1753 ipa_bits tmp;
1754 tmp.value = value;
1755 tmp.mask = mask;
1756
1757 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1758 if (*slot)
1759 return *slot;
1760
1761 ipa_bits *res = ggc_alloc<ipa_bits> ();
1762 res->value = value;
1763 res->mask = mask;
1764 *slot = res;
1765
1766 return res;
1767 }
1768
1769 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use a
1770 hash table in order to avoid creating multiple identical ipa_bits structures. */
1771
1772 static void
1773 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1774 const widest_int &mask)
1775 {
1776 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1777 }
1778
1779 /* Return a pointer to a value_range just like *TMP, but either find it in
1780 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1781
1782 static value_range *
1783 ipa_get_value_range (value_range *tmp)
1784 {
1785 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1786 if (*slot)
1787 return *slot;
1788
1789 value_range *vr = ggc_alloc<value_range> ();
1790 *vr = *tmp;
1791 *slot = vr;
1792
1793 return vr;
1794 }
1795
1796 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1797 equiv set. Use a hash table in order to avoid creating multiple identical
1798 copies of value_ranges. */
1799
1800 static value_range *
1801 ipa_get_value_range (enum value_range_type type, tree min, tree max)
1802 {
1803 value_range tmp;
1804 tmp.type = type;
1805 tmp.min = min;
1806 tmp.max = max;
1807 tmp.equiv = NULL;
1808 return ipa_get_value_range (&tmp);
1809 }
1810
1811 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1812 a NULL equiv bitmap. Use a hash table in order to avoid creating multiple
1813 identical value_range structures. */
1814
1815 static void
1816 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1817 tree min, tree max)
1818 {
1819 jf->m_vr = ipa_get_value_range (type, min, max);
1820 }
1821
1822 /* Assign to JF a pointer to a value_range just like *TMP but either fetch a
1823 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
1824
1825 static void
1826 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1827 {
1828 jf->m_vr = ipa_get_value_range (tmp);
1829 }
1830
1831 /* Compute jump functions for all arguments of callsite CS and insert the
1832 information in the jump_functions array in the ipa_edge_args corresponding
1833 to this callsite. */
1834
1835 static void
1836 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1837 struct cgraph_edge *cs)
1838 {
1839 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1840 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1841 gcall *call = cs->call_stmt;
1842 int n, arg_num = gimple_call_num_args (call);
1843 bool useful_context = false;
1844
1845 if (arg_num == 0 || args->jump_functions)
1846 return;
1847 vec_safe_grow_cleared (args->jump_functions, arg_num);
1848 if (flag_devirtualize)
1849 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1850
1851 if (gimple_call_internal_p (call))
1852 return;
1853 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1854 return;
1855
1856 for (n = 0; n < arg_num; n++)
1857 {
1858 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1859 tree arg = gimple_call_arg (call, n);
1860 tree param_type = ipa_get_callee_param_type (cs, n);
1861 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1862 {
1863 tree instance;
1864 struct ipa_polymorphic_call_context context (cs->caller->decl,
1865 arg, cs->call_stmt,
1866 &instance);
1867 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1868 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1869 if (!context.useless_p ())
1870 useful_context = true;
1871 }
1872
1873 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1874 {
1875 bool addr_nonzero = false;
1876 bool strict_overflow = false;
1877
1878 if (TREE_CODE (arg) == SSA_NAME
1879 && param_type
1880 && get_ptr_nonnull (arg))
1881 addr_nonzero = true;
1882 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1883 addr_nonzero = true;
1884
1885 if (addr_nonzero)
1886 {
1887 tree z = build_int_cst (TREE_TYPE (arg), 0);
1888 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1889 }
1890 else
1891 gcc_assert (!jfunc->m_vr);
1892 }
1893 else
1894 {
1895 wide_int min, max;
1896 value_range_type type;
1897 if (TREE_CODE (arg) == SSA_NAME
1898 && param_type
1899 && (type = get_range_info (arg, &min, &max))
1900 && (type == VR_RANGE || type == VR_ANTI_RANGE))
1901 {
1902 value_range tmpvr, resvr;
1903
1904 tmpvr.type = type;
1905 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1906 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1907 tmpvr.equiv = NULL;
1908 memset (&resvr, 0, sizeof (resvr));
1909 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1910 &tmpvr, TREE_TYPE (arg));
1911 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1912 ipa_set_jfunc_vr (jfunc, &resvr);
1913 else
1914 gcc_assert (!jfunc->m_vr);
1915 }
1916 else
1917 gcc_assert (!jfunc->m_vr);
1918 }
1919
1920 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1921 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1922 {
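/* Exposition comment (added, not in the original sources): in the ipa_bits
   encoding a zero bit in the mask means the corresponding bit of the value
   is known, a set bit means it is unknown.  Hence an SSA name gets value 0
   with mask taken from get_nonzero_bits (known-zero bits are clear in the
   mask), while an INTEGER_CST gets itself as value and an all-zero mask
   (every bit known).  */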
1923 if (TREE_CODE (arg) == SSA_NAME)
1924 ipa_set_jfunc_bits (jfunc, 0,
1925 widest_int::from (get_nonzero_bits (arg),
1926 TYPE_SIGN (TREE_TYPE (arg))));
1927 else
1928 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1929 }
1930 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1931 {
1932 unsigned HOST_WIDE_INT bitpos;
1933 unsigned align;
1934
1935 get_pointer_alignment_1 (arg, &align, &bitpos);
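/* Exposition comment (added, not in the original sources): ALIGN and
   BITPOS are in bits here, so the code below records the low
   log2 (align / BITS_PER_UNIT) bits of the pointer as known.  For example,
   a pointer known to be 16-byte aligned plus 4 bytes yields value 4 and a
   mask with the low four bits cleared.  */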
1936 widest_int mask
1937 = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1938 .and_not (align / BITS_PER_UNIT - 1);
1939 widest_int value = bitpos / BITS_PER_UNIT;
1940 ipa_set_jfunc_bits (jfunc, value, mask);
1941 }
1942 else
1943 gcc_assert (!jfunc->bits);
1944
1945 if (is_gimple_ip_invariant (arg)
1946 || (VAR_P (arg)
1947 && is_global_var (arg)
1948 && TREE_READONLY (arg)))
1949 ipa_set_jf_constant (jfunc, arg, cs);
1950 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1951 && TREE_CODE (arg) == PARM_DECL)
1952 {
1953 int index = ipa_get_param_decl_index (info, arg);
1954
1955 gcc_assert (index >= 0);
1956 /* Aggregate passed by value, check for pass-through, otherwise we
1957 will attempt to fill in aggregate contents later in this
1958 loop. */
1959 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1960 {
1961 ipa_set_jf_simple_pass_through (jfunc, index, false);
1962 continue;
1963 }
1964 }
1965 else if (TREE_CODE (arg) == SSA_NAME)
1966 {
1967 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1968 {
1969 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1970 if (index >= 0)
1971 {
1972 bool agg_p;
1973 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1974 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1975 }
1976 }
1977 else
1978 {
1979 gimple *stmt = SSA_NAME_DEF_STMT (arg);
1980 if (is_gimple_assign (stmt))
1981 compute_complex_assign_jump_func (fbi, info, jfunc,
1982 call, stmt, arg, param_type);
1983 else if (gimple_code (stmt) == GIMPLE_PHI)
1984 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1985 call,
1986 as_a <gphi *> (stmt));
1987 }
1988 }
1989
1990 /* If ARG is a pointer, we cannot use its type to determine the type of the
1991 aggregate passed (because type conversions are ignored in gimple). Usually
1992 we can safely get the type from the function declaration, but in the case of
1993 K&R prototypes or variadic functions we can try our luck with the type of the
1994 pointer passed. TODO: Since we look for actual initialization of the memory
1995 object, it might be better to derive the type from the memory stores we find. */
1996 if (!param_type)
1997 param_type = TREE_TYPE (arg);
1998
1999 if ((jfunc->type != IPA_JF_PASS_THROUGH
2000 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2001 && (jfunc->type != IPA_JF_ANCESTOR
2002 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2003 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2004 || POINTER_TYPE_P (param_type)))
2005 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
2006 }
2007 if (!useful_context)
2008 vec_free (args->polymorphic_call_contexts);
2009 }
2010
2011 /* Compute jump functions for all edges - both direct and indirect - outgoing
2012 from BB. */
2013
2014 static void
2015 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2016 {
2017 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2018 int i;
2019 struct cgraph_edge *cs;
2020
2021 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2022 {
2023 struct cgraph_node *callee = cs->callee;
2024
2025 if (callee)
2026 {
2027 callee->ultimate_alias_target ();
2028 /* We do not need to bother analyzing calls to unknown functions
2029 unless they may become known during lto/whopr. */
2030 if (!callee->definition && !flag_lto)
2031 continue;
2032 }
2033 ipa_compute_jump_functions_for_edge (fbi, cs);
2034 }
2035 }
2036
2037 /* If STMT looks like a statement loading a value from a member pointer formal
2038 parameter, return that parameter and store the offset of the field to
2039 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2040 might be clobbered). If USE_DELTA, then we look for a use of the delta
2041 field rather than the pfn. */
2042
2043 static tree
2044 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2045 HOST_WIDE_INT *offset_p)
2046 {
2047 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2048
2049 if (!gimple_assign_single_p (stmt))
2050 return NULL_TREE;
2051
2052 rhs = gimple_assign_rhs1 (stmt);
2053 if (TREE_CODE (rhs) == COMPONENT_REF)
2054 {
2055 ref_field = TREE_OPERAND (rhs, 1);
2056 rhs = TREE_OPERAND (rhs, 0);
2057 }
2058 else
2059 ref_field = NULL_TREE;
2060 if (TREE_CODE (rhs) != MEM_REF)
2061 return NULL_TREE;
2062 rec = TREE_OPERAND (rhs, 0);
2063 if (TREE_CODE (rec) != ADDR_EXPR)
2064 return NULL_TREE;
2065 rec = TREE_OPERAND (rec, 0);
2066 if (TREE_CODE (rec) != PARM_DECL
2067 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2068 return NULL_TREE;
2069 ref_offset = TREE_OPERAND (rhs, 1);
2070
2071 if (use_delta)
2072 fld = delta_field;
2073 else
2074 fld = ptr_field;
2075 if (offset_p)
2076 *offset_p = int_bit_position (fld);
2077
2078 if (ref_field)
2079 {
2080 if (integer_nonzerop (ref_offset))
2081 return NULL_TREE;
2082 return ref_field == fld ? rec : NULL_TREE;
2083 }
2084 else
2085 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2086 : NULL_TREE;
2087 }
2088
2089 /* Returns true iff T is an SSA_NAME defined by a statement. */
2090
2091 static bool
2092 ipa_is_ssa_with_stmt_def (tree t)
2093 {
2094 if (TREE_CODE (t) == SSA_NAME
2095 && !SSA_NAME_IS_DEFAULT_DEF (t))
2096 return true;
2097 else
2098 return false;
2099 }
2100
2101 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2102 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2103 indirect call graph edge. */
2104
2105 static struct cgraph_edge *
2106 ipa_note_param_call (struct cgraph_node *node, int param_index,
2107 gcall *stmt)
2108 {
2109 struct cgraph_edge *cs;
2110
2111 cs = node->get_edge (stmt);
2112 cs->indirect_info->param_index = param_index;
2113 cs->indirect_info->agg_contents = 0;
2114 cs->indirect_info->member_ptr = 0;
2115 cs->indirect_info->guaranteed_unmodified = 0;
2116 return cs;
2117 }
2118
2119 /* Analyze the CALL and examine uses of formal parameters of the caller
2120 FBI->node (described by FBI->info). FBI also carries intermediate
2121 information about each formal parameter. Currently the function checks
2122 whether the call calls a pointer that is a formal parameter and if so, the
2123 parameter is marked with the called flag and an indirect call graph edge
2124 describing the call is created. This is very simple for ordinary pointers
2125 represented in SSA but not-so-nice when it comes to member pointers. The
2126 ugly part of this function does nothing more than trying to match the
2127 pattern of such a call. An example of such a pattern is the gimple dump
2128 below, the call is on the last line:
2129
2130 <bb 2>:
2131 f$__delta_5 = f.__delta;
2132 f$__pfn_24 = f.__pfn;
2133
2134 or
2135 <bb 2>:
2136 f$__delta_5 = MEM[(struct *)&f];
2137 f$__pfn_24 = MEM[(struct *)&f + 4B];
2138
2139 and a few lines below:
2140
2141 <bb 5>
2142 D.2496_3 = (int) f$__pfn_24;
2143 D.2497_4 = D.2496_3 & 1;
2144 if (D.2497_4 != 0)
2145 goto <bb 3>;
2146 else
2147 goto <bb 4>;
2148
2149 <bb 6>:
2150 D.2500_7 = (unsigned int) f$__delta_5;
2151 D.2501_8 = &S + D.2500_7;
2152 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2153 D.2503_10 = *D.2502_9;
2154 D.2504_12 = f$__pfn_24 + -1;
2155 D.2505_13 = (unsigned int) D.2504_12;
2156 D.2506_14 = D.2503_10 + D.2505_13;
2157 D.2507_15 = *D.2506_14;
2158 iftmp.11_16 = (String:: *) D.2507_15;
2159
2160 <bb 7>:
2161 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2162 D.2500_19 = (unsigned int) f$__delta_5;
2163 D.2508_20 = &S + D.2500_19;
2164 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2165
2166 Such patterns are results of simple calls to a member pointer:
2167
2168 int doprinting (int (MyString::* f)(int) const)
2169 {
2170 MyString S ("somestring");
2171
2172 return (S.*f)(4);
2173 }
2174
2175 Moreover, the function also looks for called pointers loaded from aggregates
2176 passed by value or reference. */
2177
2178 static void
2179 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2180 tree target)
2181 {
2182 struct ipa_node_params *info = fbi->info;
2183 HOST_WIDE_INT offset;
2184 bool by_ref;
2185
2186 if (SSA_NAME_IS_DEFAULT_DEF (target))
2187 {
2188 tree var = SSA_NAME_VAR (target);
2189 int index = ipa_get_param_decl_index (info, var);
2190 if (index >= 0)
2191 ipa_note_param_call (fbi->node, index, call);
2192 return;
2193 }
2194
2195 int index;
2196 gimple *def = SSA_NAME_DEF_STMT (target);
2197 bool guaranteed_unmodified;
2198 if (gimple_assign_single_p (def)
2199 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2200 gimple_assign_rhs1 (def), &index, &offset,
2201 NULL, &by_ref, &guaranteed_unmodified))
2202 {
2203 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2204 cs->indirect_info->offset = offset;
2205 cs->indirect_info->agg_contents = 1;
2206 cs->indirect_info->by_ref = by_ref;
2207 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2208 return;
2209 }
2210
2211 /* Now we need to try to match the complex pattern of calling a member
2212 pointer. */
2213 if (gimple_code (def) != GIMPLE_PHI
2214 || gimple_phi_num_args (def) != 2
2215 || !POINTER_TYPE_P (TREE_TYPE (target))
2216 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2217 return;
2218
2219 /* First, we need to check whether one of these is a load from a member
2220 pointer that is a parameter to this function. */
2221 tree n1 = PHI_ARG_DEF (def, 0);
2222 tree n2 = PHI_ARG_DEF (def, 1);
2223 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2224 return;
2225 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2226 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2227
2228 tree rec;
2229 basic_block bb, virt_bb;
2230 basic_block join = gimple_bb (def);
2231 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2232 {
2233 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2234 return;
2235
2236 bb = EDGE_PRED (join, 0)->src;
2237 virt_bb = gimple_bb (d2);
2238 }
2239 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2240 {
2241 bb = EDGE_PRED (join, 1)->src;
2242 virt_bb = gimple_bb (d1);
2243 }
2244 else
2245 return;
2246
2247 /* Second, we need to check that the basic blocks are laid out in the way
2248 corresponding to the pattern. */
2249
2250 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2251 || single_pred (virt_bb) != bb
2252 || single_succ (virt_bb) != join)
2253 return;
2254
2255 /* Third, let's see that the branching is done depending on the least
2256 significant bit of the pfn. */
2257
2258 gimple *branch = last_stmt (bb);
2259 if (!branch || gimple_code (branch) != GIMPLE_COND)
2260 return;
2261
2262 if ((gimple_cond_code (branch) != NE_EXPR
2263 && gimple_cond_code (branch) != EQ_EXPR)
2264 || !integer_zerop (gimple_cond_rhs (branch)))
2265 return;
2266
2267 tree cond = gimple_cond_lhs (branch);
2268 if (!ipa_is_ssa_with_stmt_def (cond))
2269 return;
2270
2271 def = SSA_NAME_DEF_STMT (cond);
2272 if (!is_gimple_assign (def)
2273 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2274 || !integer_onep (gimple_assign_rhs2 (def)))
2275 return;
2276
2277 cond = gimple_assign_rhs1 (def);
2278 if (!ipa_is_ssa_with_stmt_def (cond))
2279 return;
2280
2281 def = SSA_NAME_DEF_STMT (cond);
2282
2283 if (is_gimple_assign (def)
2284 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2285 {
2286 cond = gimple_assign_rhs1 (def);
2287 if (!ipa_is_ssa_with_stmt_def (cond))
2288 return;
2289 def = SSA_NAME_DEF_STMT (cond);
2290 }
2291
2292 tree rec2;
2293 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2294 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2295 == ptrmemfunc_vbit_in_delta),
2296 NULL);
2297 if (rec != rec2)
2298 return;
2299
2300 index = ipa_get_param_decl_index (info, rec);
2301 if (index >= 0
2302 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2303 {
2304 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2305 cs->indirect_info->offset = offset;
2306 cs->indirect_info->agg_contents = 1;
2307 cs->indirect_info->member_ptr = 1;
2308 cs->indirect_info->guaranteed_unmodified = 1;
2309 }
2310
2311 return;
2312 }
2313
2314 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2315 object referenced in the expression is a formal parameter of the caller
2316 FBI->node (described by FBI->info), create a call note for the
2317 statement. */
2318
2319 static void
2320 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2321 gcall *call, tree target)
2322 {
2323 tree obj = OBJ_TYPE_REF_OBJECT (target);
2324 int index;
2325 HOST_WIDE_INT anc_offset;
2326
2327 if (!flag_devirtualize)
2328 return;
2329
2330 if (TREE_CODE (obj) != SSA_NAME)
2331 return;
2332
2333 struct ipa_node_params *info = fbi->info;
2334 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2335 {
2336 struct ipa_jump_func jfunc;
2337 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2338 return;
2339
2340 anc_offset = 0;
2341 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2342 gcc_assert (index >= 0);
2343 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2344 call, &jfunc))
2345 return;
2346 }
2347 else
2348 {
2349 struct ipa_jump_func jfunc;
2350 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2351 tree expr;
2352
2353 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2354 if (!expr)
2355 return;
2356 index = ipa_get_param_decl_index (info,
2357 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2358 gcc_assert (index >= 0);
2359 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2360 call, &jfunc, anc_offset))
2361 return;
2362 }
2363
2364 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2365 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2366 ii->offset = anc_offset;
2367 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2368 ii->otr_type = obj_type_ref_class (target);
2369 ii->polymorphic = 1;
2370 }
2371
2372 /* Analyze a call statement CALL to see whether and how it utilizes formal
2373 parameters of the caller FBI->node (described by FBI->info), which also
2374 carries intermediate information about each formal parameter. */
2375
2376 static void
2377 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2378 {
2379 tree target = gimple_call_fn (call);
2380
2381 if (!target
2382 || (TREE_CODE (target) != SSA_NAME
2383 && !virtual_method_call_p (target)))
2384 return;
2385
2386 struct cgraph_edge *cs = fbi->node->get_edge (call);
2387 /* If we previously turned the call into a direct call, there is
2388 no need to analyze. */
2389 if (cs && !cs->indirect_unknown_callee)
2390 return;
2391
2392 if (cs->indirect_info->polymorphic && flag_devirtualize)
2393 {
2394 tree instance;
2395 tree target = gimple_call_fn (call);
2396 ipa_polymorphic_call_context context (current_function_decl,
2397 target, call, &instance);
2398
2399 gcc_checking_assert (cs->indirect_info->otr_type
2400 == obj_type_ref_class (target));
2401 gcc_checking_assert (cs->indirect_info->otr_token
2402 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2403
2404 cs->indirect_info->vptr_changed
2405 = !context.get_dynamic_type (instance,
2406 OBJ_TYPE_REF_OBJECT (target),
2407 obj_type_ref_class (target), call);
2408 cs->indirect_info->context = context;
2409 }
2410
2411 if (TREE_CODE (target) == SSA_NAME)
2412 ipa_analyze_indirect_call_uses (fbi, call, target);
2413 else if (virtual_method_call_p (target))
2414 ipa_analyze_virtual_call_uses (fbi, call, target);
2415 }
2416
2417
2418 /* Analyze the call statement STMT with respect to formal parameters (described
2419 in FBI->info) of the caller given by FBI->NODE. Currently it only checks whether
2420 formal parameters are called. */
2421
2422 static void
2423 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2424 {
2425 if (is_gimple_call (stmt))
2426 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2427 }
2428
2429 /* Callback of walk_stmt_load_store_addr_ops, used as visit_load, visit_store
2430 and visit_addr. If OP is a parameter declaration, mark it as used in the
2431 info structure passed in DATA. */
2432
2433 static bool
2434 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2435 {
2436 struct ipa_node_params *info = (struct ipa_node_params *) data;
2437
2438 op = get_base_address (op);
2439 if (op
2440 && TREE_CODE (op) == PARM_DECL)
2441 {
2442 int index = ipa_get_param_decl_index (info, op);
2443 gcc_assert (index >= 0);
2444 ipa_set_param_used (info, index, true);
2445 }
2446
2447 return false;
2448 }
2449
2450 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2451 the findings in various structures of the associated ipa_node_params
2452 structure, such as parameter flags, notes etc. FBI holds various data about
2453 the function being analyzed. */
2454
2455 static void
2456 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2457 {
2458 gimple_stmt_iterator gsi;
2459 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2460 {
2461 gimple *stmt = gsi_stmt (gsi);
2462
2463 if (is_gimple_debug (stmt))
2464 continue;
2465
2466 ipa_analyze_stmt_uses (fbi, stmt);
2467 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2468 visit_ref_for_mod_analysis,
2469 visit_ref_for_mod_analysis,
2470 visit_ref_for_mod_analysis);
2471 }
2472 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2473 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2474 visit_ref_for_mod_analysis,
2475 visit_ref_for_mod_analysis,
2476 visit_ref_for_mod_analysis);
2477 }
2478
2479 /* Calculate controlled uses of parameters of NODE. */
2480
2481 static void
2482 ipa_analyze_controlled_uses (struct cgraph_node *node)
2483 {
2484 struct ipa_node_params *info = IPA_NODE_REF (node);
2485
2486 for (int i = 0; i < ipa_get_param_count (info); i++)
2487 {
2488 tree parm = ipa_get_param (info, i);
2489 int controlled_uses = 0;
2490
2491 /* For SSA regs see if parameter is used. For non-SSA we compute
2492 the flag during modification analysis. */
2493 if (is_gimple_reg (parm))
2494 {
2495 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2496 parm);
2497 if (ddef && !has_zero_uses (ddef))
2498 {
2499 imm_use_iterator imm_iter;
2500 use_operand_p use_p;
2501
2502 ipa_set_param_used (info, i, true);
2503 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2504 if (!is_gimple_call (USE_STMT (use_p)))
2505 {
2506 if (!is_gimple_debug (USE_STMT (use_p)))
2507 {
2508 controlled_uses = IPA_UNDESCRIBED_USE;
2509 break;
2510 }
2511 }
2512 else
2513 controlled_uses++;
2514 }
2515 else
2516 controlled_uses = 0;
2517 }
2518 else
2519 controlled_uses = IPA_UNDESCRIBED_USE;
2520 ipa_set_controlled_uses (info, i, controlled_uses);
2521 }
2522 }
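/* Clarifying example (added, not in the original sources): a parameter FN
   whose default-def SSA name is used only inside call statements (e.g. as
   the called function in fn (x) or as an argument) gets a controlled-uses
   count equal to the number of such uses; a single other non-debug use,
   say storing FN into memory, turns the count into IPA_UNDESCRIBED_USE.  */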
2523
2524 /* Free stuff in BI. */
2525
2526 static void
2527 free_ipa_bb_info (struct ipa_bb_info *bi)
2528 {
2529 bi->cg_edges.release ();
2530 bi->param_aa_statuses.release ();
2531 }
2532
2533 /* Dominator walker driving the analysis. */
2534
2535 class analysis_dom_walker : public dom_walker
2536 {
2537 public:
2538 analysis_dom_walker (struct ipa_func_body_info *fbi)
2539 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2540
2541 virtual edge before_dom_children (basic_block);
2542
2543 private:
2544 struct ipa_func_body_info *m_fbi;
2545 };
2546
2547 edge
2548 analysis_dom_walker::before_dom_children (basic_block bb)
2549 {
2550 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2551 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2552 return NULL;
2553 }
2554
2555 /* Release body info FBI. */
2556
2557 void
2558 ipa_release_body_info (struct ipa_func_body_info *fbi)
2559 {
2560 int i;
2561 struct ipa_bb_info *bi;
2562
2563 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2564 free_ipa_bb_info (bi);
2565 fbi->bb_infos.release ();
2566 }
2567
2568 /* Initialize the array describing properties of formal parameters
2569 of NODE, analyze their uses and compute jump functions associated
2570 with actual arguments of calls from within NODE. */
2571
2572 void
2573 ipa_analyze_node (struct cgraph_node *node)
2574 {
2575 struct ipa_func_body_info fbi;
2576 struct ipa_node_params *info;
2577
2578 ipa_check_create_node_params ();
2579 ipa_check_create_edge_args ();
2580 info = IPA_NODE_REF (node);
2581
2582 if (info->analysis_done)
2583 return;
2584 info->analysis_done = 1;
2585
2586 if (ipa_func_spec_opts_forbid_analysis_p (node))
2587 {
2588 for (int i = 0; i < ipa_get_param_count (info); i++)
2589 {
2590 ipa_set_param_used (info, i, true);
2591 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2592 }
2593 return;
2594 }
2595
2596 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2597 push_cfun (func);
2598 calculate_dominance_info (CDI_DOMINATORS);
2599 ipa_initialize_node_params (node);
2600 ipa_analyze_controlled_uses (node);
2601
2602 fbi.node = node;
2603 fbi.info = IPA_NODE_REF (node);
2604 fbi.bb_infos = vNULL;
2605 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2606 fbi.param_count = ipa_get_param_count (info);
2607 fbi.aa_walked = 0;
2608
2609 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2610 {
2611 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2612 bi->cg_edges.safe_push (cs);
2613 }
2614
2615 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2616 {
2617 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2618 bi->cg_edges.safe_push (cs);
2619 }
2620
2621 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2622
2623 ipa_release_body_info (&fbi);
2624 free_dominance_info (CDI_DOMINATORS);
2625 pop_cfun ();
2626 }
2627
2628 /* Update the jump functions associated with call graph edge E when the call
2629 graph edge CS is being inlined, assuming that E->caller is already (possibly
2630 indirectly) inlined into CS->callee and that E has not been inlined. */
2631
2632 static void
2633 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2634 struct cgraph_edge *e)
2635 {
2636 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2637 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2638 int count = ipa_get_cs_argument_count (args);
2639 int i;
2640
2641 for (i = 0; i < count; i++)
2642 {
2643 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2644 struct ipa_polymorphic_call_context *dst_ctx
2645 = ipa_get_ith_polymorhic_call_context (args, i);
2646
2647 if (dst->type == IPA_JF_ANCESTOR)
2648 {
2649 struct ipa_jump_func *src;
2650 int dst_fid = dst->value.ancestor.formal_id;
2651 struct ipa_polymorphic_call_context *src_ctx
2652 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2653
2654 /* Variable number of arguments can cause havoc if we try to access
2655 one that does not exist in the inlined edge. So make sure we
2656 don't. */
2657 if (dst_fid >= ipa_get_cs_argument_count (top))
2658 {
2659 ipa_set_jf_unknown (dst);
2660 continue;
2661 }
2662
2663 src = ipa_get_ith_jump_func (top, dst_fid);
2664
2665 if (src_ctx && !src_ctx->useless_p ())
2666 {
2667 struct ipa_polymorphic_call_context ctx = *src_ctx;
2668
2669 /* TODO: Make type preserved safe WRT contexts. */
2670 if (!ipa_get_jf_ancestor_type_preserved (dst))
2671 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2672 ctx.offset_by (dst->value.ancestor.offset);
2673 if (!ctx.useless_p ())
2674 {
2675 if (!dst_ctx)
2676 {
2677 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2678 count);
2679 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2680 }
2681
2682 dst_ctx->combine_with (ctx);
2683 }
2684 }
2685
2686 if (src->agg.items
2687 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2688 {
2689 struct ipa_agg_jf_item *item;
2690 int j;
2691
2692 /* Currently we do not produce clobber aggregate jump functions,
2693 replace with merging when we do. */
2694 gcc_assert (!dst->agg.items);
2695
2696 dst->agg.items = vec_safe_copy (src->agg.items);
2697 dst->agg.by_ref = src->agg.by_ref;
2698 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2699 item->offset -= dst->value.ancestor.offset;
2700 }
2701
2702 if (src->type == IPA_JF_PASS_THROUGH
2703 && src->value.pass_through.operation == NOP_EXPR)
2704 {
2705 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2706 dst->value.ancestor.agg_preserved &=
2707 src->value.pass_through.agg_preserved;
2708 }
2709 else if (src->type == IPA_JF_PASS_THROUGH
2710 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2711 {
2712 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2713 dst->value.ancestor.agg_preserved = false;
2714 }
2715 else if (src->type == IPA_JF_ANCESTOR)
2716 {
2717 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2718 dst->value.ancestor.offset += src->value.ancestor.offset;
2719 dst->value.ancestor.agg_preserved &=
2720 src->value.ancestor.agg_preserved;
2721 }
2722 else
2723 ipa_set_jf_unknown (dst);
2724 }
2725 else if (dst->type == IPA_JF_PASS_THROUGH)
2726 {
2727 struct ipa_jump_func *src;
2728 /* We must check range due to calls with variable number of arguments
2729 and we cannot combine jump functions with operations. */
2730 if (dst->value.pass_through.operation == NOP_EXPR
2731 && (dst->value.pass_through.formal_id
2732 < ipa_get_cs_argument_count (top)))
2733 {
2734 int dst_fid = dst->value.pass_through.formal_id;
2735 src = ipa_get_ith_jump_func (top, dst_fid);
2736 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2737 struct ipa_polymorphic_call_context *src_ctx
2738 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2739
2740 if (src_ctx && !src_ctx->useless_p ())
2741 {
2742 struct ipa_polymorphic_call_context ctx = *src_ctx;
2743
2744 /* TODO: Make type preserved safe WRT contexts. */
2745 if (!ipa_get_jf_pass_through_type_preserved (dst))
2746 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2747 if (!ctx.useless_p ())
2748 {
2749 if (!dst_ctx)
2750 {
2751 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2752 count);
2753 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2754 }
2755 dst_ctx->combine_with (ctx);
2756 }
2757 }
2758 switch (src->type)
2759 {
2760 case IPA_JF_UNKNOWN:
2761 ipa_set_jf_unknown (dst);
2762 break;
2763 case IPA_JF_CONST:
2764 ipa_set_jf_cst_copy (dst, src);
2765 break;
2766
2767 case IPA_JF_PASS_THROUGH:
2768 {
2769 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2770 enum tree_code operation;
2771 operation = ipa_get_jf_pass_through_operation (src);
2772
2773 if (operation == NOP_EXPR)
2774 {
2775 bool agg_p;
2776 agg_p = dst_agg_p
2777 && ipa_get_jf_pass_through_agg_preserved (src);
2778 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2779 }
2780 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2781 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2782 else
2783 {
2784 tree operand = ipa_get_jf_pass_through_operand (src);
2785 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2786 operation);
2787 }
2788 break;
2789 }
2790 case IPA_JF_ANCESTOR:
2791 {
2792 bool agg_p;
2793 agg_p = dst_agg_p
2794 && ipa_get_jf_ancestor_agg_preserved (src);
2795 ipa_set_ancestor_jf (dst,
2796 ipa_get_jf_ancestor_offset (src),
2797 ipa_get_jf_ancestor_formal_id (src),
2798 agg_p);
2799 break;
2800 }
2801 default:
2802 gcc_unreachable ();
2803 }
2804
2805 if (src->agg.items
2806 && (dst_agg_p || !src->agg.by_ref))
2807 {
2808 /* Currently we do not produce clobber aggregate jump
2809 functions, replace with merging when we do. */
2810 gcc_assert (!dst->agg.items);
2811
2812 dst->agg.by_ref = src->agg.by_ref;
2813 dst->agg.items = vec_safe_copy (src->agg.items);
2814 }
2815 }
2816 else
2817 ipa_set_jf_unknown (dst);
2818 }
2819 }
2820 }
2821
2822 /* If TARGET is an addr_expr of a function declaration, make it the
2823 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2824 Otherwise, return NULL. */
2825
2826 struct cgraph_edge *
2827 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2828 bool speculative)
2829 {
2830 struct cgraph_node *callee;
2831 struct inline_edge_summary *es = inline_edge_summary (ie);
2832 bool unreachable = false;
2833
2834 if (TREE_CODE (target) == ADDR_EXPR)
2835 target = TREE_OPERAND (target, 0);
2836 if (TREE_CODE (target) != FUNCTION_DECL)
2837 {
2838 target = canonicalize_constructor_val (target, NULL);
2839 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2840 {
2841 /* Member pointer call that goes through a VMT lookup. */
2842 if (ie->indirect_info->member_ptr
2843 /* Or if target is not an invariant expression and we do not
2844 know if it will evaluate to a function at runtime.
2845 This can happen when folding through &VAR, where &VAR
2846 is IP invariant, but VAR itself is not.
2847
2848 TODO: Revisit this when GCC 5 is branched. It seems that
2849 member_ptr check is not needed and that we may try to fold
2850 the expression and see if VAR is readonly. */
2851 || !is_gimple_ip_invariant (target))
2852 {
2853 if (dump_enabled_p ())
2854 {
2855 location_t loc = gimple_location_safe (ie->call_stmt);
2856 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2857 "discovered direct call non-invariant "
2858 "%s/%i\n",
2859 ie->caller->name (), ie->caller->order);
2860 }
2861 return NULL;
2862 }
2863
2864
2865 if (dump_enabled_p ())
2866 {
2867 location_t loc = gimple_location_safe (ie->call_stmt);
2868 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2869 "discovered direct call to non-function in %s/%i, "
2870 "making it __builtin_unreachable\n",
2871 ie->caller->name (), ie->caller->order);
2872 }
2873
2874 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2875 callee = cgraph_node::get_create (target);
2876 unreachable = true;
2877 }
2878 else
2879 callee = cgraph_node::get (target);
2880 }
2881 else
2882 callee = cgraph_node::get (target);
2883
2884 /* Because may-edges are not explicitly represented and vtable may be external,
2885 we may create the first reference to the object in the unit. */
2886 if (!callee || callee->global.inlined_to)
2887 {
2888
2889 /* We had better make sure we can refer to it.
2890 In the case of static functions we are out of luck, since we already
2891 removed its body. In the case of public functions we may or may
2892 not introduce the reference. */
2893 if (!canonicalize_constructor_val (target, NULL)
2894 || !TREE_PUBLIC (target))
2895 {
2896 if (dump_file)
2897 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2898 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2899 xstrdup_for_dump (ie->caller->name ()),
2900 ie->caller->order,
2901 xstrdup_for_dump (ie->callee->name ()),
2902 ie->callee->order);
2903 return NULL;
2904 }
2905 callee = cgraph_node::get_create (target);
2906 }
2907
2908 /* If the edge is already speculated, check that the existing speculation agrees. */
2909 if (speculative && ie->speculative)
2910 {
2911 struct cgraph_edge *e2;
2912 struct ipa_ref *ref;
2913 ie->speculative_call_info (e2, ie, ref);
2914 if (e2->callee->ultimate_alias_target ()
2915 != callee->ultimate_alias_target ())
2916 {
2917 if (dump_file)
2918 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2919 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2920 xstrdup_for_dump (ie->caller->name ()),
2921 ie->caller->order,
2922 xstrdup_for_dump (callee->name ()),
2923 callee->order,
2924 xstrdup_for_dump (e2->callee->name ()),
2925 e2->callee->order);
2926 }
2927 else
2928 {
2929 if (dump_file)
2930 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2931 "(%s/%i -> %s/%i) this agrees with previous speculation.\n",
2932 xstrdup_for_dump (ie->caller->name ()),
2933 ie->caller->order,
2934 xstrdup_for_dump (callee->name ()),
2935 callee->order);
2936 }
2937 return NULL;
2938 }
2939
2940 if (!dbg_cnt (devirt))
2941 return NULL;
2942
2943 ipa_check_create_node_params ();
2944
2945 /* We cannot make edges to inline clones. It is a bug that someone removed
2946 the cgraph node too early. */
2947 gcc_assert (!callee->global.inlined_to);
2948
2949 if (dump_file && !unreachable)
2950 {
2951 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2952 "(%s/%i -> %s/%i), for stmt ",
2953 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2954 speculative ? "speculative" : "known",
2955 xstrdup_for_dump (ie->caller->name ()),
2956 ie->caller->order,
2957 xstrdup_for_dump (callee->name ()),
2958 callee->order);
2959 if (ie->call_stmt)
2960 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2961 else
2962 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2963 }
2964 if (dump_enabled_p ())
2965 {
2966 location_t loc = gimple_location_safe (ie->call_stmt);
2967
2968 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2969 "converting indirect call in %s to direct call to %s\n",
2970 ie->caller->name (), callee->name ());
2971 }
2972 if (!speculative)
2973 {
2974 struct cgraph_edge *orig = ie;
2975 ie = ie->make_direct (callee);
2976 /* If we resolved a speculative edge, the cost is already up to date
2977 for the direct call (adjusted by inline_edge_duplication_hook). */
2978 if (ie == orig)
2979 {
2980 es = inline_edge_summary (ie);
2981 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2982 - eni_size_weights.call_cost);
2983 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2984 - eni_time_weights.call_cost);
2985 }
2986 }
2987 else
2988 {
2989 if (!callee->can_be_discarded_p ())
2990 {
2991 cgraph_node *alias;
2992 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2993 if (alias)
2994 callee = alias;
2995 }
2996 /* make_speculative will update ie's cost to direct call cost. */
2997 ie = ie->make_speculative
2998 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2999 }
3000
3001 return ie;
3002 }
3003
3004 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3005 CONSTRUCTOR and return it. Return NULL if the search fails for some
3006 reason. */
3007
3008 static tree
3009 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3010 {
3011 tree type = TREE_TYPE (constructor);
3012 if (TREE_CODE (type) != ARRAY_TYPE
3013 && TREE_CODE (type) != RECORD_TYPE)
3014 return NULL;
3015
3016 unsigned ix;
3017 tree index, val;
3018 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3019 {
3020 HOST_WIDE_INT elt_offset;
3021 if (TREE_CODE (type) == ARRAY_TYPE)
3022 {
3023 offset_int off;
3024 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3025 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3026
3027 if (index)
3028 {
3029 off = wi::to_offset (index);
3030 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3031 {
3032 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3033 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3034 off = wi::sext (off - wi::to_offset (low_bound),
3035 TYPE_PRECISION (TREE_TYPE (index)));
3036 }
3037 off *= wi::to_offset (unit_size);
3038 }
3039 else
3040 off = wi::to_offset (unit_size) * ix;
3041
3042 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3043 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3044 continue;
3045 elt_offset = off.to_shwi ();
3046 }
3047 else if (TREE_CODE (type) == RECORD_TYPE)
3048 {
3049 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3050 if (DECL_BIT_FIELD (index))
3051 continue;
3052 elt_offset = int_bit_position (index);
3053 }
3054 else
3055 gcc_unreachable ();
3056
3057 if (elt_offset > req_offset)
3058 return NULL;
3059
3060 if (TREE_CODE (val) == CONSTRUCTOR)
3061 return find_constructor_constant_at_offset (val,
3062 req_offset - elt_offset);
3063
3064 if (elt_offset == req_offset
3065 && is_gimple_reg_type (TREE_TYPE (val))
3066 && is_gimple_ip_invariant (val))
3067 return val;
3068 }
3069 return NULL;
3070 }
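/* Worked example (added for exposition; hypothetical declaration, 32-bit
   int assumed): for the initializer of
   static const struct { int a; int b[2]; } s = { 1, { 2, 3 } };
   a REQ_OFFSET equal to the bit position of s.b[1] (64) first matches the
   field b in the RECORD_TYPE walk, recurses into the array constructor
   with offset 32 and returns the INTEGER_CST 3.  */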
3071
3072 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3073 invariant from a static constructor and if so, return it. Otherwise return
3074 NULL. */
3075
3076 static tree
3077 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3078 {
3079 if (by_ref)
3080 {
3081 if (TREE_CODE (scalar) != ADDR_EXPR)
3082 return NULL;
3083 scalar = TREE_OPERAND (scalar, 0);
3084 }
3085
3086 if (!VAR_P (scalar)
3087 || !is_global_var (scalar)
3088 || !TREE_READONLY (scalar)
3089 || !DECL_INITIAL (scalar)
3090 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3091 return NULL;
3092
3093 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3094 }
3095
3096 /* Retrieve value from aggregate jump function AGG or static initializer of
3097 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3098 none. BY_REF specifies whether the value has to be passed by reference or
3099 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3100 to is set to true if the value comes from an initializer of a constant. */
3101
3102 tree
3103 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3104 HOST_WIDE_INT offset, bool by_ref,
3105 bool *from_global_constant)
3106 {
3107 struct ipa_agg_jf_item *item;
3108 int i;
3109
3110 if (scalar)
3111 {
3112 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3113 if (res)
3114 {
3115 if (from_global_constant)
3116 *from_global_constant = true;
3117 return res;
3118 }
3119 }
3120
3121 if (!agg
3122 || by_ref != agg->by_ref)
3123 return NULL;
3124
3125 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3126 if (item->offset == offset)
3127 {
3128 /* Currently we do not have clobber values, return NULL for them once
3129 we do. */
3130 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3131 if (from_global_constant)
3132 *from_global_constant = false;
3133 return item->value;
3134 }
3135 return NULL;
3136 }
3137
3138 /* Remove a reference to SYMBOL from the list of references of a node given by
3139 reference description RDESC. Return true if the reference has been
3140 successfully found and removed. */
3141
3142 static bool
3143 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3144 {
3145 struct ipa_ref *to_del;
3146 struct cgraph_edge *origin;
3147
3148 origin = rdesc->cs;
3149 if (!origin)
3150 return false;
3151 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3152 origin->lto_stmt_uid);
3153 if (!to_del)
3154 return false;
3155
3156 to_del->remove_reference ();
3157 if (dump_file)
3158 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3159 xstrdup_for_dump (origin->caller->name ()),
3160 origin->caller->order, xstrdup_for_dump (symbol->name ()));
3161 return true;
3162 }
3163
3164 /* If JFUNC has a reference description with refcount different from
3165 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3166 NULL. JFUNC must be a constant jump function. */
3167
3168 static struct ipa_cst_ref_desc *
3169 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3170 {
3171 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3172 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3173 return rdesc;
3174 else
3175 return NULL;
3176 }
3177
3178 /* If the value of constant jump function JFUNC is an address of a function
3179 declaration, return the associated call graph node. Otherwise return
3180 NULL. */
3181
3182 static cgraph_node *
3183 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3184 {
3185 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3186 tree cst = ipa_get_jf_constant (jfunc);
3187 if (TREE_CODE (cst) != ADDR_EXPR
3188 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3189 return NULL;
3190
3191 return cgraph_node::get (TREE_OPERAND (cst, 0));
3192 }
3193
3194
3195 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3196 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3197 the edge specified in the rdesc. Return false if either the symbol or the
3198 reference could not be found, otherwise return true. */
3199
3200 static bool
3201 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3202 {
3203 struct ipa_cst_ref_desc *rdesc;
3204 if (jfunc->type == IPA_JF_CONST
3205 && (rdesc = jfunc_rdesc_usable (jfunc))
3206 && --rdesc->refcount == 0)
3207 {
3208 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3209 if (!symbol)
3210 return false;
3211
3212 return remove_described_reference (symbol, rdesc);
3213 }
3214 return true;
3215 }
3216
3217 /* Try to find a destination for indirect edge IE that corresponds to a simple
3218 call or a call of a member function pointer and where the destination is a
3219 pointer formal parameter described by jump function JFUNC. If it can be
3220 determined, return the newly direct edge, otherwise return NULL.
3221 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3222
3223 static struct cgraph_edge *
3224 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3225 struct ipa_jump_func *jfunc,
3226 struct ipa_node_params *new_root_info)
3227 {
3228 struct cgraph_edge *cs;
3229 tree target;
3230 bool agg_contents = ie->indirect_info->agg_contents;
3231 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3232 if (agg_contents)
3233 {
3234 bool from_global_constant;
3235 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3236 ie->indirect_info->offset,
3237 ie->indirect_info->by_ref,
3238 &from_global_constant);
3239 if (target
3240 && !from_global_constant
3241 && !ie->indirect_info->guaranteed_unmodified)
3242 return NULL;
3243 }
3244 else
3245 target = scalar;
3246 if (!target)
3247 return NULL;
3248 cs = ipa_make_edge_direct_to_target (ie, target);
3249
3250 if (cs && !agg_contents)
3251 {
3252 bool ok;
3253 gcc_checking_assert (cs->callee
3254 && (cs != ie
3255 || jfunc->type != IPA_JF_CONST
3256 || !cgraph_node_for_jfunc (jfunc)
3257 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3258 ok = try_decrement_rdesc_refcount (jfunc);
3259 gcc_checking_assert (ok);
3260 }
3261
3262 return cs;
3263 }
3264
3265 /* Return the target to be used in cases of impossible devirtualization. IE
3266 and target (the latter can be NULL) are dumped when dumping is enabled. */
3267
3268 tree
3269 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3270 {
3271 if (dump_file)
3272 {
3273 if (target)
3274 fprintf (dump_file,
3275 "Type inconsistent devirtualization: %s/%i->%s\n",
3276 ie->caller->name (), ie->caller->order,
3277 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3278 else
3279 fprintf (dump_file,
3280 "No devirtualization target in %s/%i\n",
3281 ie->caller->name (), ie->caller->order);
3282 }
3283 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3284 cgraph_node::get_create (new_target);
3285 return new_target;
3286 }
3287
3288 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3289 call based on a formal parameter which is described by jump function JFUNC
3290 and if it can be determined, make it direct and return the direct edge.
3291 Otherwise, return NULL. CTX describes the polymorphic context that the
3292 parameter the call is based on brings along with it. */
3293
3294 static struct cgraph_edge *
3295 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3296 struct ipa_jump_func *jfunc,
3297 struct ipa_polymorphic_call_context ctx)
3298 {
3299 tree target = NULL;
3300 bool speculative = false;
3301
3302 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3303 return NULL;
3304
3305 gcc_assert (!ie->indirect_info->by_ref);
3306
3307 /* Try to do lookup via known virtual table pointer value. */
3308 if (!ie->indirect_info->vptr_changed
3309 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3310 {
3311 tree vtable;
3312 unsigned HOST_WIDE_INT offset;
3313 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3314 : NULL;
3315 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3316 ie->indirect_info->offset,
3317 true);
3318 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3319 {
3320 bool can_refer;
3321 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3322 vtable, offset, &can_refer);
3323 if (can_refer)
3324 {
3325 if (!t
3326 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3327 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3328 || !possible_polymorphic_call_target_p
3329 (ie, cgraph_node::get (t)))
3330 {
3331 /* Do not speculate builtin_unreachable, it is stupid! */
3332 if (!ie->indirect_info->vptr_changed)
3333 target = ipa_impossible_devirt_target (ie, target);
3334 else
3335 target = NULL;
3336 }
3337 else
3338 {
3339 target = t;
3340 speculative = ie->indirect_info->vptr_changed;
3341 }
3342 }
3343 }
3344 }
3345
3346 ipa_polymorphic_call_context ie_context (ie);
3347 vec <cgraph_node *>targets;
3348 bool final;
3349
3350 ctx.offset_by (ie->indirect_info->offset);
3351 if (ie->indirect_info->vptr_changed)
3352 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3353 ie->indirect_info->otr_type);
3354 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3355 targets = possible_polymorphic_call_targets
3356 (ie->indirect_info->otr_type,
3357 ie->indirect_info->otr_token,
3358 ctx, &final);
3359 if (final && targets.length () <= 1)
3360 {
3361 speculative = false;
3362 if (targets.length () == 1)
3363 target = targets[0]->decl;
3364 else
3365 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3366 }
3367 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3368 && !ie->speculative && ie->maybe_hot_p ())
3369 {
3370 cgraph_node *n;
3371 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3372 ie->indirect_info->otr_token,
3373 ie->indirect_info->context);
3374 if (n)
3375 {
3376 target = n->decl;
3377 speculative = true;
3378 }
3379 }
3380
3381 if (target)
3382 {
3383 if (!possible_polymorphic_call_target_p
3384 (ie, cgraph_node::get_create (target)))
3385 {
3386 if (speculative)
3387 return NULL;
3388 target = ipa_impossible_devirt_target (ie, target);
3389 }
3390 return ipa_make_edge_direct_to_target (ie, target, speculative);
3391 }
3392 else
3393 return NULL;
3394 }
3395
3396 /* Update the param called notes associated with NODE when CS is being inlined,
3397 assuming NODE is (potentially indirectly) inlined into CS->callee.
3398 Moreover, if the callee is discovered to be constant, create a new cgraph
3399 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3400 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3401
3402 static bool
3403 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3404 struct cgraph_node *node,
3405 vec<cgraph_edge *> *new_edges)
3406 {
3407 struct ipa_edge_args *top;
3408 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3409 struct ipa_node_params *new_root_info;
3410 bool res = false;
3411
3412 ipa_check_create_edge_args ();
3413 top = IPA_EDGE_REF (cs);
3414 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3415 ? cs->caller->global.inlined_to
3416 : cs->caller);
3417
3418 for (ie = node->indirect_calls; ie; ie = next_ie)
3419 {
3420 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3421 struct ipa_jump_func *jfunc;
3422 int param_index;
3423 cgraph_node *spec_target = NULL;
3424
3425 next_ie = ie->next_callee;
3426
3427 if (ici->param_index == -1)
3428 continue;
3429
3430 /* We must check range due to calls with variable number of arguments: */
3431 if (ici->param_index >= ipa_get_cs_argument_count (top))
3432 {
3433 ici->param_index = -1;
3434 continue;
3435 }
3436
3437 param_index = ici->param_index;
3438 jfunc = ipa_get_ith_jump_func (top, param_index);
3439
3440 if (ie->speculative)
3441 {
3442 struct cgraph_edge *de;
3443 struct ipa_ref *ref;
3444 ie->speculative_call_info (de, ie, ref);
3445 spec_target = de->callee;
3446 }
3447
3448 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3449 new_direct_edge = NULL;
3450 else if (ici->polymorphic)
3451 {
3452 ipa_polymorphic_call_context ctx;
3453 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3454 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3455 }
3456 else
3457 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3458 new_root_info);
3459 /* If speculation was removed, then we need to do nothing. */
3460 if (new_direct_edge && new_direct_edge != ie
3461 && new_direct_edge->callee == spec_target)
3462 {
3463 new_direct_edge->indirect_inlining_edge = 1;
3464 top = IPA_EDGE_REF (cs);
3465 res = true;
3466 if (!new_direct_edge->speculative)
3467 continue;
3468 }
3469 else if (new_direct_edge)
3470 {
3471 new_direct_edge->indirect_inlining_edge = 1;
3472 if (new_direct_edge->call_stmt)
3473 new_direct_edge->call_stmt_cannot_inline_p
3474 = !gimple_check_call_matching_types (
3475 new_direct_edge->call_stmt,
3476 new_direct_edge->callee->decl, false);
3477 if (new_edges)
3478 {
3479 new_edges->safe_push (new_direct_edge);
3480 res = true;
3481 }
3482 top = IPA_EDGE_REF (cs);
3483 /* If speculative edge was introduced we still need to update
3484 call info of the indirect edge. */
3485 if (!new_direct_edge->speculative)
3486 continue;
3487 }
3488 if (jfunc->type == IPA_JF_PASS_THROUGH
3489 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3490 {
3491 if (ici->agg_contents
3492 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3493 && !ici->polymorphic)
3494 ici->param_index = -1;
3495 else
3496 {
3497 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3498 if (ici->polymorphic
3499 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3500 ici->vptr_changed = true;
3501 }
3502 }
3503 else if (jfunc->type == IPA_JF_ANCESTOR)
3504 {
3505 if (ici->agg_contents
3506 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3507 && !ici->polymorphic)
3508 ici->param_index = -1;
3509 else
3510 {
3511 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3512 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3513 if (ici->polymorphic
3514 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3515 ici->vptr_changed = true;
3516 }
3517 }
3518 else
3519 /* Either we can find a destination for this edge now or never. */
3520 ici->param_index = -1;
3521 }
3522
3523 return res;
3524 }
3525
3526 /* Recursively traverse subtree of NODE (including node) made of inlined
3527 cgraph_edges when CS has been inlined and invoke
3528 update_indirect_edges_after_inlining on all nodes and
3529 update_jump_functions_after_inlining on all non-inlined edges that lead out
3530 of this subtree. Newly discovered indirect edges will be added to
3531 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3532 created. */
3533
3534 static bool
3535 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3536 struct cgraph_node *node,
3537 vec<cgraph_edge *> *new_edges)
3538 {
3539 struct cgraph_edge *e;
3540 bool res;
3541
3542 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3543
3544 for (e = node->callees; e; e = e->next_callee)
3545 if (!e->inline_failed)
3546 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3547 else
3548 update_jump_functions_after_inlining (cs, e);
3549 for (e = node->indirect_calls; e; e = e->next_callee)
3550 update_jump_functions_after_inlining (cs, e);
3551
3552 return res;
3553 }
3554
3555 /* Combine two controlled uses counts as done during inlining. */
3556
3557 static int
3558 combine_controlled_uses_counters (int c, int d)
3559 {
3560 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3561 return IPA_UNDESCRIBED_USE;
3562 else
3563 return c + d - 1;
3564 }
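/* Worked example (added for exposition): with C == 2 and D == 3 the result
   is 2 + 3 - 1 == 4; the subtraction presumably accounts for the use that
   was the now-inlined call itself.  If either count is IPA_UNDESCRIBED_USE,
   the combination stays undescribed.  */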
3565
3566 /* Propagate the number of controlled uses from CS->callee to the new root of the
3567 tree of inlined nodes. */
3568
3569 static void
3570 propagate_controlled_uses (struct cgraph_edge *cs)
3571 {
3572 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3573 struct cgraph_node *new_root = cs->caller->global.inlined_to
3574 ? cs->caller->global.inlined_to : cs->caller;
3575 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3576 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3577 int count, i;
3578
3579 count = MIN (ipa_get_cs_argument_count (args),
3580 ipa_get_param_count (old_root_info));
3581 for (i = 0; i < count; i++)
3582 {
3583 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3584 struct ipa_cst_ref_desc *rdesc;
3585
3586 if (jf->type == IPA_JF_PASS_THROUGH)
3587 {
3588 int src_idx, c, d;
3589 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3590 c = ipa_get_controlled_uses (new_root_info, src_idx);
3591 d = ipa_get_controlled_uses (old_root_info, i);
3592
3593 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3594 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3595 c = combine_controlled_uses_counters (c, d);
3596 ipa_set_controlled_uses (new_root_info, src_idx, c);
3597 if (c == 0 && new_root_info->ipcp_orig_node)
3598 {
3599 struct cgraph_node *n;
3600 struct ipa_ref *ref;
3601 tree t = new_root_info->known_csts[src_idx];
3602
3603 if (t && TREE_CODE (t) == ADDR_EXPR
3604 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3605 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3606 && (ref = new_root->find_reference (n, NULL, 0)))
3607 {
3608 if (dump_file)
3609 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3610 "reference from %s/%i to %s/%i.\n",
3611 xstrdup_for_dump (new_root->name ()),
3612 new_root->order,
3613 xstrdup_for_dump (n->name ()), n->order);
3614 ref->remove_reference ();
3615 }
3616 }
3617 }
3618 else if (jf->type == IPA_JF_CONST
3619 && (rdesc = jfunc_rdesc_usable (jf)))
3620 {
3621 int d = ipa_get_controlled_uses (old_root_info, i);
3622 int c = rdesc->refcount;
3623 rdesc->refcount = combine_controlled_uses_counters (c, d);
3624 if (rdesc->refcount == 0)
3625 {
3626 tree cst = ipa_get_jf_constant (jf);
3627 struct cgraph_node *n;
3628 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3629 && TREE_CODE (TREE_OPERAND (cst, 0))
3630 == FUNCTION_DECL);
3631 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3632 if (n)
3633 {
3634 struct cgraph_node *clone;
3635 bool ok;
3636 ok = remove_described_reference (n, rdesc);
3637 gcc_checking_assert (ok);
3638
3639 clone = cs->caller;
3640 while (clone->global.inlined_to
3641 && clone != rdesc->cs->caller
3642 && IPA_NODE_REF (clone)->ipcp_orig_node)
3643 {
3644 struct ipa_ref *ref;
3645 ref = clone->find_reference (n, NULL, 0);
3646 if (ref)
3647 {
3648 if (dump_file)
3649 fprintf (dump_file, "ipa-prop: Removing "
3650 "cloning-created reference "
3651 "from %s/%i to %s/%i.\n",
3652 xstrdup_for_dump (clone->name ()),
3653 clone->order,
3654 xstrdup_for_dump (n->name ()),
3655 n->order);
3656 ref->remove_reference ();
3657 }
3658 clone = clone->callers->caller;
3659 }
3660 }
3661 }
3662 }
3663 }
3664
3665 for (i = ipa_get_param_count (old_root_info);
3666 i < ipa_get_cs_argument_count (args);
3667 i++)
3668 {
3669 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3670
3671 if (jf->type == IPA_JF_CONST)
3672 {
3673 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3674 if (rdesc)
3675 rdesc->refcount = IPA_UNDESCRIBED_USE;
3676 }
3677 else if (jf->type == IPA_JF_PASS_THROUGH)
3678 ipa_set_controlled_uses (new_root_info,
3679 jf->value.pass_through.formal_id,
3680 IPA_UNDESCRIBED_USE);
3681 }
3682 }
3683
3684 /* Update jump functions and call note functions on inlining the call site CS.
3685 CS is expected to lead to a node already cloned by
3686 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3687 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3688 created. */
3689
3690 bool
3691 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3692 vec<cgraph_edge *> *new_edges)
3693 {
3694 bool changed;
3695 /* Do nothing if the preparation phase has not been carried out yet
3696 (i.e. during early inlining). */
3697 if (!ipa_node_params_sum)
3698 return false;
3699 gcc_assert (ipa_edge_args_sum);
3700
3701 propagate_controlled_uses (cs);
3702 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3703
3704 return changed;
3705 }
3706
3707 /* Ensure that the array of edge argument infos is big enough to accommodate
3708 a structure for all edges, reallocating it if not.  Also allocate the
3709 associated hash tables if they do not already exist. */
3710
3711 void
3712 ipa_check_create_edge_args (void)
3713 {
3714 if (!ipa_edge_args_sum)
3715 ipa_edge_args_sum
3716 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3717 ipa_edge_args_sum_t (symtab, true));
3718 if (!ipa_bits_hash_table)
3719 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3720 if (!ipa_vr_hash_table)
3721 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3722 }
3723
3724 /* Frees all dynamically allocated structures that the argument info points
3725 to. */
3726
3727 void
3728 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3729 {
3730 vec_free (args->jump_functions);
3731 memset (args, 0, sizeof (*args));
3732 }
3733
3734 /* Free all ipa_edge_args structures. */
3735
3736 void
3737 ipa_free_all_edge_args (void)
3738 {
3739 if (!ipa_edge_args_sum)
3740 return;
3741
3742 ipa_edge_args_sum->release ();
3743 ipa_edge_args_sum = NULL;
3744 }
3745
3746 /* Free all ipa_node_params structures. */
3747
3748 void
3749 ipa_free_all_node_params (void)
3750 {
3751 ipa_node_params_sum->release ();
3752 ipa_node_params_sum = NULL;
3753 }
3754
3755 /* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3756 tables if they do not already exist. */
3757
3758 void
3759 ipcp_grow_transformations_if_necessary (void)
3760 {
3761 if (vec_safe_length (ipcp_transformations)
3762 <= (unsigned) symtab->cgraph_max_uid)
3763 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3764 if (!ipa_bits_hash_table)
3765 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3766 if (!ipa_vr_hash_table)
3767 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3768 }
3769
3770 /* Set the aggregate replacements of NODE to be AGGVALS. */
3771
3772 void
3773 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3774 struct ipa_agg_replacement_value *aggvals)
3775 {
3776 ipcp_grow_transformations_if_necessary ();
3777 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3778 }
3779
3780 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3781 count data structures accordingly. */
3782
3783 void
3784 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3785 {
3786 if (args->jump_functions)
3787 {
3788 struct ipa_jump_func *jf;
3789 int i;
3790 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3791 {
3792 struct ipa_cst_ref_desc *rdesc;
3793 try_decrement_rdesc_refcount (jf);
3794 if (jf->type == IPA_JF_CONST
3795 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3796 && rdesc->cs == cs)
3797 rdesc->cs = NULL;
3798 }
3799 }
3800 }
3801
3802 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3803 reference count data structures accordingly. */
3804
3805 void
3806 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3807 ipa_edge_args *old_args, ipa_edge_args *new_args)
3808 {
3809 unsigned int i;
3810
3811 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3812 if (old_args->polymorphic_call_contexts)
3813 new_args->polymorphic_call_contexts
3814 = vec_safe_copy (old_args->polymorphic_call_contexts);
3815
3816 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3817 {
3818 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3819 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3820
3821 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3822
3823 if (src_jf->type == IPA_JF_CONST)
3824 {
3825 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3826
3827 if (!src_rdesc)
3828 dst_jf->value.constant.rdesc = NULL;
3829 else if (src->caller == dst->caller)
3830 {
3831 struct ipa_ref *ref;
3832 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3833 gcc_checking_assert (n);
3834 ref = src->caller->find_reference (n, src->call_stmt,
3835 src->lto_stmt_uid);
3836 gcc_checking_assert (ref);
3837 dst->caller->clone_reference (ref, ref->stmt);
3838
3839 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3840 dst_rdesc->cs = dst;
3841 dst_rdesc->refcount = src_rdesc->refcount;
3842 dst_rdesc->next_duplicate = NULL;
3843 dst_jf->value.constant.rdesc = dst_rdesc;
3844 }
3845 else if (src_rdesc->cs == src)
3846 {
3847 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3848 dst_rdesc->cs = dst;
3849 dst_rdesc->refcount = src_rdesc->refcount;
3850 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3851 src_rdesc->next_duplicate = dst_rdesc;
3852 dst_jf->value.constant.rdesc = dst_rdesc;
3853 }
3854 else
3855 {
3856 struct ipa_cst_ref_desc *dst_rdesc;
3857 /* This can happen during inlining, when a JFUNC can refer to a
3858 reference taken in a function up in the tree of inline clones.
3859 We need to find the duplicate that refers to our tree of
3860 inline clones. */
3861
3862 gcc_assert (dst->caller->global.inlined_to);
3863 for (dst_rdesc = src_rdesc->next_duplicate;
3864 dst_rdesc;
3865 dst_rdesc = dst_rdesc->next_duplicate)
3866 {
3867 struct cgraph_node *top;
3868 top = dst_rdesc->cs->caller->global.inlined_to
3869 ? dst_rdesc->cs->caller->global.inlined_to
3870 : dst_rdesc->cs->caller;
3871 if (dst->caller->global.inlined_to == top)
3872 break;
3873 }
3874 gcc_assert (dst_rdesc);
3875 dst_jf->value.constant.rdesc = dst_rdesc;
3876 }
3877 }
3878 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3879 && src->caller == dst->caller)
3880 {
3881 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3882 ? dst->caller->global.inlined_to : dst->caller;
3883 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3884 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3885
3886 int c = ipa_get_controlled_uses (root_info, idx);
3887 if (c != IPA_UNDESCRIBED_USE)
3888 {
3889 c++;
3890 ipa_set_controlled_uses (root_info, idx, c);
3891 }
3892 }
3893 }
3894 }
3895
3896 /* Analyze a function NODE that has been newly added to the callgraph. */
3897
3898 static void
3899 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3900 {
3901 if (node->has_gimple_body_p ())
3902 ipa_analyze_node (node);
3903 }
3904
3905 /* Hook that is called by summary when a node is duplicated. */
3906
3907 void
3908 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3909 ipa_node_params *old_info,
3910 ipa_node_params *new_info)
3911 {
3912 ipa_agg_replacement_value *old_av, *new_av;
3913
3914 new_info->descriptors = vec_safe_copy (old_info->descriptors);
3915 new_info->lattices = NULL;
3916 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3917 new_info->known_csts = old_info->known_csts.copy ();
3918 new_info->known_contexts = old_info->known_contexts.copy ();
3919
3920 new_info->analysis_done = old_info->analysis_done;
3921 new_info->node_enqueued = old_info->node_enqueued;
3922 new_info->versionable = old_info->versionable;
3923
3924 old_av = ipa_get_agg_replacements_for_node (src);
3925 if (old_av)
3926 {
3927 new_av = NULL;
3928 while (old_av)
3929 {
3930 struct ipa_agg_replacement_value *v;
3931
3932 v = ggc_alloc<ipa_agg_replacement_value> ();
3933 memcpy (v, old_av, sizeof (*v));
3934 v->next = new_av;
3935 new_av = v;
3936 old_av = old_av->next;
3937 }
3938 ipa_set_node_agg_value_chain (dst, new_av);
3939 }
3940
3941 ipcp_transformation_summary *src_trans
3942 = ipcp_get_transformation_summary (src);
3943
3944 if (src_trans)
3945 {
3946 ipcp_grow_transformations_if_necessary ();
3947 src_trans = ipcp_get_transformation_summary (src);
3948 ipcp_transformation_summary *dst_trans
3949 = ipcp_get_transformation_summary (dst);
3950
3951 dst_trans->bits = vec_safe_copy (src_trans->bits);
3952
3953 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3954 vec<ipa_vr, va_gc> *&dst_vr
3955 = ipcp_get_transformation_summary (dst)->m_vr;
3956 if (vec_safe_length (src_trans->m_vr) > 0)
3957 {
3958 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3959 for (unsigned i = 0; i < src_vr->length (); ++i)
3960 dst_vr->quick_push ((*src_vr)[i]);
3961 }
3962 }
3963 }
3964
3965 /* Register our cgraph hooks if they are not already there. */
3966
3967 void
3968 ipa_register_cgraph_hooks (void)
3969 {
3970 ipa_check_create_node_params ();
3971 ipa_check_create_edge_args ();
3972
3973 function_insertion_hook_holder =
3974 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3975 }
3976
3977 /* Unregister our cgraph hooks if they have been registered. */
3978
3979 static void
3980 ipa_unregister_cgraph_hooks (void)
3981 {
3982 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3983 function_insertion_hook_holder = NULL;
3984 }
3985
3986 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3987 longer needed after ipa-cp. */
3988
3989 void
3990 ipa_free_all_structures_after_ipa_cp (void)
3991 {
3992 if (!optimize && !in_lto_p)
3993 {
3994 ipa_free_all_edge_args ();
3995 ipa_free_all_node_params ();
3996 ipcp_sources_pool.release ();
3997 ipcp_cst_values_pool.release ();
3998 ipcp_poly_ctx_values_pool.release ();
3999 ipcp_agg_lattice_pool.release ();
4000 ipa_unregister_cgraph_hooks ();
4001 ipa_refdesc_pool.release ();
4002 }
4003 }
4004
4005 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4006 longer needed after indirect inlining. */
4007
4008 void
4009 ipa_free_all_structures_after_iinln (void)
4010 {
4011 ipa_free_all_edge_args ();
4012 ipa_free_all_node_params ();
4013 ipa_unregister_cgraph_hooks ();
4014 ipcp_sources_pool.release ();
4015 ipcp_cst_values_pool.release ();
4016 ipcp_poly_ctx_values_pool.release ();
4017 ipcp_agg_lattice_pool.release ();
4018 ipa_refdesc_pool.release ();
4019 }
4020
4021 /* Print the ipa_tree_map data structures (parameter descriptors) of
4022 function NODE to F. */
4023
4024 void
4025 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4026 {
4027 int i, count;
4028 struct ipa_node_params *info;
4029
4030 if (!node->definition)
4031 return;
4032 info = IPA_NODE_REF (node);
4033 fprintf (f, " function %s/%i parameter descriptors:\n",
4034 node->name (), node->order);
4035 count = ipa_get_param_count (info);
4036 for (i = 0; i < count; i++)
4037 {
4038 int c;
4039
4040 fprintf (f, " ");
4041 ipa_dump_param (f, info, i);
4042 if (ipa_is_param_used (info, i))
4043 fprintf (f, " used");
4044 c = ipa_get_controlled_uses (info, i);
4045 if (c == IPA_UNDESCRIBED_USE)
4046 fprintf (f, " undescribed_use");
4047 else
4048 fprintf (f, " controlled_uses=%i", c);
4049 fprintf (f, "\n");
4050 }
4051 }
4052
4053 /* Print ipa_tree_map data structures of all functions in the
4054 callgraph to F. */
4055
4056 void
4057 ipa_print_all_params (FILE * f)
4058 {
4059 struct cgraph_node *node;
4060
4061 fprintf (f, "\nFunction parameters:\n");
4062 FOR_EACH_FUNCTION (node)
4063 ipa_print_node_params (f, node);
4064 }
4065
4066 /* Return a heap allocated vector containing formal parameters of FNDECL. */
4067
4068 vec<tree>
4069 ipa_get_vector_of_formal_parms (tree fndecl)
4070 {
4071 vec<tree> args;
4072 int count;
4073 tree parm;
4074
4075 gcc_assert (!flag_wpa);
4076 count = count_formal_params (fndecl);
4077 args.create (count);
4078 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4079 args.quick_push (parm);
4080
4081 return args;
4082 }
4083
4084 /* Return a heap allocated vector containing types of formal parameters of
4085 function type FNTYPE. */
4086
4087 vec<tree>
4088 ipa_get_vector_of_formal_parm_types (tree fntype)
4089 {
4090 vec<tree> types;
4091 int count = 0;
4092 tree t;
4093
4094 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4095 count++;
4096
4097 types.create (count);
4098 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4099 types.quick_push (TREE_VALUE (t));
4100
4101 return types;
4102 }
4103
4104 /* Modify the function declaration FNDECL and its type according to the plan in
4105 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4106 to reflect the actual parameters being modified which are determined by the
4107 base_index field. */
4108
4109 void
4110 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4111 {
4112 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4113 tree orig_type = TREE_TYPE (fndecl);
4114 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4115
4116 /* The following test is an ugly hack; some functions simply don't have any
4117 arguments in their type.  This is probably a bug but well... */
4118 bool care_for_types = (old_arg_types != NULL_TREE);
4119 bool last_parm_void;
4120 vec<tree> otypes;
4121 if (care_for_types)
4122 {
4123 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4124 == void_type_node);
4125 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4126 if (last_parm_void)
4127 gcc_assert (oparms.length () + 1 == otypes.length ());
4128 else
4129 gcc_assert (oparms.length () == otypes.length ());
4130 }
4131 else
4132 {
4133 last_parm_void = false;
4134 otypes.create (0);
4135 }
4136
4137 int len = adjustments.length ();
4138 tree *link = &DECL_ARGUMENTS (fndecl);
4139 tree new_arg_types = NULL;
4140 for (int i = 0; i < len; i++)
4141 {
4142 struct ipa_parm_adjustment *adj;
4143 gcc_assert (link);
4144
4145 adj = &adjustments[i];
4146 tree parm;
4147 if (adj->op == IPA_PARM_OP_NEW)
4148 parm = NULL;
4149 else
4150 parm = oparms[adj->base_index];
4151 adj->base = parm;
4152
4153 if (adj->op == IPA_PARM_OP_COPY)
4154 {
4155 if (care_for_types)
4156 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4157 new_arg_types);
4158 *link = parm;
4159 link = &DECL_CHAIN (parm);
4160 }
4161 else if (adj->op != IPA_PARM_OP_REMOVE)
4162 {
4163 tree new_parm;
4164 tree ptype;
4165
4166 if (adj->by_ref)
4167 ptype = build_pointer_type (adj->type);
4168 else
4169 {
4170 ptype = adj->type;
4171 if (is_gimple_reg_type (ptype)
4172 && TYPE_MODE (ptype) != BLKmode)
4173 {
4174 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4175 if (TYPE_ALIGN (ptype) != malign)
4176 ptype = build_aligned_type (ptype, malign);
4177 }
4178 }
4179
4180 if (care_for_types)
4181 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4182
4183 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4184 ptype);
4185 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4186 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4187 DECL_ARTIFICIAL (new_parm) = 1;
4188 DECL_ARG_TYPE (new_parm) = ptype;
4189 DECL_CONTEXT (new_parm) = fndecl;
4190 TREE_USED (new_parm) = 1;
4191 DECL_IGNORED_P (new_parm) = 1;
4192 layout_decl (new_parm, 0);
4193
4194 if (adj->op == IPA_PARM_OP_NEW)
4195 adj->base = NULL;
4196 else
4197 adj->base = parm;
4198 adj->new_decl = new_parm;
4199
4200 *link = new_parm;
4201 link = &DECL_CHAIN (new_parm);
4202 }
4203 }
4204
4205 *link = NULL_TREE;
4206
4207 tree new_reversed = NULL;
4208 if (care_for_types)
4209 {
4210 new_reversed = nreverse (new_arg_types);
4211 if (last_parm_void)
4212 {
4213 if (new_reversed)
4214 TREE_CHAIN (new_arg_types) = void_list_node;
4215 else
4216 new_reversed = void_list_node;
4217 }
4218 }
4219
4220 /* Use copy_node to preserve as much as possible from original type
4221 (debug info, attribute lists etc.)
4222 The exception is METHOD_TYPEs, which must have a THIS argument.
4223 When we are asked to remove it, we need to build a new FUNCTION_TYPE
4224 instead. */
4225 tree new_type = NULL;
4226 if (TREE_CODE (orig_type) != METHOD_TYPE
4227 || (adjustments[0].op == IPA_PARM_OP_COPY
4228 && adjustments[0].base_index == 0))
4229 {
4230 new_type = build_distinct_type_copy (orig_type);
4231 TYPE_ARG_TYPES (new_type) = new_reversed;
4232 }
4233 else
4234 {
4235 new_type
4236 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4237 new_reversed));
4238 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4239 DECL_VINDEX (fndecl) = NULL_TREE;
4240 }
4241
4242 /* When signature changes, we need to clear builtin info. */
4243 if (DECL_BUILT_IN (fndecl))
4244 {
4245 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4246 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4247 }
4248
4249 TREE_TYPE (fndecl) = new_type;
4250 DECL_VIRTUAL_P (fndecl) = 0;
4251 DECL_LANG_SPECIFIC (fndecl) = NULL;
4252 otypes.release ();
4253 oparms.release ();
4254 }
4255
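/* Hypothetical example (a sketch; the declaration and the adjustment vector
   are made up): for an original declaration

     int foo (int a, struct S *s);

   an adjustment vector consisting of an IPA_PARM_OP_REMOVE entry for
   base_index 0 and a non-copy entry for base_index 1 with type int would
   rewrite the declaration to take a single artificial int parameter.  The
   new PARM_DECL gets a name created from adj->arg_prefix (or "SYNTH" when
   none is given) and the TYPE_ARG_TYPES list of foo's type is rebuilt to
   match the surviving parameters.  */
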
4256 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4257 If this is a directly recursive call, CS must be NULL. Otherwise it must
4258 contain the corresponding call graph edge. */
4259
4260 void
4261 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
4262 ipa_parm_adjustment_vec adjustments)
4263 {
4264 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4265 vec<tree> vargs;
4266 vec<tree, va_gc> **debug_args = NULL;
4267 gcall *new_stmt;
4268 gimple_stmt_iterator gsi, prev_gsi;
4269 tree callee_decl;
4270 int i, len;
4271
4272 len = adjustments.length ();
4273 vargs.create (len);
4274 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4275 current_node->remove_stmt_references (stmt);
4276
4277 gsi = gsi_for_stmt (stmt);
4278 prev_gsi = gsi;
4279 gsi_prev (&prev_gsi);
4280 for (i = 0; i < len; i++)
4281 {
4282 struct ipa_parm_adjustment *adj;
4283
4284 adj = &adjustments[i];
4285
4286 if (adj->op == IPA_PARM_OP_COPY)
4287 {
4288 tree arg = gimple_call_arg (stmt, adj->base_index);
4289
4290 vargs.quick_push (arg);
4291 }
4292 else if (adj->op != IPA_PARM_OP_REMOVE)
4293 {
4294 tree expr, base, off;
4295 location_t loc;
4296 unsigned int deref_align = 0;
4297 bool deref_base = false;
4298
4299 /* We create a new parameter out of the value of the old one; we can
4300 do the following kinds of transformations:
4301
4302 - A scalar passed by reference is converted to a scalar passed by
4303 value. (adj->by_ref is false and the type of the original
4304 actual argument is a pointer to a scalar).
4305
4306 - A part of an aggregate is passed instead of the whole aggregate.
4307 The part can be passed either by value or by reference, this is
4308 determined by value of adj->by_ref. Moreover, the code below
4309 handles both situations when the original aggregate is passed by
4310 value (its type is not a pointer) and when it is passed by
4311 reference (it is a pointer to an aggregate).
4312
4313 When the new argument is passed by reference (adj->by_ref is true)
4314 it must be a part of an aggregate and therefore we form it by
4315 simply taking the address of a reference inside the original
4316 aggregate. */
4317
4318 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4319 base = gimple_call_arg (stmt, adj->base_index);
4320 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4321 : EXPR_LOCATION (base);
4322
4323 if (TREE_CODE (base) != ADDR_EXPR
4324 && POINTER_TYPE_P (TREE_TYPE (base)))
4325 off = build_int_cst (adj->alias_ptr_type,
4326 adj->offset / BITS_PER_UNIT);
4327 else
4328 {
4329 HOST_WIDE_INT base_offset;
4330 tree prev_base;
4331 bool addrof;
4332
4333 if (TREE_CODE (base) == ADDR_EXPR)
4334 {
4335 base = TREE_OPERAND (base, 0);
4336 addrof = true;
4337 }
4338 else
4339 addrof = false;
4340 prev_base = base;
4341 base = get_addr_base_and_unit_offset (base, &base_offset);
4342 /* Aggregate arguments can have non-invariant addresses. */
4343 if (!base)
4344 {
4345 base = build_fold_addr_expr (prev_base);
4346 off = build_int_cst (adj->alias_ptr_type,
4347 adj->offset / BITS_PER_UNIT);
4348 }
4349 else if (TREE_CODE (base) == MEM_REF)
4350 {
4351 if (!addrof)
4352 {
4353 deref_base = true;
4354 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4355 }
4356 off = build_int_cst (adj->alias_ptr_type,
4357 base_offset
4358 + adj->offset / BITS_PER_UNIT);
4359 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4360 off);
4361 base = TREE_OPERAND (base, 0);
4362 }
4363 else
4364 {
4365 off = build_int_cst (adj->alias_ptr_type,
4366 base_offset
4367 + adj->offset / BITS_PER_UNIT);
4368 base = build_fold_addr_expr (base);
4369 }
4370 }
4371
4372 if (!adj->by_ref)
4373 {
4374 tree type = adj->type;
4375 unsigned int align;
4376 unsigned HOST_WIDE_INT misalign;
4377
4378 if (deref_base)
4379 {
4380 align = deref_align;
4381 misalign = 0;
4382 }
4383 else
4384 {
4385 get_pointer_alignment_1 (base, &align, &misalign);
4386 if (TYPE_ALIGN (type) > align)
4387 align = TYPE_ALIGN (type);
4388 }
4389 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4390 * BITS_PER_UNIT);
4391 misalign = misalign & (align - 1);
4392 if (misalign != 0)
4393 align = least_bit_hwi (misalign);
4394 if (align < TYPE_ALIGN (type))
4395 type = build_aligned_type (type, align);
4396 base = force_gimple_operand_gsi (&gsi, base,
4397 true, NULL, true, GSI_SAME_STMT);
4398 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4399 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4400 /* If expr is not a valid gimple call argument emit
4401 a load into a temporary. */
4402 if (is_gimple_reg_type (TREE_TYPE (expr)))
4403 {
4404 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4405 if (gimple_in_ssa_p (cfun))
4406 {
4407 gimple_set_vuse (tem, gimple_vuse (stmt));
4408 expr = make_ssa_name (TREE_TYPE (expr), tem);
4409 }
4410 else
4411 expr = create_tmp_reg (TREE_TYPE (expr));
4412 gimple_assign_set_lhs (tem, expr);
4413 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4414 }
4415 }
4416 else
4417 {
4418 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4419 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4420 expr = build_fold_addr_expr (expr);
4421 expr = force_gimple_operand_gsi (&gsi, expr,
4422 true, NULL, true, GSI_SAME_STMT);
4423 }
4424 vargs.quick_push (expr);
4425 }
4426 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4427 {
4428 unsigned int ix;
4429 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4430 gimple *def_temp;
4431
4432 arg = gimple_call_arg (stmt, adj->base_index);
4433 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4434 {
4435 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4436 continue;
4437 arg = fold_convert_loc (gimple_location (stmt),
4438 TREE_TYPE (origin), arg);
4439 }
4440 if (debug_args == NULL)
4441 debug_args = decl_debug_args_insert (callee_decl);
4442 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4443 if (ddecl == origin)
4444 {
4445 ddecl = (**debug_args)[ix + 1];
4446 break;
4447 }
4448 if (ddecl == NULL)
4449 {
4450 ddecl = make_node (DEBUG_EXPR_DECL);
4451 DECL_ARTIFICIAL (ddecl) = 1;
4452 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4453 SET_DECL_MODE (ddecl, DECL_MODE (origin));
4454
4455 vec_safe_push (*debug_args, origin);
4456 vec_safe_push (*debug_args, ddecl);
4457 }
4458 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4459 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4460 }
4461 }
4462
4463 if (dump_file && (dump_flags & TDF_DETAILS))
4464 {
4465 fprintf (dump_file, "replacing stmt:");
4466 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
4467 }
4468
4469 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4470 vargs.release ();
4471 if (gimple_call_lhs (stmt))
4472 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4473
4474 gimple_set_block (new_stmt, gimple_block (stmt));
4475 if (gimple_has_location (stmt))
4476 gimple_set_location (new_stmt, gimple_location (stmt));
4477 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4478 gimple_call_copy_flags (new_stmt, stmt);
4479 if (gimple_in_ssa_p (cfun))
4480 {
4481 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4482 if (gimple_vdef (stmt))
4483 {
4484 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4485 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4486 }
4487 }
4488
4489 if (dump_file && (dump_flags & TDF_DETAILS))
4490 {
4491 fprintf (dump_file, "with stmt:");
4492 print_gimple_stmt (dump_file, new_stmt, 0);
4493 fprintf (dump_file, "\n");
4494 }
4495 gsi_replace (&gsi, new_stmt, true);
4496 if (cs)
4497 cs->set_call_stmt (new_stmt);
4498 do
4499 {
4500 current_node->record_stmt_references (gsi_stmt (gsi));
4501 gsi_prev (&gsi);
4502 }
4503 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4504 }
4505
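/* Illustration (a sketch with invented values): when an adjustment replaces
   an aggregate that was passed through a pointer with a scalar member at bit
   offset 32, the code above takes the original pointer argument as BASE,
   computes OFF = 32 / BITS_PER_UNIT = 4, builds the MEM_REF <BASE, 4> and,
   because the member has a register type, loads it into a temporary (or an
   SSA name when in SSA form) that is pushed onto VARGS in place of the
   original argument.  */
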
4506 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4507 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4508 specifies whether the function should care about type incompatibility between
4509 the current and new expressions.  If it is false, the function will leave
4510 incompatibility issues to the caller. Return true iff the expression
4511 was modified. */
4512
4513 bool
4514 ipa_modify_expr (tree *expr, bool convert,
4515 ipa_parm_adjustment_vec adjustments)
4516 {
4517 struct ipa_parm_adjustment *cand
4518 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4519 if (!cand)
4520 return false;
4521
4522 tree src;
4523 if (cand->by_ref)
4524 {
4525 src = build_simple_mem_ref (cand->new_decl);
4526 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4527 }
4528 else
4529 src = cand->new_decl;
4530
4531 if (dump_file && (dump_flags & TDF_DETAILS))
4532 {
4533 fprintf (dump_file, "About to replace expr ");
4534 print_generic_expr (dump_file, *expr);
4535 fprintf (dump_file, " with ");
4536 print_generic_expr (dump_file, src);
4537 fprintf (dump_file, "\n");
4538 }
4539
4540 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4541 {
4542 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4543 *expr = vce;
4544 }
4545 else
4546 *expr = src;
4547 return true;
4548 }
4549
4550 /* If T is an SSA_NAME, return NULL if it is not a default def or
4551 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4552 the base variable is always returned, regardless of whether it is a default
4553 def. Return T if it is not an SSA_NAME. */
4554
4555 static tree
4556 get_ssa_base_param (tree t, bool ignore_default_def)
4557 {
4558 if (TREE_CODE (t) == SSA_NAME)
4559 {
4560 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4561 return SSA_NAME_VAR (t);
4562 else
4563 return NULL_TREE;
4564 }
4565 return t;
4566 }
4567
4568 /* Given an expression, return an adjustment entry specifying the
4569 transformation to be done on EXPR. If no suitable adjustment entry
4570 was found, returns NULL.
4571
4572 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4573 default def, otherwise bail on them.
4574
4575 If CONVERT is non-NULL, this function will set *CONVERT if the
4576 expression provided is a component reference. ADJUSTMENTS is the
4577 adjustments vector. */
4578
4579 ipa_parm_adjustment *
4580 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4581 ipa_parm_adjustment_vec adjustments,
4582 bool ignore_default_def)
4583 {
4584 if (TREE_CODE (**expr) == BIT_FIELD_REF
4585 || TREE_CODE (**expr) == IMAGPART_EXPR
4586 || TREE_CODE (**expr) == REALPART_EXPR)
4587 {
4588 *expr = &TREE_OPERAND (**expr, 0);
4589 if (convert)
4590 *convert = true;
4591 }
4592
4593 HOST_WIDE_INT offset, size, max_size;
4594 bool reverse;
4595 tree base
4596 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4597 if (!base || size == -1 || max_size == -1)
4598 return NULL;
4599
4600 if (TREE_CODE (base) == MEM_REF)
4601 {
4602 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4603 base = TREE_OPERAND (base, 0);
4604 }
4605
4606 base = get_ssa_base_param (base, ignore_default_def);
4607 if (!base || TREE_CODE (base) != PARM_DECL)
4608 return NULL;
4609
4610 struct ipa_parm_adjustment *cand = NULL;
4611 unsigned int len = adjustments.length ();
4612 for (unsigned i = 0; i < len; i++)
4613 {
4614 struct ipa_parm_adjustment *adj = &adjustments[i];
4615
4616 if (adj->base == base
4617 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4618 {
4619 cand = adj;
4620 break;
4621 }
4622 }
4623
4624 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4625 return NULL;
4626 return cand;
4627 }
4628
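/* Illustration (the expression is made up): for *EXPR of the form
   parm_1->fld, where parm_1 is the default-definition SSA name of a pointer
   PARM_DECL, get_ref_base_and_extent returns the underlying MEM_REF and the
   bit offset of fld, the MEM_REF offset is folded into OFFSET, and
   get_ssa_base_param maps the SSA name back to the PARM_DECL.  The
   adjustment whose base and offset match is then returned, unless it is a
   plain copy or a removal, in which case NULL is returned.  */
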
4629 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4630
4631 static bool
4632 index_in_adjustments_multiple_times_p (int base_index,
4633 ipa_parm_adjustment_vec adjustments)
4634 {
4635 int i, len = adjustments.length ();
4636 bool one = false;
4637
4638 for (i = 0; i < len; i++)
4639 {
4640 struct ipa_parm_adjustment *adj;
4641 adj = &adjustments[i];
4642
4643 if (adj->base_index == base_index)
4644 {
4645 if (one)
4646 return true;
4647 else
4648 one = true;
4649 }
4650 }
4651 return false;
4652 }
4653
4654
4655 /* Return adjustments that should have the same effect on function parameters
4656 and call arguments as if they were first changed according to adjustments in
4657 INNER and then by adjustments in OUTER. */
4658
4659 ipa_parm_adjustment_vec
4660 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4661 ipa_parm_adjustment_vec outer)
4662 {
4663 int i, outlen = outer.length ();
4664 int inlen = inner.length ();
4665 int removals = 0;
4666 ipa_parm_adjustment_vec adjustments, tmp;
4667
4668 tmp.create (inlen);
4669 for (i = 0; i < inlen; i++)
4670 {
4671 struct ipa_parm_adjustment *n;
4672 n = &inner[i];
4673
4674 if (n->op == IPA_PARM_OP_REMOVE)
4675 removals++;
4676 else
4677 {
4678 /* FIXME: Handling of new arguments is not implemented yet. */
4679 gcc_assert (n->op != IPA_PARM_OP_NEW);
4680 tmp.quick_push (*n);
4681 }
4682 }
4683
4684 adjustments.create (outlen + removals);
4685 for (i = 0; i < outlen; i++)
4686 {
4687 struct ipa_parm_adjustment r;
4688 struct ipa_parm_adjustment *out = &outer[i];
4689 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4690
4691 memset (&r, 0, sizeof (r));
4692 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4693 if (out->op == IPA_PARM_OP_REMOVE)
4694 {
4695 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4696 {
4697 r.op = IPA_PARM_OP_REMOVE;
4698 adjustments.quick_push (r);
4699 }
4700 continue;
4701 }
4702 else
4703 {
4704 /* FIXME: Handling of new arguments is not implemented yet. */
4705 gcc_assert (out->op != IPA_PARM_OP_NEW);
4706 }
4707
4708 r.base_index = in->base_index;
4709 r.type = out->type;
4710
4711 /* FIXME: Create nonlocal value too. */
4712
4713 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4714 r.op = IPA_PARM_OP_COPY;
4715 else if (in->op == IPA_PARM_OP_COPY)
4716 r.offset = out->offset;
4717 else if (out->op == IPA_PARM_OP_COPY)
4718 r.offset = in->offset;
4719 else
4720 r.offset = in->offset + out->offset;
4721 adjustments.quick_push (r);
4722 }
4723
4724 for (i = 0; i < inlen; i++)
4725 {
4726 struct ipa_parm_adjustment *n = &inner[i];
4727
4728 if (n->op == IPA_PARM_OP_REMOVE)
4729 adjustments.quick_push (*n);
4730 }
4731
4732 tmp.release ();
4733 return adjustments;
4734 }
4735
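/* Worked example (illustrative only): let INNER, relative to the original
   two-parameter declaration, be { COPY of index 0, REMOVE of index 1 } and
   let OUTER, expressed against the already-modified signature, be
   { REMOVE of index 0 }.  The first loop copies the surviving COPY into TMP,
   the second loop translates OUTER's removal of TMP[0] into a removal of
   original index 0, and the final loop re-appends INNER's removal of
   index 1, yielding { REMOVE 0, REMOVE 1 } relative to the original
   declaration.  */
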
4736 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4737 way, assuming they are meant to be applied to FNDECL. */
4738
4739 void
4740 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4741 tree fndecl)
4742 {
4743 int i, len = adjustments.length ();
4744 bool first = true;
4745 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4746
4747 fprintf (file, "IPA param adjustments: ");
4748 for (i = 0; i < len; i++)
4749 {
4750 struct ipa_parm_adjustment *adj;
4751 adj = &adjustments[i];
4752
4753 if (!first)
4754 fprintf (file, " ");
4755 else
4756 first = false;
4757
4758 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4759 print_generic_expr (file, parms[adj->base_index]);
4760 if (adj->base)
4761 {
4762 fprintf (file, ", base: ");
4763 print_generic_expr (file, adj->base);
4764 }
4765 if (adj->new_decl)
4766 {
4767 fprintf (file, ", new_decl: ");
4768 print_generic_expr (file, adj->new_decl);
4769 }
4770 if (adj->new_ssa_base)
4771 {
4772 fprintf (file, ", new_ssa_base: ");
4773 print_generic_expr (file, adj->new_ssa_base);
4774 }
4775
4776 if (adj->op == IPA_PARM_OP_COPY)
4777 fprintf (file, ", copy_param");
4778 else if (adj->op == IPA_PARM_OP_REMOVE)
4779 fprintf (file, ", remove_param");
4780 else
4781 fprintf (file, ", offset %li", (long) adj->offset);
4782 if (adj->by_ref)
4783 fprintf (file, ", by_ref");
4784 print_node_brief (file, ", type: ", adj->type, 0);
4785 fprintf (file, "\n");
4786 }
4787 parms.release ();
4788 }
4789
4790 /* Dump the AV linked list. */
4791
4792 void
4793 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4794 {
4795 bool comma = false;
4796 fprintf (f, " Aggregate replacements:");
4797 for (; av; av = av->next)
4798 {
4799 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4800 av->index, av->offset);
4801 print_generic_expr (f, av->value);
4802 comma = true;
4803 }
4804 fprintf (f, "\n");
4805 }
4806
4807 /* Stream out jump function JUMP_FUNC to OB. */
4808
4809 static void
4810 ipa_write_jump_function (struct output_block *ob,
4811 struct ipa_jump_func *jump_func)
4812 {
4813 struct ipa_agg_jf_item *item;
4814 struct bitpack_d bp;
4815 int i, count;
4816
4817 streamer_write_uhwi (ob, jump_func->type);
4818 switch (jump_func->type)
4819 {
4820 case IPA_JF_UNKNOWN:
4821 break;
4822 case IPA_JF_CONST:
4823 gcc_assert (
4824 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4825 stream_write_tree (ob, jump_func->value.constant.value, true);
4826 break;
4827 case IPA_JF_PASS_THROUGH:
4828 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4829 if (jump_func->value.pass_through.operation == NOP_EXPR)
4830 {
4831 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4832 bp = bitpack_create (ob->main_stream);
4833 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4834 streamer_write_bitpack (&bp);
4835 }
4836 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4837 == tcc_unary)
4838 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4839 else
4840 {
4841 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4842 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4843 }
4844 break;
4845 case IPA_JF_ANCESTOR:
4846 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4847 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4848 bp = bitpack_create (ob->main_stream);
4849 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4850 streamer_write_bitpack (&bp);
4851 break;
4852 }
4853
4854 count = vec_safe_length (jump_func->agg.items);
4855 streamer_write_uhwi (ob, count);
4856 if (count)
4857 {
4858 bp = bitpack_create (ob->main_stream);
4859 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4860 streamer_write_bitpack (&bp);
4861 }
4862
4863 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4864 {
4865 streamer_write_uhwi (ob, item->offset);
4866 stream_write_tree (ob, item->value, true);
4867 }
4868
4869 bp = bitpack_create (ob->main_stream);
4870 bp_pack_value (&bp, !!jump_func->bits, 1);
4871 streamer_write_bitpack (&bp);
4872 if (jump_func->bits)
4873 {
4874 streamer_write_widest_int (ob, jump_func->bits->value);
4875 streamer_write_widest_int (ob, jump_func->bits->mask);
4876 }
4877 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4878 streamer_write_bitpack (&bp);
4879 if (jump_func->m_vr)
4880 {
4881 streamer_write_enum (ob->main_stream, value_rang_type,
4882 VR_LAST, jump_func->m_vr->type);
4883 stream_write_tree (ob, jump_func->m_vr->min, true);
4884 stream_write_tree (ob, jump_func->m_vr->max, true);
4885 }
4886 }
4887
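/* Reading aid (a rough summary derived from the writer above, not a
   normative format description): a jump function record consists of the
   jump function type as a uhwi, the type-specific payload (a tree for
   IPA_JF_CONST; operation, formal_id and possibly an operand tree or an
   agg_preserved bit for IPA_JF_PASS_THROUGH; offset, formal_id and an
   agg_preserved bit for IPA_JF_ANCESTOR), the count of aggregate items with
   a by_ref bit and the items themselves, and finally optional known-bits and
   value-range data, each guarded by a single presence bit.  */
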
4888 /* Read in jump function JUMP_FUNC from IB. */
4889
4890 static void
4891 ipa_read_jump_function (struct lto_input_block *ib,
4892 struct ipa_jump_func *jump_func,
4893 struct cgraph_edge *cs,
4894 struct data_in *data_in)
4895 {
4896 enum jump_func_type jftype;
4897 enum tree_code operation;
4898 int i, count;
4899
4900 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4901 switch (jftype)
4902 {
4903 case IPA_JF_UNKNOWN:
4904 ipa_set_jf_unknown (jump_func);
4905 break;
4906 case IPA_JF_CONST:
4907 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4908 break;
4909 case IPA_JF_PASS_THROUGH:
4910 operation = (enum tree_code) streamer_read_uhwi (ib);
4911 if (operation == NOP_EXPR)
4912 {
4913 int formal_id = streamer_read_uhwi (ib);
4914 struct bitpack_d bp = streamer_read_bitpack (ib);
4915 bool agg_preserved = bp_unpack_value (&bp, 1);
4916 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4917 }
4918 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4919 {
4920 int formal_id = streamer_read_uhwi (ib);
4921 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4922 }
4923 else
4924 {
4925 tree operand = stream_read_tree (ib, data_in);
4926 int formal_id = streamer_read_uhwi (ib);
4927 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4928 operation);
4929 }
4930 break;
4931 case IPA_JF_ANCESTOR:
4932 {
4933 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4934 int formal_id = streamer_read_uhwi (ib);
4935 struct bitpack_d bp = streamer_read_bitpack (ib);
4936 bool agg_preserved = bp_unpack_value (&bp, 1);
4937 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4938 break;
4939 }
4940 }
4941
4942 count = streamer_read_uhwi (ib);
4943 vec_alloc (jump_func->agg.items, count);
4944 if (count)
4945 {
4946 struct bitpack_d bp = streamer_read_bitpack (ib);
4947 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4948 }
4949 for (i = 0; i < count; i++)
4950 {
4951 struct ipa_agg_jf_item item;
4952 item.offset = streamer_read_uhwi (ib);
4953 item.value = stream_read_tree (ib, data_in);
4954 jump_func->agg.items->quick_push (item);
4955 }
4956
4957 struct bitpack_d bp = streamer_read_bitpack (ib);
4958 bool bits_known = bp_unpack_value (&bp, 1);
4959 if (bits_known)
4960 {
4961 widest_int value = streamer_read_widest_int (ib);
4962 widest_int mask = streamer_read_widest_int (ib);
4963 ipa_set_jfunc_bits (jump_func, value, mask);
4964 }
4965 else
4966 jump_func->bits = NULL;
4967
4968 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4969 bool vr_known = bp_unpack_value (&vr_bp, 1);
4970 if (vr_known)
4971 {
4972 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4973 VR_LAST);
4974 tree min = stream_read_tree (ib, data_in);
4975 tree max = stream_read_tree (ib, data_in);
4976 ipa_set_jfunc_vr (jump_func, type, min, max);
4977 }
4978 else
4979 jump_func->m_vr = NULL;
4980 }
4981
4982 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4983 relevant to indirect inlining to OB. */
4984
4985 static void
4986 ipa_write_indirect_edge_info (struct output_block *ob,
4987 struct cgraph_edge *cs)
4988 {
4989 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4990 struct bitpack_d bp;
4991
4992 streamer_write_hwi (ob, ii->param_index);
4993 bp = bitpack_create (ob->main_stream);
4994 bp_pack_value (&bp, ii->polymorphic, 1);
4995 bp_pack_value (&bp, ii->agg_contents, 1);
4996 bp_pack_value (&bp, ii->member_ptr, 1);
4997 bp_pack_value (&bp, ii->by_ref, 1);
4998 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4999 bp_pack_value (&bp, ii->vptr_changed, 1);
5000 streamer_write_bitpack (&bp);
5001 if (ii->agg_contents || ii->polymorphic)
5002 streamer_write_hwi (ob, ii->offset);
5003 else
5004 gcc_assert (ii->offset == 0);
5005
5006 if (ii->polymorphic)
5007 {
5008 streamer_write_hwi (ob, ii->otr_token);
5009 stream_write_tree (ob, ii->otr_type, true);
5010 ii->context.stream_out (ob);
5011 }
5012 }
5013
5014 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5015 relevant to indirect inlining from IB. */
5016
5017 static void
5018 ipa_read_indirect_edge_info (struct lto_input_block *ib,
5019 struct data_in *data_in,
5020 struct cgraph_edge *cs)
5021 {
5022 struct cgraph_indirect_call_info *ii = cs->indirect_info;
5023 struct bitpack_d bp;
5024
5025 ii->param_index = (int) streamer_read_hwi (ib);
5026 bp = streamer_read_bitpack (ib);
5027 ii->polymorphic = bp_unpack_value (&bp, 1);
5028 ii->agg_contents = bp_unpack_value (&bp, 1);
5029 ii->member_ptr = bp_unpack_value (&bp, 1);
5030 ii->by_ref = bp_unpack_value (&bp, 1);
5031 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5032 ii->vptr_changed = bp_unpack_value (&bp, 1);
5033 if (ii->agg_contents || ii->polymorphic)
5034 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5035 else
5036 ii->offset = 0;
5037 if (ii->polymorphic)
5038 {
5039 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5040 ii->otr_type = stream_read_tree (ib, data_in);
5041 ii->context.stream_in (ib, data_in);
5042 }
5043 }
5044
5045 /* Stream out NODE info to OB. */
5046
5047 static void
5048 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5049 {
5050 int node_ref;
5051 lto_symtab_encoder_t encoder;
5052 struct ipa_node_params *info = IPA_NODE_REF (node);
5053 int j;
5054 struct cgraph_edge *e;
5055 struct bitpack_d bp;
5056
5057 encoder = ob->decl_state->symtab_node_encoder;
5058 node_ref = lto_symtab_encoder_encode (encoder, node);
5059 streamer_write_uhwi (ob, node_ref);
5060
5061 streamer_write_uhwi (ob, ipa_get_param_count (info));
5062 for (j = 0; j < ipa_get_param_count (info); j++)
5063 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5064 bp = bitpack_create (ob->main_stream);
5065 gcc_assert (info->analysis_done
5066 || ipa_get_param_count (info) == 0);
5067 gcc_assert (!info->node_enqueued);
5068 gcc_assert (!info->ipcp_orig_node);
5069 for (j = 0; j < ipa_get_param_count (info); j++)
5070 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5071 streamer_write_bitpack (&bp);
5072 for (j = 0; j < ipa_get_param_count (info); j++)
5073 {
5074 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5075 stream_write_tree (ob, ipa_get_type (info, j), true);
5076 }
5077 for (e = node->callees; e; e = e->next_callee)
5078 {
5079 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5080
5081 streamer_write_uhwi (ob,
5082 ipa_get_cs_argument_count (args) * 2
5083 + (args->polymorphic_call_contexts != NULL));
5084 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5085 {
5086 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5087 if (args->polymorphic_call_contexts != NULL)
5088 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5089 }
5090 }
5091 for (e = node->indirect_calls; e; e = e->next_callee)
5092 {
5093 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5094
5095 streamer_write_uhwi (ob,
5096 ipa_get_cs_argument_count (args) * 2
5097 + (args->polymorphic_call_contexts != NULL));
5098 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5099 {
5100 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5101 if (args->polymorphic_call_contexts != NULL)
5102 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5103 }
5104 ipa_write_indirect_edge_info (ob, e);
5105 }
5106 }
5107
5108 /* Stream in NODE info from IB. */
5109
5110 static void
5111 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5112 struct data_in *data_in)
5113 {
5114 struct ipa_node_params *info = IPA_NODE_REF (node);
5115 int k;
5116 struct cgraph_edge *e;
5117 struct bitpack_d bp;
5118
5119 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
5120
5121 for (k = 0; k < ipa_get_param_count (info); k++)
5122 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5123
5124 bp = streamer_read_bitpack (ib);
5125 if (ipa_get_param_count (info) != 0)
5126 info->analysis_done = true;
5127 info->node_enqueued = false;
5128 for (k = 0; k < ipa_get_param_count (info); k++)
5129 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
5130 for (k = 0; k < ipa_get_param_count (info); k++)
5131 {
5132 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
5133 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
5134 }
5135 for (e = node->callees; e; e = e->next_callee)
5136 {
5137 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5138 int count = streamer_read_uhwi (ib);
5139 bool contexts_computed = count & 1;
5140 count /= 2;
5141
5142 if (!count)
5143 continue;
5144 vec_safe_grow_cleared (args->jump_functions, count);
5145 if (contexts_computed)
5146 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5147
5148 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5149 {
5150 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5151 data_in);
5152 if (contexts_computed)
5153 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5154 }
5155 }
5156 for (e = node->indirect_calls; e; e = e->next_callee)
5157 {
5158 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5159 int count = streamer_read_uhwi (ib);
5160 bool contexts_computed = count & 1;
5161 count /= 2;
5162
5163 if (count)
5164 {
5165 vec_safe_grow_cleared (args->jump_functions, count);
5166 if (contexts_computed)
5167 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5168 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5169 {
5170 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5171 data_in);
5172 if (contexts_computed)
5173 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5174 }
5175 }
5176 ipa_read_indirect_edge_info (ib, data_in, e);
5177 }
5178 }
5179
5180 /* Write jump functions for the functions in the current LTO partition. */
5181
5182 void
5183 ipa_prop_write_jump_functions (void)
5184 {
5185 struct cgraph_node *node;
5186 struct output_block *ob;
5187 unsigned int count = 0;
5188 lto_symtab_encoder_iterator lsei;
5189 lto_symtab_encoder_t encoder;
5190
5191 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5192 return;
5193
5194 ob = create_output_block (LTO_section_jump_functions);
5195 encoder = ob->decl_state->symtab_node_encoder;
5196 ob->symbol = NULL;
5197 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5198 lsei_next_function_in_partition (&lsei))
5199 {
5200 node = lsei_cgraph_node (lsei);
5201 if (node->has_gimple_body_p ()
5202 && IPA_NODE_REF (node) != NULL)
5203 count++;
5204 }
5205
5206 streamer_write_uhwi (ob, count);
5207
5208 /* Process all of the functions. */
5209 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5210 lsei_next_function_in_partition (&lsei))
5211 {
5212 node = lsei_cgraph_node (lsei);
5213 if (node->has_gimple_body_p ()
5214 && IPA_NODE_REF (node) != NULL)
5215 ipa_write_node_info (ob, node);
5216 }
5217 streamer_write_char_stream (ob->main_stream, 0);
5218 produce_asm (ob, NULL);
5219 destroy_output_block (ob);
5220 }
5221
5222 /* Read section in file FILE_DATA of length LEN with data DATA. */
5223
5224 static void
5225 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5226 size_t len)
5227 {
5228 const struct lto_function_header *header =
5229 (const struct lto_function_header *) data;
5230 const int cfg_offset = sizeof (struct lto_function_header);
5231 const int main_offset = cfg_offset + header->cfg_size;
5232 const int string_offset = main_offset + header->main_size;
5233 struct data_in *data_in;
5234 unsigned int i;
5235 unsigned int count;
5236
5237 lto_input_block ib_main ((const char *) data + main_offset,
5238 header->main_size, file_data->mode_table);
5239
5240 data_in =
5241 lto_data_in_create (file_data, (const char *) data + string_offset,
5242 header->string_size, vNULL);
5243 count = streamer_read_uhwi (&ib_main);
5244
5245 for (i = 0; i < count; i++)
5246 {
5247 unsigned int index;
5248 struct cgraph_node *node;
5249 lto_symtab_encoder_t encoder;
5250
5251 index = streamer_read_uhwi (&ib_main);
5252 encoder = file_data->symtab_node_encoder;
5253 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5254 index));
5255 gcc_assert (node->definition);
5256 ipa_read_node_info (&ib_main, node, data_in);
5257 }
5258 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5259 len);
5260 lto_data_in_delete (data_in);
5261 }
5262
5263 /* Read ipcp jump functions. */
5264
5265 void
5266 ipa_prop_read_jump_functions (void)
5267 {
5268 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5269 struct lto_file_decl_data *file_data;
5270 unsigned int j = 0;
5271
5272 ipa_check_create_node_params ();
5273 ipa_check_create_edge_args ();
5274 ipa_register_cgraph_hooks ();
5275
5276 while ((file_data = file_data_vec[j++]))
5277 {
5278 size_t len;
5279 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5280
5281 if (data)
5282 ipa_prop_read_section (file_data, data, len);
5283 }
5284 }
5285
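/* Stream out the IPA-CP transformation data of NODE to OB: its aggregate
   replacement values followed by the per-parameter value ranges and known
   bits recorded in its transformation summary, if any.  */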
5286 void
5287 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5288 {
5289 int node_ref;
5290 unsigned int count = 0;
5291 lto_symtab_encoder_t encoder;
5292 struct ipa_agg_replacement_value *aggvals, *av;
5293
5294 aggvals = ipa_get_agg_replacements_for_node (node);
5295 encoder = ob->decl_state->symtab_node_encoder;
5296 node_ref = lto_symtab_encoder_encode (encoder, node);
5297 streamer_write_uhwi (ob, node_ref);
5298
5299 for (av = aggvals; av; av = av->next)
5300 count++;
5301 streamer_write_uhwi (ob, count);
5302
5303 for (av = aggvals; av; av = av->next)
5304 {
5305 struct bitpack_d bp;
5306
5307 streamer_write_uhwi (ob, av->offset);
5308 streamer_write_uhwi (ob, av->index);
5309 stream_write_tree (ob, av->value, true);
5310
5311 bp = bitpack_create (ob->main_stream);
5312 bp_pack_value (&bp, av->by_ref, 1);
5313 streamer_write_bitpack (&bp);
5314 }
5315
5316 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5317 if (ts && vec_safe_length (ts->m_vr) > 0)
5318 {
5319 count = ts->m_vr->length ();
5320 streamer_write_uhwi (ob, count);
5321 for (unsigned i = 0; i < count; ++i)
5322 {
5323 struct bitpack_d bp;
5324 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5325 bp = bitpack_create (ob->main_stream);
5326 bp_pack_value (&bp, parm_vr->known, 1);
5327 streamer_write_bitpack (&bp);
5328 if (parm_vr->known)
5329 {
5330 streamer_write_enum (ob->main_stream, value_rang_type,
5331 VR_LAST, parm_vr->type);
5332 streamer_write_wide_int (ob, parm_vr->min);
5333 streamer_write_wide_int (ob, parm_vr->max);
5334 }
5335 }
5336 }
5337 else
5338 streamer_write_uhwi (ob, 0);
5339
5340 if (ts && vec_safe_length (ts->bits) > 0)
5341 {
5342 count = ts->bits->length ();
5343 streamer_write_uhwi (ob, count);
5344
5345 for (unsigned i = 0; i < count; ++i)
5346 {
5347 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5348 struct bitpack_d bp = bitpack_create (ob->main_stream);
5349 bp_pack_value (&bp, !!bits_jfunc, 1);
5350 streamer_write_bitpack (&bp);
5351 if (bits_jfunc)
5352 {
5353 streamer_write_widest_int (ob, bits_jfunc->value);
5354 streamer_write_widest_int (ob, bits_jfunc->mask);
5355 }
5356 }
5357 }
5358 else
5359 streamer_write_uhwi (ob, 0);
5360 }
5361
5362 /* Stream in the IPA-CP transformation data for NODE from IB. */
5363
5364 static void
5365 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5366 data_in *data_in)
5367 {
5368 struct ipa_agg_replacement_value *aggvals = NULL;
5369 unsigned int count, i;
5370
5371 count = streamer_read_uhwi (ib);
5372 for (i = 0; i < count; i++)
5373 {
5374 struct ipa_agg_replacement_value *av;
5375 struct bitpack_d bp;
5376
5377 av = ggc_alloc<ipa_agg_replacement_value> ();
5378 av->offset = streamer_read_uhwi (ib);
5379 av->index = streamer_read_uhwi (ib);
5380 av->value = stream_read_tree (ib, data_in);
5381 bp = streamer_read_bitpack (ib);
5382 av->by_ref = bp_unpack_value (&bp, 1);
5383 av->next = aggvals;
5384 aggvals = av;
5385 }
5386 ipa_set_node_agg_value_chain (node, aggvals);
5387
5388 count = streamer_read_uhwi (ib);
5389 if (count > 0)
5390 {
5391 ipcp_grow_transformations_if_necessary ();
5392
5393 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5394 vec_safe_grow_cleared (ts->m_vr, count);
5395 for (i = 0; i < count; i++)
5396 {
5397 ipa_vr *parm_vr;
5398 parm_vr = &(*ts->m_vr)[i];
5399 struct bitpack_d bp;
5400 bp = streamer_read_bitpack (ib);
5401 parm_vr->known = bp_unpack_value (&bp, 1);
5402 if (parm_vr->known)
5403 {
5404 parm_vr->type = streamer_read_enum (ib, value_range_type,
5405 VR_LAST);
5406 parm_vr->min = streamer_read_wide_int (ib);
5407 parm_vr->max = streamer_read_wide_int (ib);
5408 }
5409 }
5410 }
5411 count = streamer_read_uhwi (ib);
5412 if (count > 0)
5413 {
5414 ipcp_grow_transformations_if_necessary ();
5415
5416 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5417 vec_safe_grow_cleared (ts->bits, count);
5418
5419 for (i = 0; i < count; i++)
5420 {
5421 struct bitpack_d bp = streamer_read_bitpack (ib);
5422 bool known = bp_unpack_value (&bp, 1);
5423 if (known)
5424 {
5425 ipa_bits *bits
5426 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5427 streamer_read_widest_int (ib));
5428 (*ts->bits)[i] = bits;
5429 }
5430 }
5431 }
5432 }
5433
5434 /* Write all aggregate replacements for nodes in the current partition. */
5435
5436 void
5437 ipcp_write_transformation_summaries (void)
5438 {
5439 struct cgraph_node *node;
5440 struct output_block *ob;
5441 unsigned int count = 0;
5442 lto_symtab_encoder_iterator lsei;
5443 lto_symtab_encoder_t encoder;
5444
5445 ob = create_output_block (LTO_section_ipcp_transform);
5446 encoder = ob->decl_state->symtab_node_encoder;
5447 ob->symbol = NULL;
5448 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5449 lsei_next_function_in_partition (&lsei))
5450 {
5451 node = lsei_cgraph_node (lsei);
5452 if (node->has_gimple_body_p ())
5453 count++;
5454 }
5455
5456 streamer_write_uhwi (ob, count);
5457
5458 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5459 lsei_next_function_in_partition (&lsei))
5460 {
5461 node = lsei_cgraph_node (lsei);
5462 if (node->has_gimple_body_p ())
5463 write_ipcp_transformation_info (ob, node);
5464 }
5465 streamer_write_char_stream (ob->main_stream, 0);
5466 produce_asm (ob, NULL);
5467 destroy_output_block (ob);
5468 }
5469
5470 /* Read replacements section in file FILE_DATA of length LEN with data
5471 DATA. */
5472
5473 static void
5474 read_replacements_section (struct lto_file_decl_data *file_data,
5475 const char *data,
5476 size_t len)
5477 {
5478 const struct lto_function_header *header =
5479 (const struct lto_function_header *) data;
5480 const int cfg_offset = sizeof (struct lto_function_header);
5481 const int main_offset = cfg_offset + header->cfg_size;
5482 const int string_offset = main_offset + header->main_size;
5483 struct data_in *data_in;
5484 unsigned int i;
5485 unsigned int count;
5486
5487 lto_input_block ib_main ((const char *) data + main_offset,
5488 header->main_size, file_data->mode_table);
5489
5490 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5491 header->string_size, vNULL);
5492 count = streamer_read_uhwi (&ib_main);
5493
5494 for (i = 0; i < count; i++)
5495 {
5496 unsigned int index;
5497 struct cgraph_node *node;
5498 lto_symtab_encoder_t encoder;
5499
5500 index = streamer_read_uhwi (&ib_main);
5501 encoder = file_data->symtab_node_encoder;
5502 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5503 index));
5504 gcc_assert (node->definition);
5505 read_ipcp_transformation_info (&ib_main, node, data_in);
5506 }
5507 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5508 len);
5509 lto_data_in_delete (data_in);
5510 }
5511
5512 /* Read IPA-CP transformation summaries (aggregate replacements, bits and value ranges) from all input LTO files. */
5513
5514 void
5515 ipcp_read_transformation_summaries (void)
5516 {
5517 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5518 struct lto_file_decl_data *file_data;
5519 unsigned int j = 0;
5520
5521 while ((file_data = file_data_vec[j++]))
5522 {
5523 size_t len;
5524 const char *data = lto_get_section_data (file_data,
5525 LTO_section_ipcp_transform,
5526 NULL, &len);
5527 if (data)
5528 read_replacements_section (file_data, data, len);
5529 }
5530 }
5531
5532 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5533 NODE. */
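/* Worked example with hypothetical numbers: for a clone where
   combined_args_to_skip = {1} and replacements exist up to original index 3,
   the code below builds adj = {0, -1, 1, 2}, so a replacement recorded for
   original parameter 2 is re-indexed to parameter 1 of the clone.  This
   assumes no surviving replacement refers to a skipped parameter.  */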
5534
5535 static void
5536 adjust_agg_replacement_values (struct cgraph_node *node,
5537 struct ipa_agg_replacement_value *aggval)
5538 {
5539 struct ipa_agg_replacement_value *v;
5540 int i, c = 0, d = 0, *adj;
5541
5542 if (!node->clone.combined_args_to_skip)
5543 return;
5544
5545 for (v = aggval; v; v = v->next)
5546 {
5547 gcc_assert (v->index >= 0);
5548 if (c < v->index)
5549 c = v->index;
5550 }
5551 c++;
5552
5553 adj = XALLOCAVEC (int, c);
5554 for (i = 0; i < c; i++)
5555 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5556 {
5557 adj[i] = -1;
5558 d++;
5559 }
5560 else
5561 adj[i] = i - d;
5562
5563 for (v = aggval; v; v = v->next)
5564 v->index = adj[v->index];
5565 }
5566
5567 /* Dominator walker driving the ipcp modification phase. */
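/* Typical use, mirroring the call in ipcp_transform_function below:

     ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                            &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

   before_dom_children then rewrites the qualifying aggregate loads in every
   dominated basic block.  */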
5568
5569 class ipcp_modif_dom_walker : public dom_walker
5570 {
5571 public:
5572 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5573 vec<ipa_param_descriptor, va_gc> *descs,
5574 struct ipa_agg_replacement_value *av,
5575 bool *sc, bool *cc)
5576 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5577 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5578
5579 virtual edge before_dom_children (basic_block);
5580
5581 private:
5582 struct ipa_func_body_info *m_fbi;
5583 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5584 struct ipa_agg_replacement_value *m_aggval;
5585 bool *m_something_changed, *m_cfg_changed;
5586 };
5587
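/* Replace loads from parameter aggregates in BB for which a constant
   replacement value is known.  Hypothetical illustration: if IPA-CP recorded
   that the aggregate pointed to by parameter p always contains the constant 7
   at the offset of field f, a statement

     x_1 = p_2(D)->f;

   is rewritten into

     x_1 = 7;

   provided the by_ref flag and the size of the access match the recorded
   replacement and the types are convertible.  */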
5588 edge
5589 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5590 {
5591 gimple_stmt_iterator gsi;
5592 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5593 {
5594 struct ipa_agg_replacement_value *v;
5595 gimple *stmt = gsi_stmt (gsi);
5596 tree rhs, val, t;
5597 HOST_WIDE_INT offset, size;
5598 int index;
5599 bool by_ref, vce;
5600
5601 if (!gimple_assign_load_p (stmt))
5602 continue;
5603 rhs = gimple_assign_rhs1 (stmt);
5604 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5605 continue;
5606
5607 vce = false;
5608 t = rhs;
5609 while (handled_component_p (t))
5610 {
5611 /* V_C_E can do things like convert an array of integers to one
5612 bigger integer and similar things we do not handle below. */
5613 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5614 {
5615 vce = true;
5616 break;
5617 }
5618 t = TREE_OPERAND (t, 0);
5619 }
5620 if (vce)
5621 continue;
5622
5623 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5624 &offset, &size, &by_ref))
5625 continue;
5626 for (v = m_aggval; v; v = v->next)
5627 if (v->index == index
5628 && v->offset == offset)
5629 break;
5630 if (!v
5631 || v->by_ref != by_ref
5632 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5633 continue;
5634
5635 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5636 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5637 {
5638 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5639 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5640 else if (TYPE_SIZE (TREE_TYPE (rhs))
5641 == TYPE_SIZE (TREE_TYPE (v->value)))
5642 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5643 else
5644 {
5645 if (dump_file)
5646 {
5647 fprintf (dump_file, " const ");
5648 print_generic_expr (dump_file, v->value);
5649 fprintf (dump_file, " can't be converted to type of ");
5650 print_generic_expr (dump_file, rhs);
5651 fprintf (dump_file, "\n");
5652 }
5653 continue;
5654 }
5655 }
5656 else
5657 val = v->value;
5658
5659 if (dump_file && (dump_flags & TDF_DETAILS))
5660 {
5661 fprintf (dump_file, "Modifying stmt:\n ");
5662 print_gimple_stmt (dump_file, stmt, 0);
5663 }
5664 gimple_assign_set_rhs_from_tree (&gsi, val);
5665 update_stmt (stmt);
5666
5667 if (dump_file && (dump_flags & TDF_DETAILS))
5668 {
5669 fprintf (dump_file, "into:\n ");
5670 print_gimple_stmt (dump_file, stmt, 0);
5671 fprintf (dump_file, "\n");
5672 }
5673
5674 *m_something_changed = true;
5675 if (maybe_clean_eh_stmt (stmt)
5676 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5677 *m_cfg_changed = true;
5678 }
5679 return NULL;
5680 }
5681
5682 /* Update the known-bits information of the formal parameters of NODE as
5683 described in its ipcp_transformation_summary. */
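/* Worked example with hypothetical values: for a pointer parameter with
   mask = 0xf8 (low three bits known) and value = 0x4, the code below derives
   align = mask & -mask = 8 and misalign = value & (align - 1) = 4, i.e. the
   pointer is known to be congruent to 4 modulo 8.  For an integral parameter,
   the SSA default definition gets nonzero_bits = mask | value, the set of
   bits that may be nonzero.  */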
5684
5685 static void
5686 ipcp_update_bits (struct cgraph_node *node)
5687 {
5688 tree parm = DECL_ARGUMENTS (node->decl);
5689 tree next_parm = parm;
5690 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5691
5692 if (!ts || vec_safe_length (ts->bits) == 0)
5693 return;
5694
5695 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5696 unsigned count = bits.length ();
5697
5698 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5699 {
5700 if (node->clone.combined_args_to_skip
5701 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5702 continue;
5703
5704 gcc_checking_assert (parm);
5705 next_parm = DECL_CHAIN (parm);
5706
5707 if (!bits[i]
5708 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5709 || POINTER_TYPE_P (TREE_TYPE (parm)))
5710 || !is_gimple_reg (parm))
5711 continue;
5712
5713 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5714 if (!ddef)
5715 continue;
5716
5717 if (dump_file)
5718 {
5719 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5720 print_hex (bits[i]->mask, dump_file);
5721 fprintf (dump_file, "\n");
5722 }
5723
5724 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5725 {
5726 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5727 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5728
5729 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5730 | wide_int::from (bits[i]->value, prec, sgn);
5731 set_nonzero_bits (ddef, nonzero_bits);
5732 }
5733 else
5734 {
5735 unsigned tem = bits[i]->mask.to_uhwi ();
5736 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5737 unsigned align = tem & -tem;
5738 unsigned misalign = bitpos & (align - 1);
5739
5740 if (align > 1)
5741 {
5742 if (dump_file)
5743 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5744
5745 unsigned old_align, old_misalign;
5746 struct ptr_info_def *pi = get_ptr_info (ddef);
5747 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5748
5749 if (old_known
5750 && old_align > align)
5751 {
5752 if (dump_file)
5753 {
5754 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5755 if ((old_misalign & (align - 1)) != misalign)
5756 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5757 old_misalign, misalign);
5758 }
5759 continue;
5760 }
5761
5762 if (old_known
5763 && ((misalign & (old_align - 1)) != old_misalign)
5764 && dump_file)
5765 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5766 old_misalign, misalign);
5767
5768 set_ptr_info_alignment (pi, align, misalign);
5769 }
5770 }
5771 }
5772 }
5773
5774 /* Update the value range information of the formal parameters of NODE as
5775 described in its ipcp_transformation_summary. */
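/* Examples of the updates performed below, with hypothetical ranges: an
   integral parameter with a recorded VR_RANGE [1, 100] gets that range
   attached to its default SSA name via set_range_info; a pointer parameter
   with the anti-range ~[0, 0] is marked non-null via set_ptr_nonnull.  */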
5776
5777 static void
5778 ipcp_update_vr (struct cgraph_node *node)
5779 {
5780 tree fndecl = node->decl;
5781 tree parm = DECL_ARGUMENTS (fndecl);
5782 tree next_parm = parm;
5783 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5784 if (!ts || vec_safe_length (ts->m_vr) == 0)
5785 return;
5786 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5787 unsigned count = vr.length ();
5788
5789 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5790 {
5791 if (node->clone.combined_args_to_skip
5792 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5793 continue;
5794 gcc_checking_assert (parm);
5795 next_parm = DECL_CHAIN (parm);
5796 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5797
5798 if (!ddef || !is_gimple_reg (parm))
5799 continue;
5800
5801 if (vr[i].known
5802 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5803 {
5804 tree type = TREE_TYPE (ddef);
5805 unsigned prec = TYPE_PRECISION (type);
5806 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5807 {
5808 if (dump_file)
5809 {
5810 fprintf (dump_file, "Setting value range of param %u ", i);
5811 fprintf (dump_file, "%s[",
5812 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5813 print_decs (vr[i].min, dump_file);
5814 fprintf (dump_file, ", ");
5815 print_decs (vr[i].max, dump_file);
5816 fprintf (dump_file, "]\n");
5817 }
5818 set_range_info (ddef, vr[i].type,
5819 wide_int_storage::from (vr[i].min, prec,
5820 TYPE_SIGN (type)),
5821 wide_int_storage::from (vr[i].max, prec,
5822 TYPE_SIGN (type)));
5823 }
5824 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5825 && vr[i].type == VR_ANTI_RANGE
5826 && wi::eq_p (vr[i].min, 0)
5827 && wi::eq_p (vr[i].max, 0))
5828 {
5829 if (dump_file)
5830 fprintf (dump_file, "Setting nonnull for %u\n", i);
5831 set_ptr_nonnull (ddef);
5832 }
5833 }
5834 }
5835 }
5836
5837 /* IPA-CP transformation phase: apply the recorded bits, value range and aggregate value information to the body of NODE. */
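/* Overview of the steps below: (1) apply the recorded known-bits and value
   range information to the default definitions of the parameters, (2) fetch
   the aggregate replacements for NODE and re-index them for this clone,
   (3) walk the dominator tree replacing matching aggregate loads, and
   (4) release the per-function analysis data and return the required
   TODOs.  */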
5838
5839 unsigned int
5840 ipcp_transform_function (struct cgraph_node *node)
5841 {
5842 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5843 struct ipa_func_body_info fbi;
5844 struct ipa_agg_replacement_value *aggval;
5845 int param_count;
5846 bool cfg_changed = false, something_changed = false;
5847
5848 gcc_checking_assert (cfun);
5849 gcc_checking_assert (current_function_decl);
5850
5851 if (dump_file)
5852 fprintf (dump_file, "Modification phase of node %s/%i\n",
5853 node->name (), node->order);
5854
5855 ipcp_update_bits (node);
5856 ipcp_update_vr (node);
5857 aggval = ipa_get_agg_replacements_for_node (node);
5858 if (!aggval)
5859 return 0;
5860 param_count = count_formal_params (node->decl);
5861 if (param_count == 0)
5862 return 0;
5863 adjust_agg_replacement_values (node, aggval);
5864 if (dump_file)
5865 ipa_dump_agg_replacement_values (dump_file, aggval);
5866
5867 fbi.node = node;
5868 fbi.info = NULL;
5869 fbi.bb_infos = vNULL;
5870 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5871 fbi.param_count = param_count;
5872 fbi.aa_walked = 0;
5873
5874 vec_safe_grow_cleared (descriptors, param_count);
5875 ipa_populate_param_decls (node, *descriptors);
5876 calculate_dominance_info (CDI_DOMINATORS);
5877 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5878 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5879
5880 int i;
5881 struct ipa_bb_info *bi;
5882 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5883 free_ipa_bb_info (bi);
5884 fbi.bb_infos.release ();
5885 free_dominance_info (CDI_DOMINATORS);
5886 (*ipcp_transformations)[node->uid].agg_values = NULL;
5887 (*ipcp_transformations)[node->uid].bits = NULL;
5888 (*ipcp_transformations)[node->uid].m_vr = NULL;
5889
5890 vec_free (descriptors);
5891
5892 if (!something_changed)
5893 return 0;
5894 else if (cfg_changed)
5895 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5896 else
5897 return TODO_update_ssa_only_virtuals;
5898 }
5899
5900 #include "gt-ipa-prop.h"