1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58
59 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60
61 /* Edge summary for IPA-CP edge information. */
62 ipa_edge_args_sum_t *ipa_edge_args_sum;
63
64 /* Traits for a hash table for reusing already existing ipa_bits. */
65
66 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67 {
68 typedef ipa_bits *value_type;
69 typedef ipa_bits *compare_type;
70 static hashval_t
71 hash (const ipa_bits *p)
72 {
73 hashval_t t = (hashval_t) p->value.to_shwi ();
74 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 }
76 static bool
77 equal (const ipa_bits *a, const ipa_bits *b)
78 {
79 return a->value == b->value && a->mask == b->mask;
80 }
81 static void
82 mark_empty (ipa_bits *&p)
83 {
84 p = NULL;
85 }
86 static bool
87 is_empty (const ipa_bits *p)
88 {
89 return p == NULL;
90 }
91 static bool
92 is_deleted (const ipa_bits *p)
93 {
94 return p == reinterpret_cast<const ipa_bits *> (1);
95 }
96 static void
97 mark_deleted (ipa_bits *&p)
98 {
99 p = reinterpret_cast<ipa_bits *> (1);
100 }
101 };
102
103 /* Hash table to avoid repeated allocations of equal ipa_bits. */
104 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
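
/* Illustrative sketch of how the table above deduplicates ipa_bits; this is
   not the lookup code itself (that lives further down in this file), only a
   minimal example of the intended use:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       {
         ipa_bits *res = ggc_alloc<ipa_bits> ();
         res->value = value;
         res->mask = mask;
         *slot = res;
       }
     return *slot;  */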
105
106 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
107 the equiv bitmap is not hashed and is expected to be NULL. */
108
109 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
110 {
111 typedef value_range *value_type;
112 typedef value_range *compare_type;
113 static hashval_t
114 hash (const value_range *p)
115 {
116 inchash::hash hstate (p->kind ());
117 inchash::add_expr (p->min (), hstate);
118 inchash::add_expr (p->max (), hstate);
119 return hstate.end ();
120 }
121 static bool
122 equal (const value_range *a, const value_range *b)
123 {
124 return a->equal_p (*b);
125 }
126 static void
127 mark_empty (value_range *&p)
128 {
129 p = NULL;
130 }
131 static bool
132 is_empty (const value_range *p)
133 {
134 return p == NULL;
135 }
136 static bool
137 is_deleted (const value_range *p)
138 {
139 return p == reinterpret_cast<const value_range *> (1);
140 }
141 static void
142 mark_deleted (value_range *&p)
143 {
144 p = reinterpret_cast<value_range *> (1);
145 }
146 };
147
148 /* Hash table to avoid repeated allocations of equal value_ranges. */
149 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
150
151 /* Holders of ipa cgraph hooks: */
152 static struct cgraph_node_hook_list *function_insertion_hook_holder;
153
154 /* Description of a reference to an IPA constant. */
155 struct ipa_cst_ref_desc
156 {
157 /* Edge that corresponds to the statement which took the reference. */
158 struct cgraph_edge *cs;
159 /* Linked list of duplicates created when call graph edges are cloned. */
160 struct ipa_cst_ref_desc *next_duplicate;
161 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
162 is out of control. */
163 int refcount;
164 };
165
166 /* Allocation pool for reference descriptions. */
167
168 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
169 ("IPA-PROP ref descriptions");
170
171 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
173
174 static bool
175 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176 {
177 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
178
179 if (!fs_opts)
180 return false;
181 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
182 }
183
184 /* Return index of the formal parameter whose tree is PTREE in the vector of
185 parameter DESCRIPTORS, or -1 if it is not found. */
186
187 static int
188 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 tree ptree)
190 {
191 int i, count;
192
193 count = vec_safe_length (descriptors);
194 for (i = 0; i < count; i++)
195 if ((*descriptors)[i].decl_or_type == ptree)
196 return i;
197
198 return -1;
199 }
200
201 /* Return index of the formal parameter whose tree is PTREE in the function
202 which corresponds to INFO, or -1 if it is not found. */
203
204 int
205 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
206 {
207 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
208 }
209
210 /* Populate the decl_or_type and move_cost fields of the parameter
211 DESCRIPTORS that correspond to NODE. */
212
213 static void
214 ipa_populate_param_decls (struct cgraph_node *node,
215 vec<ipa_param_descriptor, va_gc> &descriptors)
216 {
217 tree fndecl;
218 tree fnargs;
219 tree parm;
220 int param_num;
221
222 fndecl = node->decl;
223 gcc_assert (gimple_has_body_p (fndecl));
224 fnargs = DECL_ARGUMENTS (fndecl);
225 param_num = 0;
226 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
227 {
228 descriptors[param_num].decl_or_type = parm;
229 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
230 descriptors[param_num].move_cost = cost;
231 /* Watch overflow, move_cost is a bitfield. */
232 gcc_checking_assert (cost == descriptors[param_num].move_cost);
233 param_num++;
234 }
235 }
236
237 /* Return how many formal parameters FNDECL has. */
238
239 int
240 count_formal_params (tree fndecl)
241 {
242 tree parm;
243 int count = 0;
244 gcc_assert (gimple_has_body_p (fndecl));
245
246 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
247 count++;
248
249 return count;
250 }
251
252 /* Dump a human-readable representation of the parameter with index I of the
253 function corresponding to INFO to FILE: its index and, when available, its
254 declaration or type. */
255
256 void
257 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
258 {
259 fprintf (file, "param #%i", i);
260 if ((*info->descriptors)[i].decl_or_type)
261 {
262 fprintf (file, " ");
263 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
264 }
265 }
266
267 /* If necessary, allocate vector of parameter descriptors in info of NODE.
268 Return true if they were allocated, false if not. */
269
270 static bool
271 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
272 {
273 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
274
275 if (!info->descriptors && param_count)
276 {
277 vec_safe_grow_cleared (info->descriptors, param_count);
278 return true;
279 }
280 else
281 return false;
282 }
283
284 /* Initialize the ipa_node_params structure associated with NODE by counting
285 the function parameters, creating the descriptors and populating their
286 param_decls. */
287
288 void
289 ipa_initialize_node_params (struct cgraph_node *node)
290 {
291 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
292
293 if (!info->descriptors
294 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
295 ipa_populate_param_decls (node, *info->descriptors);
296 }
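
/* A typical (purely illustrative) use of the routines above during analysis
   of a node; the variable names are made up:

     ipa_initialize_node_params (node);
     class ipa_node_params *info = IPA_NODE_REF (node);
     int idx = ipa_get_param_decl_index (info, parm_decl);

   where IDX ends up being -1 if PARM_DECL is not a formal parameter of
   NODE.  */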
297
298 /* Print the jump functions associated with call graph edge CS to file F. */
299
300 static void
301 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
302 {
303 int i, count;
304
305 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
306 for (i = 0; i < count; i++)
307 {
308 struct ipa_jump_func *jump_func;
309 enum jump_func_type type;
310
311 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
312 type = jump_func->type;
313
314 fprintf (f, " param %d: ", i);
315 if (type == IPA_JF_UNKNOWN)
316 fprintf (f, "UNKNOWN\n");
317 else if (type == IPA_JF_CONST)
318 {
319 tree val = jump_func->value.constant.value;
320 fprintf (f, "CONST: ");
321 print_generic_expr (f, val);
322 if (TREE_CODE (val) == ADDR_EXPR
323 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
324 {
325 fprintf (f, " -> ");
326 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
327 }
328 fprintf (f, "\n");
329 }
330 else if (type == IPA_JF_PASS_THROUGH)
331 {
332 fprintf (f, "PASS THROUGH: ");
333 fprintf (f, "%d, op %s",
334 jump_func->value.pass_through.formal_id,
335 get_tree_code_name (jump_func->value.pass_through.operation));
336 if (jump_func->value.pass_through.operation != NOP_EXPR)
337 {
338 fprintf (f, " ");
339 print_generic_expr (f, jump_func->value.pass_through.operand);
340 }
341 if (jump_func->value.pass_through.agg_preserved)
342 fprintf (f, ", agg_preserved");
343 fprintf (f, "\n");
344 }
345 else if (type == IPA_JF_ANCESTOR)
346 {
347 fprintf (f, "ANCESTOR: ");
348 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
349 jump_func->value.ancestor.formal_id,
350 jump_func->value.ancestor.offset);
351 if (jump_func->value.ancestor.agg_preserved)
352 fprintf (f, ", agg_preserved");
353 fprintf (f, "\n");
354 }
355
356 if (jump_func->agg.items)
357 {
358 struct ipa_agg_jf_item *item;
359 int j;
360
361 fprintf (f, " Aggregate passed by %s:\n",
362 jump_func->agg.by_ref ? "reference" : "value");
363 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
364 {
365 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
366 item->offset);
367 fprintf (f, "type: ");
368 print_generic_expr (f, item->type);
369 fprintf (f, ", ");
370 if (item->jftype == IPA_JF_PASS_THROUGH)
371 fprintf (f, "PASS THROUGH: %d,",
372 item->value.pass_through.formal_id);
373 else if (item->jftype == IPA_JF_LOAD_AGG)
374 {
375 fprintf (f, "LOAD AGG: %d",
376 item->value.pass_through.formal_id);
377 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
378 item->value.load_agg.offset,
379 item->value.load_agg.by_ref ? "reference"
380 : "value");
381 }
382
383 if (item->jftype == IPA_JF_PASS_THROUGH
384 || item->jftype == IPA_JF_LOAD_AGG)
385 {
386 fprintf (f, " op %s",
387 get_tree_code_name (item->value.pass_through.operation));
388 if (item->value.pass_through.operation != NOP_EXPR)
389 {
390 fprintf (f, " ");
391 print_generic_expr (f, item->value.pass_through.operand);
392 }
393 }
394 else if (item->jftype == IPA_JF_CONST)
395 {
396 fprintf (f, "CONST: ");
397 print_generic_expr (f, item->value.constant);
398 }
399 else if (item->jftype == IPA_JF_UNKNOWN)
400 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
401 tree_to_uhwi (TYPE_SIZE (item->type)));
402 fprintf (f, "\n");
403 }
404 }
405
406 class ipa_polymorphic_call_context *ctx
407 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
408 if (ctx && !ctx->useless_p ())
409 {
410 fprintf (f, " Context: ");
411 ctx->dump (dump_file);
412 }
413
414 if (jump_func->bits)
415 {
416 fprintf (f, " value: ");
417 print_hex (jump_func->bits->value, f);
418 fprintf (f, ", mask: ");
419 print_hex (jump_func->bits->mask, f);
420 fprintf (f, "\n");
421 }
422 else
423 fprintf (f, " Unknown bits\n");
424
425 if (jump_func->m_vr)
426 {
427 fprintf (f, " VR ");
428 fprintf (f, "%s[",
429 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
430 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
431 fprintf (f, ", ");
432 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
433 fprintf (f, "]\n");
434 }
435 else
436 fprintf (f, " Unknown VR\n");
437 }
438 }
439
440
441 /* Print the jump functions of all arguments on all call graph edges going from
442 NODE to file F. */
443
444 void
445 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
446 {
447 struct cgraph_edge *cs;
448
449 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
450 for (cs = node->callees; cs; cs = cs->next_callee)
451 {
452
453 fprintf (f, " callsite %s -> %s : \n",
454 node->dump_name (),
455 cs->callee->dump_name ());
456 if (!ipa_edge_args_info_available_for_edge_p (cs))
457 fprintf (f, " no arg info\n");
458 else
459 ipa_print_node_jump_functions_for_edge (f, cs);
460 }
461
462 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
463 {
464 class cgraph_indirect_call_info *ii;
465
466 ii = cs->indirect_info;
467 if (ii->agg_contents)
468 fprintf (f, " indirect %s callsite, calling param %i, "
469 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
470 ii->member_ptr ? "member ptr" : "aggregate",
471 ii->param_index, ii->offset,
472 ii->by_ref ? "by reference" : "by_value");
473 else
474 fprintf (f, " indirect %s callsite, calling param %i, "
475 "offset " HOST_WIDE_INT_PRINT_DEC,
476 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
477 ii->offset);
478
479 if (cs->call_stmt)
480 {
481 fprintf (f, ", for stmt ");
482 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
483 }
484 else
485 fprintf (f, "\n");
486 if (ii->polymorphic)
487 ii->context.dump (f);
488 if (!ipa_edge_args_info_available_for_edge_p (cs))
489 fprintf (f, " no arg info\n");
490 else
491 ipa_print_node_jump_functions_for_edge (f, cs);
492 }
493 }
494
495 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
496
497 void
498 ipa_print_all_jump_functions (FILE *f)
499 {
500 struct cgraph_node *node;
501
502 fprintf (f, "\nJump functions:\n");
503 FOR_EACH_FUNCTION (node)
504 {
505 ipa_print_node_jump_functions (f, node);
506 }
507 }
508
509 /* Set JFUNC to be a jump function that is known to describe nothing. */
510
511 static void
512 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
513 {
514 jfunc->type = IPA_JF_UNKNOWN;
515 }
516
517 /* Set DST to be a copy of another jump function SRC (to be used by the jump
518 function combination code). The two functions will share their rdesc. */
519
520 static void
521 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
522 struct ipa_jump_func *src)
523
524 {
525 gcc_checking_assert (src->type == IPA_JF_CONST);
526 dst->type = IPA_JF_CONST;
527 dst->value.constant = src->value.constant;
528 }
529
530 /* Set JFUNC to be a constant jump function. */
531
532 static void
533 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
534 struct cgraph_edge *cs)
535 {
536 jfunc->type = IPA_JF_CONST;
537 jfunc->value.constant.value = unshare_expr_without_location (constant);
538
539 if (TREE_CODE (constant) == ADDR_EXPR
540 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
541 {
542 struct ipa_cst_ref_desc *rdesc;
543
544 rdesc = ipa_refdesc_pool.allocate ();
545 rdesc->cs = cs;
546 rdesc->next_duplicate = NULL;
547 rdesc->refcount = 1;
548 jfunc->value.constant.rdesc = rdesc;
549 }
550 else
551 jfunc->value.constant.rdesc = NULL;
552 }
553
554 /* Set JFUNC to be a simple pass-through jump function. */
555 static void
556 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
557 bool agg_preserved)
558 {
559 jfunc->type = IPA_JF_PASS_THROUGH;
560 jfunc->value.pass_through.operand = NULL_TREE;
561 jfunc->value.pass_through.formal_id = formal_id;
562 jfunc->value.pass_through.operation = NOP_EXPR;
563 jfunc->value.pass_through.agg_preserved = agg_preserved;
564 }
565
566 /* Set JFUNC to be a unary pass-through jump function. */
567
568 static void
569 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
570 enum tree_code operation)
571 {
572 jfunc->type = IPA_JF_PASS_THROUGH;
573 jfunc->value.pass_through.operand = NULL_TREE;
574 jfunc->value.pass_through.formal_id = formal_id;
575 jfunc->value.pass_through.operation = operation;
576 jfunc->value.pass_through.agg_preserved = false;
577 }
578 /* Set JFUNC to be an arithmetic pass-through jump function. */
579
580 static void
581 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
582 tree operand, enum tree_code operation)
583 {
584 jfunc->type = IPA_JF_PASS_THROUGH;
585 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
586 jfunc->value.pass_through.formal_id = formal_id;
587 jfunc->value.pass_through.operation = operation;
588 jfunc->value.pass_through.agg_preserved = false;
589 }
590
591 /* Set JFUNC to be an ancestor jump function. */
592
593 static void
594 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
595 int formal_id, bool agg_preserved)
596 {
597 jfunc->type = IPA_JF_ANCESTOR;
598 jfunc->value.ancestor.formal_id = formal_id;
599 jfunc->value.ancestor.offset = offset;
600 jfunc->value.ancestor.agg_preserved = agg_preserved;
601 }
602
603 /* Get IPA BB information about the given BB. FBI is the context of analysis
604 of this function body. */
605
606 static struct ipa_bb_info *
607 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
608 {
609 gcc_checking_assert (fbi);
610 return &fbi->bb_infos[bb->index];
611 }
612
613 /* Structure to be passed in between detect_type_change and
614 check_stmt_for_type_change. */
615
616 struct prop_type_change_info
617 {
618 /* Offset into the object where there is the virtual method pointer we are
619 looking for. */
620 HOST_WIDE_INT offset;
621 /* The declaration or SSA_NAME pointer of the base that we are checking for
622 type change. */
623 tree object;
624 /* Set to true if dynamic type change has been detected. */
625 bool type_maybe_changed;
626 };
627
628 /* Return true if STMT can modify a virtual method table pointer.
629
630 This function makes special assumptions about both constructors and
631 destructors which are all the functions that are allowed to alter the VMT
632 pointers. It assumes that destructors begin with assignment into all VMT
633 pointers and that constructors essentially proceed in the following way:
634
635 1) The very first thing they do is that they call constructors of ancestor
636 sub-objects that have them.
637
638 2) Then the VMT pointers of this object and of all its ancestors are set to
639 new values corresponding to the type associated with the constructor.
640
641 3) Only afterwards is other stuff run, such as constructors of member
642 sub-objects and the code written by the user. Only this part may include
643 calling virtual functions, directly or indirectly.
644
645 There is no way to call a constructor of an ancestor sub-object in any
646 other way.
647
648 This means that we do not have to care whether constructors get the correct
649 type information because they will always change it (in fact, if we define
650 the type to be given by the VMT pointer, it is undefined).
651
652 The most important fact to derive from the above is that if, for some
653 statement in the section 3, we try to detect whether the dynamic type has
654 changed, we can safely ignore all calls as we examine the function body
655 backwards until we reach statements in section 2 because these calls cannot
656 be ancestor constructors or destructors (if the input is not bogus) and so
657 do not change the dynamic type (this holds true only for automatically
658 allocated objects but at the moment we devirtualize only these). We then
659 must detect that statements in section 2 change the dynamic type and can try
660 to derive the new type. That is enough and we can stop, we will never see
661 the calls into constructors of sub-objects in this code. Therefore we can
662 safely ignore all call statements that we traverse.
663 */
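
/* A minimal sketch of the constructor shape assumed above, in hypothetical
   C++-like pseudo code (the field and symbol names are made up):

     struct B : A { ... };

     B::B ()                           // roughly what the emitted body does
     {
       A::A (this);                    // 1) ancestor sub-object ctors first
       this->_vptr = &vtable_for_B;    // 2) VMT pointers set to B's tables
       ... member ctors, user code ... // 3) only now code that may call
                                       //    virtual functions
     }  */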
664
665 static bool
666 stmt_may_be_vtbl_ptr_store (gimple *stmt)
667 {
668 if (is_gimple_call (stmt))
669 return false;
670 if (gimple_clobber_p (stmt))
671 return false;
672 else if (is_gimple_assign (stmt))
673 {
674 tree lhs = gimple_assign_lhs (stmt);
675
676 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
677 {
678 if (flag_strict_aliasing
679 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
680 return false;
681
682 if (TREE_CODE (lhs) == COMPONENT_REF
683 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
684 return false;
685 /* In the future we might want to use get_ref_base_and_extent to find
686 if there is a field corresponding to the offset and if so, proceed
687 almost like if it was a component ref. */
688 }
689 }
690 return true;
691 }
692
693 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
694 to check whether a particular statement may modify the virtual table
695 pointer. It stores its result into DATA, which points to a
696 prop_type_change_info structure. */
697
698 static bool
699 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
700 {
701 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
702 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
703
704 if (stmt_may_be_vtbl_ptr_store (stmt))
705 {
706 tci->type_maybe_changed = true;
707 return true;
708 }
709 else
710 return false;
711 }
712
713 /* See if ARG is a PARM_DECL describing an instance passed by pointer
714 or reference in FUNCTION. Return true if the dynamic type may change
715 between the beginning of the function and the point where CALL is invoked.
716
717 Generally functions are not allowed to change the type of such instances,
718 but they may call destructors. We assume that methods cannot destroy the
719 THIS pointer. Also, as a special case, constructors and destructors may
720 change the type of the THIS pointer. */
721
722 static bool
723 param_type_may_change_p (tree function, tree arg, gimple *call)
724 {
725 /* Pure functions cannot change the dynamic type;
726 that would require writing to memory. */
727 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
728 return false;
729 /* We need to check if we are within an inlined constructor
730 or destructor (ideally we would have a way to check that the
731 inlined cdtor is actually working on ARG, but we don't have
732 an easy way to tell, so punt on all non-pure cdtors).
733 We may also record the types of cdtors and, once we know the type
734 of the instance, match them.
735
736 Also code unification optimizations may merge calls from
737 different blocks making return values unreliable. So
738 do nothing during late optimization. */
739 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
740 return true;
741 if (TREE_CODE (arg) == SSA_NAME
742 && SSA_NAME_IS_DEFAULT_DEF (arg)
743 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
744 {
745 /* Normal (non-THIS) argument. */
746 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
747 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
748 /* THIS pointer of a method - here we want to watch constructors
749 and destructors as those definitely may change the dynamic
750 type. */
751 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
752 && !DECL_CXX_CONSTRUCTOR_P (function)
753 && !DECL_CXX_DESTRUCTOR_P (function)
754 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
755 {
756 /* Walk the inline stack and watch out for ctors/dtors. */
757 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
758 block = BLOCK_SUPERCONTEXT (block))
759 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
760 return true;
761 return false;
762 }
763 }
764 return true;
765 }
766
767 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
768 callsite CALL) by looking for assignments to its virtual table pointer. If
769 it is, return true. ARG is the object itself (not a pointer
770 to it, unless dereferenced). BASE is the base of the memory access as
771 returned by get_ref_base_and_extent, as is the offset.
772
773 This is a helper function for detect_type_change and detect_type_change_ssa
774 that does the heavy work, which is usually unnecessary. */
775
776 static bool
777 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
778 tree base, tree comp_type, gcall *call,
779 HOST_WIDE_INT offset)
780 {
781 struct prop_type_change_info tci;
782 ao_ref ao;
783
784 gcc_checking_assert (DECL_P (arg)
785 || TREE_CODE (arg) == MEM_REF
786 || handled_component_p (arg));
787
788 comp_type = TYPE_MAIN_VARIANT (comp_type);
789
790 /* Const calls cannot call virtual methods through VMT and so type changes do
791 not matter. */
792 if (!flag_devirtualize || !gimple_vuse (call)
793 /* Be sure expected_type is polymorphic. */
794 || !comp_type
795 || TREE_CODE (comp_type) != RECORD_TYPE
796 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
797 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
798 return true;
799
800 ao_ref_init (&ao, arg);
801 ao.base = base;
802 ao.offset = offset;
803 ao.size = POINTER_SIZE;
804 ao.max_size = ao.size;
805
806 tci.offset = offset;
807 tci.object = get_base_address (arg);
808 tci.type_maybe_changed = false;
809
810 int walked
811 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
812 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
813
814 if (walked >= 0 && !tci.type_maybe_changed)
815 return false;
816
817 return true;
818 }
819
820 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
821 If it is, return true. ARG is the object itself (not a pointer
822 to it, unless dereferenced). BASE is the base of the memory access as
823 returned by get_ref_base_and_extent, as is the offset. */
824
825 static bool
826 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
827 tree comp_type, gcall *call,
828 HOST_WIDE_INT offset)
829 {
830 if (!flag_devirtualize)
831 return false;
832
833 if (TREE_CODE (base) == MEM_REF
834 && !param_type_may_change_p (current_function_decl,
835 TREE_OPERAND (base, 0),
836 call))
837 return false;
838 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
839 call, offset);
840 }
841
842 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
843 SSA name (its dereference will become the base and the offset is assumed to
844 be zero). */
845
846 static bool
847 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
848 gcall *call)
849 {
850 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
851 if (!flag_devirtualize
852 || !POINTER_TYPE_P (TREE_TYPE (arg)))
853 return false;
854
855 if (!param_type_may_change_p (current_function_decl, arg, call))
856 return false;
857
858 arg = build2 (MEM_REF, ptr_type_node, arg,
859 build_int_cst (ptr_type_node, 0));
860
861 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
862 call, 0);
863 }
864
865 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
866 boolean variable pointed to by DATA. */
867
868 static bool
869 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
870 void *data)
871 {
872 bool *b = (bool *) data;
873 *b = true;
874 return true;
875 }
876
877 /* Find the nearest valid aa status for parameter specified by INDEX that
878 dominates BB. */
879
880 static struct ipa_param_aa_status *
881 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
882 int index)
883 {
884 while (true)
885 {
886 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
887 if (!bb)
888 return NULL;
889 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
890 if (!bi->param_aa_statuses.is_empty ()
891 && bi->param_aa_statuses[index].valid)
892 return &bi->param_aa_statuses[index];
893 }
894 }
895
896 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
897 structures and/or initialize the result with a dominating description as
898 necessary. */
899
900 static struct ipa_param_aa_status *
901 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
902 int index)
903 {
904 gcc_checking_assert (fbi);
905 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
906 if (bi->param_aa_statuses.is_empty ())
907 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
908 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
909 if (!paa->valid)
910 {
911 gcc_checking_assert (!paa->parm_modified
912 && !paa->ref_modified
913 && !paa->pt_modified);
914 struct ipa_param_aa_status *dom_paa;
915 dom_paa = find_dominating_aa_status (fbi, bb, index);
916 if (dom_paa)
917 *paa = *dom_paa;
918 else
919 paa->valid = true;
920 }
921
922 return paa;
923 }
924
925 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
926 a value known not to be modified in this function before reaching the
927 statement STMT. FBI holds information about the function that we have
928 gathered so far but which does not survive the summary building stage. */
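
/* For illustration (hypothetical GIMPLE of a function whose parameter A is
   addressable and thus not a gimple register):

     foo (int a)
     {
       a.0_1 = a;     // preserved: no store into A can precede this load
       use (&a);      // the call may clobber A through the escaped address
       a.1_2 = a;     // not known preserved: the walk over virtual
                      // definitions reaches the potentially clobbering call
     }  */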
929
930 static bool
931 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
932 gimple *stmt, tree parm_load)
933 {
934 struct ipa_param_aa_status *paa;
935 bool modified = false;
936 ao_ref refd;
937
938 tree base = get_base_address (parm_load);
939 gcc_assert (TREE_CODE (base) == PARM_DECL);
940 if (TREE_READONLY (base))
941 return true;
942
943 gcc_checking_assert (fbi);
944 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
945 if (paa->parm_modified)
946 return false;
947
948 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
949 ao_ref_init (&refd, parm_load);
950 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
951 &modified, NULL, NULL,
952 fbi->aa_walk_budget + 1);
953 if (walked < 0)
954 {
955 modified = true;
956 if (fbi)
957 fbi->aa_walk_budget = 0;
958 }
959 else if (fbi)
960 fbi->aa_walk_budget -= walked;
961 if (paa && modified)
962 paa->parm_modified = true;
963 return !modified;
964 }
965
966 /* If STMT is an assignment that loads a value from a parameter declaration,
967 return the index of the parameter in ipa_node_params which has not been
968 modified. Otherwise return -1. */
969
970 static int
971 load_from_unmodified_param (struct ipa_func_body_info *fbi,
972 vec<ipa_param_descriptor, va_gc> *descriptors,
973 gimple *stmt)
974 {
975 int index;
976 tree op1;
977
978 if (!gimple_assign_single_p (stmt))
979 return -1;
980
981 op1 = gimple_assign_rhs1 (stmt);
982 if (TREE_CODE (op1) != PARM_DECL)
983 return -1;
984
985 index = ipa_get_param_decl_index_1 (descriptors, op1);
986 if (index < 0
987 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
988 return -1;
989
990 return index;
991 }
992
993 /* Return true if memory reference REF (which must be a load through parameter
994 with INDEX) loads data that are known to be unmodified in this function
995 before reaching statement STMT. */
996
997 static bool
998 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
999 int index, gimple *stmt, tree ref)
1000 {
1001 struct ipa_param_aa_status *paa;
1002 bool modified = false;
1003 ao_ref refd;
1004
1005 gcc_checking_assert (fbi);
1006 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1007 if (paa->ref_modified)
1008 return false;
1009
1010 gcc_checking_assert (gimple_vuse (stmt));
1011 ao_ref_init (&refd, ref);
1012 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1013 &modified, NULL, NULL,
1014 fbi->aa_walk_budget + 1);
1015 if (walked < 0)
1016 {
1017 modified = true;
1018 fbi->aa_walk_budget = 0;
1019 }
1020 else
1021 fbi->aa_walk_budget -= walked;
1022 if (modified)
1023 paa->ref_modified = true;
1024 return !modified;
1025 }
1026
1027 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1028 is known to be unmodified in this function before reaching call statement
1029 CALL into which it is passed. FBI describes the function body. */
1030
1031 static bool
1032 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1033 gimple *call, tree parm)
1034 {
1035 bool modified = false;
1036 ao_ref refd;
1037
1038 /* It's unnecessary to calculate anything about memory contents for a const
1039 function because it is not going to use it. But do not cache the result
1040 either. Also, no such calculations for non-pointers. */
1041 if (!gimple_vuse (call)
1042 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1043 return false;
1044
1045 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1046 gimple_bb (call),
1047 index);
1048 if (paa->pt_modified)
1049 return false;
1050
1051 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1052 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1053 &modified, NULL, NULL,
1054 fbi->aa_walk_budget + 1);
1055 if (walked < 0)
1056 {
1057 fbi->aa_walk_budget = 0;
1058 modified = true;
1059 }
1060 else
1061 fbi->aa_walk_budget -= walked;
1062 if (modified)
1063 paa->pt_modified = true;
1064 return !modified;
1065 }
1066
1067 /* Return true if we can prove that OP is a memory reference loading
1068 data from an aggregate passed as a parameter.
1069
1070 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1071 false if it cannot prove that the value has not been modified before the
1072 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1073 if it cannot prove the value has not been modified, in that case it will
1074 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1075
1076 INFO and PARMS_AINFO describe parameters of the current function (but the
1077 latter can be NULL), STMT is the load statement. If function returns true,
1078 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1079 within the aggregate and whether it is a load from a value passed by
1080 reference respectively. */
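
/* Illustrative examples (hypothetical GIMPLE) of loads that this function
   recognizes:

     foo (struct S s, struct S *p)
     {
       tmp_1 = s.f;        // aggregate passed by value:
                           //   *INDEX_P = 0, *BY_REF_P = false
       tmp_2 = p_3(D)->f;  // aggregate passed by reference:
                           //   *INDEX_P = 1, *BY_REF_P = true
     }

   In both cases *OFFSET_P receives the bit offset of field F within struct S,
   provided the accessed memory is not known to be modified beforehand.  */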
1081
1082 bool
1083 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1084 vec<ipa_param_descriptor, va_gc> *descriptors,
1085 gimple *stmt, tree op, int *index_p,
1086 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1087 bool *by_ref_p, bool *guaranteed_unmodified)
1088 {
1089 int index;
1090 HOST_WIDE_INT size;
1091 bool reverse;
1092 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1093
1094 if (!base)
1095 return false;
1096
1097 if (DECL_P (base))
1098 {
1099 int index = ipa_get_param_decl_index_1 (descriptors, base);
1100 if (index >= 0
1101 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1102 {
1103 *index_p = index;
1104 *by_ref_p = false;
1105 if (size_p)
1106 *size_p = size;
1107 if (guaranteed_unmodified)
1108 *guaranteed_unmodified = true;
1109 return true;
1110 }
1111 return false;
1112 }
1113
1114 if (TREE_CODE (base) != MEM_REF
1115 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1116 || !integer_zerop (TREE_OPERAND (base, 1)))
1117 return false;
1118
1119 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1120 {
1121 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1122 index = ipa_get_param_decl_index_1 (descriptors, parm);
1123 }
1124 else
1125 {
1126 /* This branch catches situations where a pointer parameter is not a
1127 gimple register, for example:
1128
1129 void hip7(S*) (struct S * p)
1130 {
1131 void (*<T2e4>) (struct S *) D.1867;
1132 struct S * p.1;
1133
1134 <bb 2>:
1135 p.1_1 = p;
1136 D.1867_2 = p.1_1->f;
1137 D.1867_2 ();
1138 gdp = &p;
1139 */
1140
1141 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1142 index = load_from_unmodified_param (fbi, descriptors, def);
1143 }
1144
1145 if (index >= 0)
1146 {
1147 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1148 if (!data_preserved && !guaranteed_unmodified)
1149 return false;
1150
1151 *index_p = index;
1152 *by_ref_p = true;
1153 if (size_p)
1154 *size_p = size;
1155 if (guaranteed_unmodified)
1156 *guaranteed_unmodified = data_preserved;
1157 return true;
1158 }
1159 return false;
1160 }
1161
1162 /* If STMT is an assignment that loads a value from a parameter declaration,
1163 or from an aggregate passed as the parameter either by value or reference,
1164 return the index of the parameter in ipa_node_params. Otherwise return -1.
1165
1166 FBI holds gathered information about the function. INFO describes
1167 parameters of the function, STMT is the assignment statement. If it is a
1168 memory load from an aggregate, *OFFSET_P is filled with offset within the
1169 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1170 reference. */
1171
1172 static int
1173 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1174 class ipa_node_params *info,
1175 gimple *stmt,
1176 HOST_WIDE_INT *offset_p,
1177 bool *by_ref_p)
1178 {
1179 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1180 poly_int64 size;
1181
1182 /* Load value from a parameter declaration. */
1183 if (index >= 0)
1184 {
1185 *offset_p = -1;
1186 return index;
1187 }
1188
1189 if (!gimple_assign_load_p (stmt))
1190 return -1;
1191
1192 tree rhs = gimple_assign_rhs1 (stmt);
1193
1194 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1195 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1196 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1197 return -1;
1198
1199 /* Skip memory reference containing bit-field. */
1200 if (TREE_CODE (rhs) == BIT_FIELD_REF
1201 || contains_bitfld_component_ref_p (rhs))
1202 return -1;
1203
1204 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1205 offset_p, &size, by_ref_p))
1206 return -1;
1207
1208 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1209 size));
1210 if (!*by_ref_p)
1211 {
1212 tree param_type = ipa_get_type (info, index);
1213
1214 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1215 return -1;
1216 }
1217 else if (TREE_THIS_VOLATILE (rhs))
1218 return -1;
1219
1220 return index;
1221 }
1222
1223 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1224 of an assignment statement STMT, try to determine whether we are actually
1225 handling any of the following cases and construct an appropriate jump
1226 function into JFUNC if so:
1227
1228 1) The passed value is loaded from a formal parameter which is not a gimple
1229 register (most probably because it is addressable, the value has to be
1230 scalar) and we can guarantee the value has not changed. This case can
1231 therefore be described by a simple pass-through jump function. For example:
1232
1233 foo (int a)
1234 {
1235 int a.0;
1236
1237 a.0_2 = a;
1238 bar (a.0_2);
1239
1240 2) The passed value can be described by a simple arithmetic pass-through
1241 jump function. E.g.
1242
1243 foo (int a)
1244 {
1245 int D.2064;
1246
1247 D.2064_4 = a.1(D) + 4;
1248 bar (D.2064_4);
1249
1250 This case can also occur in combination of the previous one, e.g.:
1251
1252 foo (int a, int z)
1253 {
1254 int a.0;
1255 int D.2064;
1256
1257 a.0_3 = a;
1258 D.2064_4 = a.0_3 + 4;
1259 foo (D.2064_4);
1260
1261 3) The passed value is an address of an object within another one (which
1262 also passed by reference). Such situations are described by an ancestor
1263 jump function and describe situations such as:
1264
1265 B::foo() (struct B * const this)
1266 {
1267 struct A * D.1845;
1268
1269 D.1845_2 = &this_1(D)->D.1748;
1270 A::bar (D.1845_2);
1271
1272 INFO is the structure describing individual parameters as accessed in
1273 different stages of IPA optimizations. FBI contains the information that is
1274 only needed for intraprocedural analysis. */
1275
1276 static void
1277 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1278 class ipa_node_params *info,
1279 struct ipa_jump_func *jfunc,
1280 gcall *call, gimple *stmt, tree name,
1281 tree param_type)
1282 {
1283 HOST_WIDE_INT offset, size;
1284 tree op1, tc_ssa, base, ssa;
1285 bool reverse;
1286 int index;
1287
1288 op1 = gimple_assign_rhs1 (stmt);
1289
1290 if (TREE_CODE (op1) == SSA_NAME)
1291 {
1292 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1293 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1294 else
1295 index = load_from_unmodified_param (fbi, info->descriptors,
1296 SSA_NAME_DEF_STMT (op1));
1297 tc_ssa = op1;
1298 }
1299 else
1300 {
1301 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1302 tc_ssa = gimple_assign_lhs (stmt);
1303 }
1304
1305 if (index >= 0)
1306 {
1307 switch (gimple_assign_rhs_class (stmt))
1308 {
1309 case GIMPLE_BINARY_RHS:
1310 {
1311 tree op2 = gimple_assign_rhs2 (stmt);
1312 if (!is_gimple_ip_invariant (op2)
1313 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1314 != tcc_comparison)
1315 && !useless_type_conversion_p (TREE_TYPE (name),
1316 TREE_TYPE (op1))))
1317 return;
1318
1319 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1320 gimple_assign_rhs_code (stmt));
1321 break;
1322 }
1323 case GIMPLE_SINGLE_RHS:
1324 {
1325 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1326 tc_ssa);
1327 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1328 break;
1329 }
1330 case GIMPLE_UNARY_RHS:
1331 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1332 ipa_set_jf_unary_pass_through (jfunc, index,
1333 gimple_assign_rhs_code (stmt));
1334 default:;
1335 }
1336 return;
1337 }
1338
1339 if (TREE_CODE (op1) != ADDR_EXPR)
1340 return;
1341 op1 = TREE_OPERAND (op1, 0);
1342 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1343 return;
1344 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1345 offset_int mem_offset;
1346 if (!base
1347 || TREE_CODE (base) != MEM_REF
1348 || !mem_ref_offset (base).is_constant (&mem_offset))
1349 return;
1350 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1351 ssa = TREE_OPERAND (base, 0);
1352 if (TREE_CODE (ssa) != SSA_NAME
1353 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1354 || offset < 0)
1355 return;
1356
1357 /* Dynamic types are changed in constructors and destructors. */
1358 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1359 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1360 ipa_set_ancestor_jf (jfunc, offset, index,
1361 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1362 }
1363
1364 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1365 it looks like:
1366
1367 iftmp.1_3 = &obj_2(D)->D.1762;
1368
1369 The base of the MEM_REF must be a default definition SSA NAME of a
1370 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1371 whole MEM_REF expression is returned and the offset calculated from any
1372 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1373 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1374
1375 static tree
1376 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1377 {
1378 HOST_WIDE_INT size;
1379 tree expr, parm, obj;
1380 bool reverse;
1381
1382 if (!gimple_assign_single_p (assign))
1383 return NULL_TREE;
1384 expr = gimple_assign_rhs1 (assign);
1385
1386 if (TREE_CODE (expr) != ADDR_EXPR)
1387 return NULL_TREE;
1388 expr = TREE_OPERAND (expr, 0);
1389 obj = expr;
1390 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1391
1392 offset_int mem_offset;
1393 if (!expr
1394 || TREE_CODE (expr) != MEM_REF
1395 || !mem_ref_offset (expr).is_constant (&mem_offset))
1396 return NULL_TREE;
1397 parm = TREE_OPERAND (expr, 0);
1398 if (TREE_CODE (parm) != SSA_NAME
1399 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1400 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1401 return NULL_TREE;
1402
1403 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1404 *obj_p = obj;
1405 return expr;
1406 }
1407
1408
1409 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1410 statement PHI, try to find out whether NAME is in fact a
1411 multiple-inheritance typecast from a descendant into an ancestor of a formal
1412 parameter and thus can be described by an ancestor jump function and if so,
1413 write the appropriate function into JFUNC.
1414
1415 Essentially we want to match the following pattern:
1416
1417 if (obj_2(D) != 0B)
1418 goto <bb 3>;
1419 else
1420 goto <bb 4>;
1421
1422 <bb 3>:
1423 iftmp.1_3 = &obj_2(D)->D.1762;
1424
1425 <bb 4>:
1426 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1427 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1428 return D.1879_6; */
1429
1430 static void
1431 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1432 class ipa_node_params *info,
1433 struct ipa_jump_func *jfunc,
1434 gcall *call, gphi *phi)
1435 {
1436 HOST_WIDE_INT offset;
1437 gimple *assign, *cond;
1438 basic_block phi_bb, assign_bb, cond_bb;
1439 tree tmp, parm, expr, obj;
1440 int index, i;
1441
1442 if (gimple_phi_num_args (phi) != 2)
1443 return;
1444
1445 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1446 tmp = PHI_ARG_DEF (phi, 0);
1447 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1448 tmp = PHI_ARG_DEF (phi, 1);
1449 else
1450 return;
1451 if (TREE_CODE (tmp) != SSA_NAME
1452 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1453 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1454 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1455 return;
1456
1457 assign = SSA_NAME_DEF_STMT (tmp);
1458 assign_bb = gimple_bb (assign);
1459 if (!single_pred_p (assign_bb))
1460 return;
1461 expr = get_ancestor_addr_info (assign, &obj, &offset);
1462 if (!expr)
1463 return;
1464 parm = TREE_OPERAND (expr, 0);
1465 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1466 if (index < 0)
1467 return;
1468
1469 cond_bb = single_pred (assign_bb);
1470 cond = last_stmt (cond_bb);
1471 if (!cond
1472 || gimple_code (cond) != GIMPLE_COND
1473 || gimple_cond_code (cond) != NE_EXPR
1474 || gimple_cond_lhs (cond) != parm
1475 || !integer_zerop (gimple_cond_rhs (cond)))
1476 return;
1477
1478 phi_bb = gimple_bb (phi);
1479 for (i = 0; i < 2; i++)
1480 {
1481 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1482 if (pred != assign_bb && pred != cond_bb)
1483 return;
1484 }
1485
1486 ipa_set_ancestor_jf (jfunc, offset, index,
1487 parm_ref_data_pass_through_p (fbi, index, call, parm));
1488 }
1489
1490 /* Inspect the given TYPE and return true iff it has the same structure (the
1491 same number of fields of the same types) as a C++ member pointer. If
1492 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1493 corresponding fields there. */
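
/* For illustration, the layout being matched is essentially the Itanium C++
   ABI representation of a pointer to member function (the names below are
   made up):

     struct member_fn_ptr
     {
       void (*__pfn) (T *);   // pointer with a METHOD_TYPE target, or a
                              // vtable offset for virtual methods
       ptrdiff_t __delta;     // adjustment applied to the object pointer
     };  */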
1494
1495 static bool
1496 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1497 {
1498 tree fld;
1499
1500 if (TREE_CODE (type) != RECORD_TYPE)
1501 return false;
1502
1503 fld = TYPE_FIELDS (type);
1504 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1505 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1506 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1507 return false;
1508
1509 if (method_ptr)
1510 *method_ptr = fld;
1511
1512 fld = DECL_CHAIN (fld);
1513 if (!fld || INTEGRAL_TYPE_P (fld)
1514 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1515 return false;
1516 if (delta)
1517 *delta = fld;
1518
1519 if (DECL_CHAIN (fld))
1520 return false;
1521
1522 return true;
1523 }
1524
1525 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1526 return the rhs of its defining statement, and this statement is stored in
1527 *RHS_STMT. Otherwise return RHS as it is. */
1528
1529 static inline tree
1530 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1531 {
1532 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1533 {
1534 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1535
1536 if (gimple_assign_single_p (def_stmt))
1537 rhs = gimple_assign_rhs1 (def_stmt);
1538 else
1539 break;
1540 *rhs_stmt = def_stmt;
1541 }
1542 return rhs;
1543 }
1544
1545 /* Simple linked list, describing contents of an aggregate before call. */
1546
1547 struct ipa_known_agg_contents_list
1548 {
1549 /* Offset and size of the described part of the aggregate. */
1550 HOST_WIDE_INT offset, size;
1551
1552 /* Type of the described part of the aggregate. */
1553 tree type;
1554
1555 /* Known constant value or jump function data describing contents. */
1556 struct ipa_load_agg_data value;
1557
1558 /* Pointer to the next structure in the list. */
1559 struct ipa_known_agg_contents_list *next;
1560 };
1561
1562 /* Add an aggregate content item into a linked list of
1563 ipa_known_agg_contents_list structure, in which all elements
1564 are sorted in ascending order of offset. */
1565
1566 static inline void
1567 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1568 struct ipa_known_agg_contents_list *item)
1569 {
1570 struct ipa_known_agg_contents_list *list = *plist;
1571
1572 for (; list; list = list->next)
1573 {
1574 if (list->offset >= item->offset)
1575 break;
1576
1577 plist = &list->next;
1578 }
1579
1580 item->next = list;
1581 *plist = item;
1582 }
1583
1584 /* Check whether a given aggregate content is clobbered by some element in
1585 a linked list of ipa_known_agg_contents_list. */
1586
1587 static inline bool
1588 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1589 struct ipa_known_agg_contents_list *item)
1590 {
1591 for (; list; list = list->next)
1592 {
1593 if (list->offset >= item->offset)
1594 return list->offset < item->offset + item->size;
1595
1596 if (list->offset + list->size > item->offset)
1597 return true;
1598 }
1599
1600 return false;
1601 }
1602
1603 /* Build aggregate jump function from LIST, assuming there are exactly
1604 VALUE_COUNT entries there and that offset of the passed argument
1605 is ARG_OFFSET and store it into JFUNC. */
1606
1607 static void
1608 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1609 int value_count, HOST_WIDE_INT arg_offset,
1610 struct ipa_jump_func *jfunc)
1611 {
1612 vec_alloc (jfunc->agg.items, value_count);
1613 for (; list; list = list->next)
1614 {
1615 struct ipa_agg_jf_item item;
1616 tree operand = list->value.pass_through.operand;
1617
1618 if (list->value.pass_through.formal_id >= 0)
1619 {
1620 /* Content value is derived from some formal parameter. */
1621 if (list->value.offset >= 0)
1622 item.jftype = IPA_JF_LOAD_AGG;
1623 else
1624 item.jftype = IPA_JF_PASS_THROUGH;
1625
1626 item.value.load_agg = list->value;
1627 if (operand)
1628 item.value.pass_through.operand
1629 = unshare_expr_without_location (operand);
1630 }
1631 else if (operand)
1632 {
1633 /* Content value is known constant. */
1634 item.jftype = IPA_JF_CONST;
1635 item.value.constant = unshare_expr_without_location (operand);
1636 }
1637 else
1638 continue;
1639
1640 item.type = list->type;
1641 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1642
1643 item.offset = list->offset - arg_offset;
1644 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1645
1646 jfunc->agg.items->quick_push (item);
1647 }
1648 }
1649
1650 /* Given an assignment statement STMT, try to collect information into
1651 AGG_VALUE that will be used to construct jump function for RHS of the
1652 assignment, from which content value of an aggregate part comes.
1653
1654 Besides constant and simple pass-through jump functions, also try to
1655 identify whether it matches the following pattern that can be described by
1656 a load-value-from-aggregate jump function, which is a derivative of simple
1657 pass-through jump function.
1658
1659 foo (int *p)
1660 {
1661 ...
1662
1663 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1664 bar (q_5);
1665 }
1666
1667 Here the ipa_load_agg_data structure is informative enough to describe
1668 constant, simple pass-through and load-value-from-aggregate. If the value
1669 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1670 set to -1. For simple pass-through and load-value-from-aggregate, field
1671 FORMAL_ID specifies the related formal parameter index, and field
1672 OFFSET can be used to distinguish them, -1 means simple pass-through,
1673 otherwise means load-value-from-aggregate. */
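
/* For illustration only, assuming P above is formal parameter number 3, the
   three possible encodings of the stored value would roughly be:

     constant rhs:          operand = the constant, formal_id = -1
     simple pass-through:   formal_id = 3, offset = -1
     load-from-aggregate:   formal_id = 3, offset = 28 * BITS_PER_UNIT  */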
1674
1675 static void
1676 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1677 struct ipa_load_agg_data *agg_value,
1678 gimple *stmt)
1679 {
1680 tree lhs = gimple_assign_lhs (stmt);
1681 tree rhs1 = gimple_assign_rhs1 (stmt);
1682 enum tree_code code;
1683 int index = -1;
1684
1685 /* Initialize jump function data for the aggregate part. */
1686 memset (agg_value, 0, sizeof (*agg_value));
1687 agg_value->pass_through.operation = NOP_EXPR;
1688 agg_value->pass_through.formal_id = -1;
1689 agg_value->offset = -1;
1690
1691 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1692 || TREE_THIS_VOLATILE (lhs)
1693 || TREE_CODE (lhs) == BIT_FIELD_REF
1694 || contains_bitfld_component_ref_p (lhs))
1695 return;
1696
1697 /* Skip SSA copies. */
1698 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1699 {
1700 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1701 break;
1702
1703 stmt = SSA_NAME_DEF_STMT (rhs1);
1704 if (!is_gimple_assign (stmt))
1705 return;
1706
1707 rhs1 = gimple_assign_rhs1 (stmt);
1708 }
1709
1710 code = gimple_assign_rhs_code (stmt);
1711 switch (gimple_assign_rhs_class (stmt))
1712 {
1713 case GIMPLE_SINGLE_RHS:
1714 if (is_gimple_ip_invariant (rhs1))
1715 {
1716 agg_value->pass_through.operand = rhs1;
1717 return;
1718 }
1719 code = NOP_EXPR;
1720 break;
1721
1722 case GIMPLE_UNARY_RHS:
1723 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1724 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
1725 tcc_binary; this subtlety is somewhat misleading.
1726
1727 Since tcc_unary is widely used in IPA-CP code to check an operation
1728 with one operand, here we only allow tcc_unary operations to avoid
1729 possible problems. Then we can use (opclass == tcc_unary) or not to
1730 distinguish unary and binary. */
1731 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1732 return;
1733
1734 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1735 break;
1736
1737 case GIMPLE_BINARY_RHS:
1738 {
1739 gimple *rhs1_stmt = stmt;
1740 gimple *rhs2_stmt = stmt;
1741 tree rhs2 = gimple_assign_rhs2 (stmt);
1742
1743 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1744 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1745
1746 if (is_gimple_ip_invariant (rhs2))
1747 {
1748 agg_value->pass_through.operand = rhs2;
1749 stmt = rhs1_stmt;
1750 }
1751 else if (is_gimple_ip_invariant (rhs1))
1752 {
1753 if (TREE_CODE_CLASS (code) == tcc_comparison)
1754 code = swap_tree_comparison (code);
1755 else if (!commutative_tree_code (code))
1756 return;
1757
1758 agg_value->pass_through.operand = rhs1;
1759 stmt = rhs2_stmt;
1760 rhs1 = rhs2;
1761 }
1762 else
1763 return;
1764
1765 if (TREE_CODE_CLASS (code) != tcc_comparison
1766 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
1767 return;
1768 }
1769 break;
1770
1771 default:
1772 return;
1773 }
1774
1775 if (TREE_CODE (rhs1) != SSA_NAME)
1776 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1777 &agg_value->offset,
1778 &agg_value->by_ref);
1779 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1780 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1781
1782 if (index >= 0)
1783 {
1784 if (agg_value->offset >= 0)
1785 agg_value->type = TREE_TYPE (rhs1);
1786 agg_value->pass_through.formal_id = index;
1787 agg_value->pass_through.operation = code;
1788 }
1789 else
1790 agg_value->pass_through.operand = NULL_TREE;
1791 }
1792
1793 /* If STMT is a memory store to the object whose address is BASE, extract
1794 information (offset, size, and value) into CONTENT, and return true,
1795 otherwise we conservatively assume the whole object is modified with
1796 unknown content, and return false. CHECK_REF means that the access to the
1797 object is expected to be in the form of a MEM_REF expression. */
1798
1799 static bool
1800 extract_mem_content (struct ipa_func_body_info *fbi,
1801 gimple *stmt, tree base, bool check_ref,
1802 struct ipa_known_agg_contents_list *content)
1803 {
1804 HOST_WIDE_INT lhs_offset, lhs_size;
1805 bool reverse;
1806
1807 if (!is_gimple_assign (stmt))
1808 return false;
1809
1810 tree lhs = gimple_assign_lhs (stmt);
1811 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1812 &reverse);
1813 if (!lhs_base)
1814 return false;
1815
1816 if (check_ref)
1817 {
1818 if (TREE_CODE (lhs_base) != MEM_REF
1819 || TREE_OPERAND (lhs_base, 0) != base
1820 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1821 return false;
1822 }
1823 else if (lhs_base != base)
1824 return false;
1825
1826 content->offset = lhs_offset;
1827 content->size = lhs_size;
1828 content->type = TREE_TYPE (lhs);
1829 content->next = NULL;
1830
1831 analyze_agg_content_value (fbi, &content->value, stmt);
1832 return true;
1833 }
1834
1835 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1836 in ARG is filled with constants or values that are derived from the caller's
1837 formal parameters in the way described by some kinds of jump functions. FBI
1838 is the context of the caller function for interprocedural analysis. ARG can
1839 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1840 the type of the aggregate, JFUNC is the jump function for the aggregate. */
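/* A rough, purely illustrative sketch of the kind of caller this function is
   meant to analyze (all names below are hypothetical and not from the
   original sources):

     struct S { int a; int b; };

     void
     caller (int param)
     {
       struct S s;
       s.a = 7;        - a constant, candidate for a constant aggregate item
       s.b = param;    - derived from a formal parameter, candidate for a
                         pass-through aggregate item
       callee (&s);
     }

   assuming the stores dominate the call and are not clobbered before it. */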
1841
1842 static void
1843 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1844 gcall *call, tree arg,
1845 tree arg_type,
1846 struct ipa_jump_func *jfunc)
1847 {
1848 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1849 bitmap visited = NULL;
1850 int item_count = 0, value_count = 0;
1851 HOST_WIDE_INT arg_offset, arg_size;
1852 tree arg_base;
1853 bool check_ref, by_ref;
1854 ao_ref r;
1855
1856 if (param_ipa_max_agg_items == 0)
1857 return;
1858
1859 /* The function operates in three stages. First, we prepare check_ref, r,
1860 arg_base and arg_offset based on what is actually passed as an actual
1861 argument. */
1862
1863 if (POINTER_TYPE_P (arg_type))
1864 {
1865 by_ref = true;
1866 if (TREE_CODE (arg) == SSA_NAME)
1867 {
1868 tree type_size;
1869 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1870 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1871 return;
1872 check_ref = true;
1873 arg_base = arg;
1874 arg_offset = 0;
1875 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1876 arg_size = tree_to_uhwi (type_size);
1877 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1878 }
1879 else if (TREE_CODE (arg) == ADDR_EXPR)
1880 {
1881 bool reverse;
1882
1883 arg = TREE_OPERAND (arg, 0);
1884 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1885 &arg_size, &reverse);
1886 if (!arg_base)
1887 return;
1888 if (DECL_P (arg_base))
1889 {
1890 check_ref = false;
1891 ao_ref_init (&r, arg_base);
1892 }
1893 else
1894 return;
1895 }
1896 else
1897 return;
1898 }
1899 else
1900 {
1901 bool reverse;
1902
1903 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1904
1905 by_ref = false;
1906 check_ref = false;
1907 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1908 &arg_size, &reverse);
1909 if (!arg_base)
1910 return;
1911
1912 ao_ref_init (&r, arg);
1913 }
1914
1915 /* The second stage traverses the virtual SSA web backwards starting from the
1916 call statement, looking only at individual dominating virtual operands (whose
1917 definitions dominate the call). As long as it is confident that the content
1918 of the aggregate is affected by the definition of the virtual operand, it
1919 builds a sorted linked list of ipa_agg_jf_list describing that. */
1920
1921 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1922 {
1923 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1924
1925 if (gimple_code (stmt) == GIMPLE_PHI)
1926 {
1927 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1928 fbi->aa_walk_budget,
1929 &visited, false, NULL, NULL);
1930 continue;
1931 }
1932
1933 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1934 {
1935 struct ipa_known_agg_contents_list *content
1936 = XALLOCA (struct ipa_known_agg_contents_list);
1937
1938 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
1939 break;
1940
1941 /* Now we have a dominating virtual operand, and need to check
1942 whether its value is clobbered by any other dominating one. */
1943 if ((content->value.pass_through.formal_id >= 0
1944 || content->value.pass_through.operand)
1945 && !clobber_by_agg_contents_list_p (all_list, content))
1946 {
1947 struct ipa_known_agg_contents_list *copy
1948 = XALLOCA (struct ipa_known_agg_contents_list);
1949
1950 /* Add to the list consisting of only dominating virtual
1951 operands, whose definitions can finally reach the call. */
1952 add_to_agg_contents_list (&list, (*copy = *content, copy));
1953
1954 if (++value_count == param_ipa_max_agg_items)
1955 break;
1956 }
1957
1958 /* Add to the list consisting of all dominating virtual operands. */
1959 add_to_agg_contents_list (&all_list, content);
1960
1961 if (++item_count == 2 * param_ipa_max_agg_items)
1962 break;
1963 }
1964 dom_vuse = gimple_vuse (stmt);
1965 }
1966
1967 if (visited)
1968 BITMAP_FREE (visited);
1969
1970 /* Third stage just goes over the list and creates an appropriate vector of
1971 ipa_agg_jf_item structures out of it, of course only if there are
1972 any meaningful items to begin with. */
1973
1974 if (value_count)
1975 {
1976 jfunc->agg.by_ref = by_ref;
1977 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
1978 }
1979 }
1980
1981
1982 /* Return the Ith param type of callee associated with call graph
1983 edge E. */
1984
1985 tree
1986 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1987 {
1988 int n;
1989 tree type = (e->callee
1990 ? TREE_TYPE (e->callee->decl)
1991 : gimple_call_fntype (e->call_stmt));
1992 tree t = TYPE_ARG_TYPES (type);
1993
1994 for (n = 0; n < i; n++)
1995 {
1996 if (!t)
1997 break;
1998 t = TREE_CHAIN (t);
1999 }
2000 if (t)
2001 return TREE_VALUE (t);
2002 if (!e->callee)
2003 return NULL;
2004 t = DECL_ARGUMENTS (e->callee->decl);
2005 for (n = 0; n < i; n++)
2006 {
2007 if (!t)
2008 return NULL;
2009 t = TREE_CHAIN (t);
2010 }
2011 if (t)
2012 return TREE_TYPE (t);
2013 return NULL;
2014 }
2015
2016 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2017 allocated structure or a previously existing one shared with other jump
2018 functions and/or transformation summaries. */
2019
2020 ipa_bits *
2021 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2022 {
2023 ipa_bits tmp;
2024 tmp.value = value;
2025 tmp.mask = mask;
2026
2027 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2028 if (*slot)
2029 return *slot;
2030
2031 ipa_bits *res = ggc_alloc<ipa_bits> ();
2032 res->value = value;
2033 res->mask = mask;
2034 *slot = res;
2035
2036 return res;
2037 }
2038
2039 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK. Use a
2040 hash table in order to avoid creating multiple identical ipa_bits structures. */
2041
2042 static void
2043 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2044 const widest_int &mask)
2045 {
2046 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2047 }
2048
2049 /* Return a pointer to a value_range just like *TMP, but either find it in
2050 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2051
2052 static value_range *
2053 ipa_get_value_range (value_range *tmp)
2054 {
2055 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2056 if (*slot)
2057 return *slot;
2058
2059 value_range *vr = ggc_alloc<value_range> ();
2060 *vr = *tmp;
2061 *slot = vr;
2062
2063 return vr;
2064 }
2065
2066 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2067 equiv set. Use a hash table in order to avoid creating multiple identical
2068 copies of value_ranges. */
2069
2070 static value_range *
2071 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2072 {
2073 value_range tmp (min, max, kind);
2074 return ipa_get_value_range (&tmp);
2075 }
2076
2077 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2078 a NULL equiv bitmap. Use a hash table in order to avoid creating multiple
2079 identical value_range structures. */
2080
2081 static void
2082 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2083 tree min, tree max)
2084 {
2085 jf->m_vr = ipa_get_value_range (type, min, max);
2086 }
2087
2088 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2089 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2090
2091 static void
2092 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2093 {
2094 jf->m_vr = ipa_get_value_range (tmp);
2095 }
2096
2097 /* Compute jump functions for all arguments of callsite CS and insert the
2098 information in the jump_functions array in the ipa_edge_args corresponding
2099 to this callsite. */
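/* For illustration only (a simplified, assumed example): for a call such as

     callee (x, x + 3, 16, &s);

   where x and s are formal parameters of the caller, the loop below would
   typically record a simple pass-through jump function for the first
   argument, an arithmetic pass-through for the second, a constant jump
   function for the third, and possibly known aggregate contents for the
   last one, depending on what the analysis can prove. */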
2100
2101 static void
2102 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2103 struct cgraph_edge *cs)
2104 {
2105 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2106 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2107 gcall *call = cs->call_stmt;
2108 int n, arg_num = gimple_call_num_args (call);
2109 bool useful_context = false;
2110
2111 if (arg_num == 0 || args->jump_functions)
2112 return;
2113 vec_safe_grow_cleared (args->jump_functions, arg_num);
2114 if (flag_devirtualize)
2115 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
2116
2117 if (gimple_call_internal_p (call))
2118 return;
2119 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2120 return;
2121
2122 for (n = 0; n < arg_num; n++)
2123 {
2124 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2125 tree arg = gimple_call_arg (call, n);
2126 tree param_type = ipa_get_callee_param_type (cs, n);
2127 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2128 {
2129 tree instance;
2130 class ipa_polymorphic_call_context context (cs->caller->decl,
2131 arg, cs->call_stmt,
2132 &instance);
2133 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2134 &fbi->aa_walk_budget);
2135 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2136 if (!context.useless_p ())
2137 useful_context = true;
2138 }
2139
2140 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2141 {
2142 bool addr_nonzero = false;
2143 bool strict_overflow = false;
2144
2145 if (TREE_CODE (arg) == SSA_NAME
2146 && param_type
2147 && get_ptr_nonnull (arg))
2148 addr_nonzero = true;
2149 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2150 addr_nonzero = true;
2151
2152 if (addr_nonzero)
2153 {
2154 tree z = build_int_cst (TREE_TYPE (arg), 0);
2155 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2156 }
2157 else
2158 gcc_assert (!jfunc->m_vr);
2159 }
2160 else
2161 {
2162 wide_int min, max;
2163 value_range_kind kind;
2164 if (TREE_CODE (arg) == SSA_NAME
2165 && param_type
2166 && (kind = get_range_info (arg, &min, &max))
2167 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2168 {
2169 value_range resvr;
2170 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2171 wide_int_to_tree (TREE_TYPE (arg), max),
2172 kind);
2173 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2174 &tmpvr, TREE_TYPE (arg));
2175 if (!resvr.undefined_p () && !resvr.varying_p ())
2176 ipa_set_jfunc_vr (jfunc, &resvr);
2177 else
2178 gcc_assert (!jfunc->m_vr);
2179 }
2180 else
2181 gcc_assert (!jfunc->m_vr);
2182 }
2183
2184 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2185 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2186 {
2187 if (TREE_CODE (arg) == SSA_NAME)
2188 ipa_set_jfunc_bits (jfunc, 0,
2189 widest_int::from (get_nonzero_bits (arg),
2190 TYPE_SIGN (TREE_TYPE (arg))));
2191 else
2192 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2193 }
2194 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2195 {
2196 unsigned HOST_WIDE_INT bitpos;
2197 unsigned align;
2198
2199 get_pointer_alignment_1 (arg, &align, &bitpos);
2200 widest_int mask = wi::bit_and_not
2201 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2202 align / BITS_PER_UNIT - 1);
2203 widest_int value = bitpos / BITS_PER_UNIT;
2204 ipa_set_jfunc_bits (jfunc, value, mask);
2205 }
2206 else
2207 gcc_assert (!jfunc->bits);
2208
2209 if (is_gimple_ip_invariant (arg)
2210 || (VAR_P (arg)
2211 && is_global_var (arg)
2212 && TREE_READONLY (arg)))
2213 ipa_set_jf_constant (jfunc, arg, cs);
2214 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2215 && TREE_CODE (arg) == PARM_DECL)
2216 {
2217 int index = ipa_get_param_decl_index (info, arg);
2218
2219 gcc_assert (index >= 0);
2220 /* Aggregate passed by value, check for pass-through, otherwise we
2221 will attempt to fill in aggregate contents later in this
2222 for cycle. */
2223 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2224 {
2225 ipa_set_jf_simple_pass_through (jfunc, index, false);
2226 continue;
2227 }
2228 }
2229 else if (TREE_CODE (arg) == SSA_NAME)
2230 {
2231 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2232 {
2233 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2234 if (index >= 0)
2235 {
2236 bool agg_p;
2237 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2238 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2239 }
2240 }
2241 else
2242 {
2243 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2244 if (is_gimple_assign (stmt))
2245 compute_complex_assign_jump_func (fbi, info, jfunc,
2246 call, stmt, arg, param_type);
2247 else if (gimple_code (stmt) == GIMPLE_PHI)
2248 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2249 call,
2250 as_a <gphi *> (stmt));
2251 }
2252 }
2253
2254 /* If ARG is a pointer, we cannot use its type to determine the type of the
2255 aggregate passed (because type conversions are ignored in gimple). Usually we
2256 can safely get the type from the function declaration, but in case of K&R
2257 prototypes or variadic functions we can try our luck with the type of the
2258 pointer passed. TODO: Since we look for actual initialization of the memory
2259 object, we might do better to work out the type based on the stores we find. */
2260 if (!param_type)
2261 param_type = TREE_TYPE (arg);
2262
2263 if ((jfunc->type != IPA_JF_PASS_THROUGH
2264 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2265 && (jfunc->type != IPA_JF_ANCESTOR
2266 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2267 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2268 || POINTER_TYPE_P (param_type)))
2269 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2270 }
2271 if (!useful_context)
2272 vec_free (args->polymorphic_call_contexts);
2273 }
2274
2275 /* Compute jump functions for all edges - both direct and indirect - outgoing
2276 from BB. */
2277
2278 static void
2279 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2280 {
2281 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2282 int i;
2283 struct cgraph_edge *cs;
2284
2285 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2286 {
2287 struct cgraph_node *callee = cs->callee;
2288
2289 if (callee)
2290 {
2291 callee = callee->ultimate_alias_target ();
2292 /* We do not need to bother analyzing calls to unknown functions
2293 unless they may become known during lto/whopr. */
2294 if (!callee->definition && !flag_lto)
2295 continue;
2296 }
2297 ipa_compute_jump_functions_for_edge (fbi, cs);
2298 }
2299 }
2300
2301 /* If STMT looks like a statement loading a value from a member pointer formal
2302 parameter, return that parameter and store the offset of the field to
2303 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2304 might be clobbered). If USE_DELTA, then we look for a use of the delta
2305 field rather than the pfn. */
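/* For illustration (assumed gimple shapes, hypothetical names): the loads
   recognized here typically look like

     f$__pfn_24 = f.__pfn;                    - COMPONENT_REF form
     f$__pfn_24 = MEM[(struct *)&f + 4B];     - MEM_REF form

   where f is a PARM_DECL whose type looks like a member pointer. */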
2306
2307 static tree
2308 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2309 HOST_WIDE_INT *offset_p)
2310 {
2311 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2312
2313 if (!gimple_assign_single_p (stmt))
2314 return NULL_TREE;
2315
2316 rhs = gimple_assign_rhs1 (stmt);
2317 if (TREE_CODE (rhs) == COMPONENT_REF)
2318 {
2319 ref_field = TREE_OPERAND (rhs, 1);
2320 rhs = TREE_OPERAND (rhs, 0);
2321 }
2322 else
2323 ref_field = NULL_TREE;
2324 if (TREE_CODE (rhs) != MEM_REF)
2325 return NULL_TREE;
2326 rec = TREE_OPERAND (rhs, 0);
2327 if (TREE_CODE (rec) != ADDR_EXPR)
2328 return NULL_TREE;
2329 rec = TREE_OPERAND (rec, 0);
2330 if (TREE_CODE (rec) != PARM_DECL
2331 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2332 return NULL_TREE;
2333 ref_offset = TREE_OPERAND (rhs, 1);
2334
2335 if (use_delta)
2336 fld = delta_field;
2337 else
2338 fld = ptr_field;
2339 if (offset_p)
2340 *offset_p = int_bit_position (fld);
2341
2342 if (ref_field)
2343 {
2344 if (integer_nonzerop (ref_offset))
2345 return NULL_TREE;
2346 return ref_field == fld ? rec : NULL_TREE;
2347 }
2348 else
2349 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2350 : NULL_TREE;
2351 }
2352
2353 /* Returns true iff T is an SSA_NAME defined by a statement. */
2354
2355 static bool
2356 ipa_is_ssa_with_stmt_def (tree t)
2357 {
2358 if (TREE_CODE (t) == SSA_NAME
2359 && !SSA_NAME_IS_DEFAULT_DEF (t))
2360 return true;
2361 else
2362 return false;
2363 }
2364
2365 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2366 call to parameter number PARAM_INDEX. NODE is the caller. Return the
2367 indirect call graph edge.
2368 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2369
2370 static struct cgraph_edge *
2371 ipa_note_param_call (struct cgraph_node *node, int param_index,
2372 gcall *stmt, bool polymorphic)
2373 {
2374 struct cgraph_edge *cs;
2375
2376 cs = node->get_edge (stmt);
2377 cs->indirect_info->param_index = param_index;
2378 cs->indirect_info->agg_contents = 0;
2379 cs->indirect_info->member_ptr = 0;
2380 cs->indirect_info->guaranteed_unmodified = 0;
2381 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2382 param_index, true);
2383 if (cs->indirect_info->polymorphic || polymorphic)
2384 ipa_set_param_used_by_polymorphic_call
2385 (IPA_NODE_REF (node), param_index, true);
2386 return cs;
2387 }
2388
2389 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2390 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2391 intermediate information about each formal parameter. Currently it checks
2392 whether the call calls a pointer that is a formal parameter and if so, the
2393 parameter is marked with the called flag and an indirect call graph edge
2394 describing the call is created. This is very simple for ordinary pointers
2395 represented in SSA but not-so-nice when it comes to member pointers. The
2396 ugly part of this function does nothing more than trying to match the
2397 pattern of such a call. An example of such a pattern is the gimple dump
2398 below, the call is on the last line:
2399
2400 <bb 2>:
2401 f$__delta_5 = f.__delta;
2402 f$__pfn_24 = f.__pfn;
2403
2404 or
2405 <bb 2>:
2406 f$__delta_5 = MEM[(struct *)&f];
2407 f$__pfn_24 = MEM[(struct *)&f + 4B];
2408
2409 and a few lines below:
2410
2411 <bb 5>
2412 D.2496_3 = (int) f$__pfn_24;
2413 D.2497_4 = D.2496_3 & 1;
2414 if (D.2497_4 != 0)
2415 goto <bb 3>;
2416 else
2417 goto <bb 4>;
2418
2419 <bb 6>:
2420 D.2500_7 = (unsigned int) f$__delta_5;
2421 D.2501_8 = &S + D.2500_7;
2422 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2423 D.2503_10 = *D.2502_9;
2424 D.2504_12 = f$__pfn_24 + -1;
2425 D.2505_13 = (unsigned int) D.2504_12;
2426 D.2506_14 = D.2503_10 + D.2505_13;
2427 D.2507_15 = *D.2506_14;
2428 iftmp.11_16 = (String:: *) D.2507_15;
2429
2430 <bb 7>:
2431 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2432 D.2500_19 = (unsigned int) f$__delta_5;
2433 D.2508_20 = &S + D.2500_19;
2434 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2435
2436 Such patterns are results of simple calls to a member pointer:
2437
2438 int doprinting (int (MyString::* f)(int) const)
2439 {
2440 MyString S ("somestring");
2441
2442 return (S.*f)(4);
2443 }
2444
2445 Moreover, the function also looks for called pointers loaded from aggregates
2446 passed by value or reference. */
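/* A minimal sketch of the simpler case mentioned in the last paragraph above
   (hypothetical names, for illustration only):

     struct ops { void (*cb) (int); };

     static void
     run (struct ops *o)
     {
       o->cb (4);
     }

   Here the called pointer is loaded from an aggregate passed by reference,
   so an indirect edge based on parameter o and the offset of cb would be
   recorded. */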
2447
2448 static void
2449 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2450 tree target)
2451 {
2452 class ipa_node_params *info = fbi->info;
2453 HOST_WIDE_INT offset;
2454 bool by_ref;
2455
2456 if (SSA_NAME_IS_DEFAULT_DEF (target))
2457 {
2458 tree var = SSA_NAME_VAR (target);
2459 int index = ipa_get_param_decl_index (info, var);
2460 if (index >= 0)
2461 ipa_note_param_call (fbi->node, index, call, false);
2462 return;
2463 }
2464
2465 int index;
2466 gimple *def = SSA_NAME_DEF_STMT (target);
2467 bool guaranteed_unmodified;
2468 if (gimple_assign_single_p (def)
2469 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2470 gimple_assign_rhs1 (def), &index, &offset,
2471 NULL, &by_ref, &guaranteed_unmodified))
2472 {
2473 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2474 call, false);
2475 cs->indirect_info->offset = offset;
2476 cs->indirect_info->agg_contents = 1;
2477 cs->indirect_info->by_ref = by_ref;
2478 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2479 return;
2480 }
2481
2482 /* Now we need to try to match the complex pattern of calling a member
2483 pointer. */
2484 if (gimple_code (def) != GIMPLE_PHI
2485 || gimple_phi_num_args (def) != 2
2486 || !POINTER_TYPE_P (TREE_TYPE (target))
2487 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2488 return;
2489
2490 /* First, we need to check whether one of these is a load from a member
2491 pointer that is a parameter to this function. */
2492 tree n1 = PHI_ARG_DEF (def, 0);
2493 tree n2 = PHI_ARG_DEF (def, 1);
2494 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2495 return;
2496 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2497 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2498
2499 tree rec;
2500 basic_block bb, virt_bb;
2501 basic_block join = gimple_bb (def);
2502 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2503 {
2504 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2505 return;
2506
2507 bb = EDGE_PRED (join, 0)->src;
2508 virt_bb = gimple_bb (d2);
2509 }
2510 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2511 {
2512 bb = EDGE_PRED (join, 1)->src;
2513 virt_bb = gimple_bb (d1);
2514 }
2515 else
2516 return;
2517
2518 /* Second, we need to check that the basic blocks are laid out in the way
2519 corresponding to the pattern. */
2520
2521 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2522 || single_pred (virt_bb) != bb
2523 || single_succ (virt_bb) != join)
2524 return;
2525
2526 /* Third, let's see that the branching is done depending on the least
2527 significant bit of the pfn. */
2528
2529 gimple *branch = last_stmt (bb);
2530 if (!branch || gimple_code (branch) != GIMPLE_COND)
2531 return;
2532
2533 if ((gimple_cond_code (branch) != NE_EXPR
2534 && gimple_cond_code (branch) != EQ_EXPR)
2535 || !integer_zerop (gimple_cond_rhs (branch)))
2536 return;
2537
2538 tree cond = gimple_cond_lhs (branch);
2539 if (!ipa_is_ssa_with_stmt_def (cond))
2540 return;
2541
2542 def = SSA_NAME_DEF_STMT (cond);
2543 if (!is_gimple_assign (def)
2544 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2545 || !integer_onep (gimple_assign_rhs2 (def)))
2546 return;
2547
2548 cond = gimple_assign_rhs1 (def);
2549 if (!ipa_is_ssa_with_stmt_def (cond))
2550 return;
2551
2552 def = SSA_NAME_DEF_STMT (cond);
2553
2554 if (is_gimple_assign (def)
2555 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2556 {
2557 cond = gimple_assign_rhs1 (def);
2558 if (!ipa_is_ssa_with_stmt_def (cond))
2559 return;
2560 def = SSA_NAME_DEF_STMT (cond);
2561 }
2562
2563 tree rec2;
2564 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2565 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2566 == ptrmemfunc_vbit_in_delta),
2567 NULL);
2568 if (rec != rec2)
2569 return;
2570
2571 index = ipa_get_param_decl_index (info, rec);
2572 if (index >= 0
2573 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2574 {
2575 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2576 call, false);
2577 cs->indirect_info->offset = offset;
2578 cs->indirect_info->agg_contents = 1;
2579 cs->indirect_info->member_ptr = 1;
2580 cs->indirect_info->guaranteed_unmodified = 1;
2581 }
2582
2583 return;
2584 }
2585
2586 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2587 object referenced in the expression is a formal parameter of the caller
2588 FBI->node (described by FBI->info), create a call note for the
2589 statement. */
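/* For illustration only (hypothetical C++-like example, not from the original
   sources): in a caller such as

     int
     dispatch (struct A *a)
     {
       return a->virtual_method ();
     }

   the OBJ_TYPE_REF expression references the formal parameter a, so a
   polymorphic call note based on that parameter would be created below. */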
2590
2591 static void
2592 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2593 gcall *call, tree target)
2594 {
2595 tree obj = OBJ_TYPE_REF_OBJECT (target);
2596 int index;
2597 HOST_WIDE_INT anc_offset;
2598
2599 if (!flag_devirtualize)
2600 return;
2601
2602 if (TREE_CODE (obj) != SSA_NAME)
2603 return;
2604
2605 class ipa_node_params *info = fbi->info;
2606 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2607 {
2608 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2609 return;
2610
2611 anc_offset = 0;
2612 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2613 gcc_assert (index >= 0);
2614 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2615 call))
2616 return;
2617 }
2618 else
2619 {
2620 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2621 tree expr;
2622
2623 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2624 if (!expr)
2625 return;
2626 index = ipa_get_param_decl_index (info,
2627 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2628 gcc_assert (index >= 0);
2629 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2630 call, anc_offset))
2631 return;
2632 }
2633
2634 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2635 call, true);
2636 class cgraph_indirect_call_info *ii = cs->indirect_info;
2637 ii->offset = anc_offset;
2638 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2639 ii->otr_type = obj_type_ref_class (target);
2640 ii->polymorphic = 1;
2641 }
2642
2643 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2644 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2645 containing intermediate information about each formal parameter. */
2646
2647 static void
2648 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2649 {
2650 tree target = gimple_call_fn (call);
2651
2652 if (!target
2653 || (TREE_CODE (target) != SSA_NAME
2654 && !virtual_method_call_p (target)))
2655 return;
2656
2657 struct cgraph_edge *cs = fbi->node->get_edge (call);
2658 /* If we previously turned the call into a direct call, there is
2659 no need to analyze. */
2660 if (cs && !cs->indirect_unknown_callee)
2661 return;
2662
2663 if (cs->indirect_info->polymorphic && flag_devirtualize)
2664 {
2665 tree instance;
2666 tree target = gimple_call_fn (call);
2667 ipa_polymorphic_call_context context (current_function_decl,
2668 target, call, &instance);
2669
2670 gcc_checking_assert (cs->indirect_info->otr_type
2671 == obj_type_ref_class (target));
2672 gcc_checking_assert (cs->indirect_info->otr_token
2673 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2674
2675 cs->indirect_info->vptr_changed
2676 = !context.get_dynamic_type (instance,
2677 OBJ_TYPE_REF_OBJECT (target),
2678 obj_type_ref_class (target), call,
2679 &fbi->aa_walk_budget);
2680 cs->indirect_info->context = context;
2681 }
2682
2683 if (TREE_CODE (target) == SSA_NAME)
2684 ipa_analyze_indirect_call_uses (fbi, call, target);
2685 else if (virtual_method_call_p (target))
2686 ipa_analyze_virtual_call_uses (fbi, call, target);
2687 }
2688
2689
2690 /* Analyze the call statement STMT with respect to formal parameters (described
2691 in INFO) of the caller given by FBI->NODE. Currently it only checks whether
2692 formal parameters are called. */
2693
2694 static void
2695 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2696 {
2697 if (is_gimple_call (stmt))
2698 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2699 }
2700
2701 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2702 If OP is a parameter declaration, mark it as used in the info structure
2703 passed in DATA. */
2704
2705 static bool
2706 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2707 {
2708 class ipa_node_params *info = (class ipa_node_params *) data;
2709
2710 op = get_base_address (op);
2711 if (op
2712 && TREE_CODE (op) == PARM_DECL)
2713 {
2714 int index = ipa_get_param_decl_index (info, op);
2715 gcc_assert (index >= 0);
2716 ipa_set_param_used (info, index, true);
2717 }
2718
2719 return false;
2720 }
2721
2722 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2723 the findings in various structures of the associated ipa_node_params
2724 structure, such as parameter flags, notes etc. FBI holds various data about
2725 the function being analyzed. */
2726
2727 static void
2728 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2729 {
2730 gimple_stmt_iterator gsi;
2731 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2732 {
2733 gimple *stmt = gsi_stmt (gsi);
2734
2735 if (is_gimple_debug (stmt))
2736 continue;
2737
2738 ipa_analyze_stmt_uses (fbi, stmt);
2739 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2740 visit_ref_for_mod_analysis,
2741 visit_ref_for_mod_analysis,
2742 visit_ref_for_mod_analysis);
2743 }
2744 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2745 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2746 visit_ref_for_mod_analysis,
2747 visit_ref_for_mod_analysis,
2748 visit_ref_for_mod_analysis);
2749 }
2750
2751 /* Calculate controlled uses of parameters of NODE. */
2752
2753 static void
2754 ipa_analyze_controlled_uses (struct cgraph_node *node)
2755 {
2756 class ipa_node_params *info = IPA_NODE_REF (node);
2757
2758 for (int i = 0; i < ipa_get_param_count (info); i++)
2759 {
2760 tree parm = ipa_get_param (info, i);
2761 int controlled_uses = 0;
2762
2763 /* For SSA regs see if parameter is used. For non-SSA we compute
2764 the flag during modification analysis. */
2765 if (is_gimple_reg (parm))
2766 {
2767 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2768 parm);
2769 if (ddef && !has_zero_uses (ddef))
2770 {
2771 imm_use_iterator imm_iter;
2772 use_operand_p use_p;
2773
2774 ipa_set_param_used (info, i, true);
2775 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2776 if (!is_gimple_call (USE_STMT (use_p)))
2777 {
2778 if (!is_gimple_debug (USE_STMT (use_p)))
2779 {
2780 controlled_uses = IPA_UNDESCRIBED_USE;
2781 break;
2782 }
2783 }
2784 else
2785 controlled_uses++;
2786 }
2787 else
2788 controlled_uses = 0;
2789 }
2790 else
2791 controlled_uses = IPA_UNDESCRIBED_USE;
2792 ipa_set_controlled_uses (info, i, controlled_uses);
2793 }
2794 }
2795
2796 /* Free stuff in BI. */
2797
2798 static void
2799 free_ipa_bb_info (struct ipa_bb_info *bi)
2800 {
2801 bi->cg_edges.release ();
2802 bi->param_aa_statuses.release ();
2803 }
2804
2805 /* Dominator walker driving the analysis. */
2806
2807 class analysis_dom_walker : public dom_walker
2808 {
2809 public:
2810 analysis_dom_walker (struct ipa_func_body_info *fbi)
2811 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2812
2813 virtual edge before_dom_children (basic_block);
2814
2815 private:
2816 struct ipa_func_body_info *m_fbi;
2817 };
2818
2819 edge
2820 analysis_dom_walker::before_dom_children (basic_block bb)
2821 {
2822 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2823 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2824 return NULL;
2825 }
2826
2827 /* Release body info FBI. */
2828
2829 void
2830 ipa_release_body_info (struct ipa_func_body_info *fbi)
2831 {
2832 int i;
2833 struct ipa_bb_info *bi;
2834
2835 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2836 free_ipa_bb_info (bi);
2837 fbi->bb_infos.release ();
2838 }
2839
2840 /* Initialize the array describing properties of formal parameters
2841 of NODE, analyze their uses and compute jump functions associated
2842 with actual arguments of calls from within NODE. */
2843
2844 void
2845 ipa_analyze_node (struct cgraph_node *node)
2846 {
2847 struct ipa_func_body_info fbi;
2848 class ipa_node_params *info;
2849
2850 ipa_check_create_node_params ();
2851 ipa_check_create_edge_args ();
2852 info = IPA_NODE_REF_GET_CREATE (node);
2853
2854 if (info->analysis_done)
2855 return;
2856 info->analysis_done = 1;
2857
2858 if (ipa_func_spec_opts_forbid_analysis_p (node))
2859 {
2860 for (int i = 0; i < ipa_get_param_count (info); i++)
2861 {
2862 ipa_set_param_used (info, i, true);
2863 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2864 }
2865 return;
2866 }
2867
2868 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2869 push_cfun (func);
2870 calculate_dominance_info (CDI_DOMINATORS);
2871 ipa_initialize_node_params (node);
2872 ipa_analyze_controlled_uses (node);
2873
2874 fbi.node = node;
2875 fbi.info = IPA_NODE_REF (node);
2876 fbi.bb_infos = vNULL;
2877 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2878 fbi.param_count = ipa_get_param_count (info);
2879 fbi.aa_walk_budget = param_ipa_max_aa_steps;
2880
2881 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2882 {
2883 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2884 bi->cg_edges.safe_push (cs);
2885 }
2886
2887 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2888 {
2889 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2890 bi->cg_edges.safe_push (cs);
2891 }
2892
2893 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2894
2895 ipa_release_body_info (&fbi);
2896 free_dominance_info (CDI_DOMINATORS);
2897 pop_cfun ();
2898 }
2899
2900 /* Update the jump functions associated with call graph edge E when the call
2901 graph edge CS is being inlined, assuming that E->caller is already (possibly
2902 indirectly) inlined into CS->callee and that E has not been inlined. */
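/* An informal example added for illustration (hypothetical and simplified):
   suppose A calls B (x + 1, 5) and B in turn calls C (p0), where p0 is B's
   first formal parameter. The B->C edge then carries a simple pass-through
   jump function referring to p0. Once the A->B edge is inlined, that jump
   function is composed with the A->B jump functions (TOP below), yielding an
   arithmetic pass-through of A's parameter x; combinations that cannot be
   expressed this way are degraded to IPA_JF_UNKNOWN. */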
2903
2904 static void
2905 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2906 struct cgraph_edge *e)
2907 {
2908 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2909 class ipa_edge_args *args = IPA_EDGE_REF (e);
2910 if (!args)
2911 return;
2912 int count = ipa_get_cs_argument_count (args);
2913 int i;
2914
2915 for (i = 0; i < count; i++)
2916 {
2917 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2918 class ipa_polymorphic_call_context *dst_ctx
2919 = ipa_get_ith_polymorhic_call_context (args, i);
2920
2921 if (dst->agg.items)
2922 {
2923 struct ipa_agg_jf_item *item;
2924 int j;
2925
2926 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2927 {
2928 int dst_fid;
2929 struct ipa_jump_func *src;
2930
2931 if (item->jftype != IPA_JF_PASS_THROUGH
2932 && item->jftype != IPA_JF_LOAD_AGG)
2933 continue;
2934
2935 dst_fid = item->value.pass_through.formal_id;
2936 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
2937 {
2938 item->jftype = IPA_JF_UNKNOWN;
2939 continue;
2940 }
2941
2942 item->value.pass_through.formal_id = -1;
2943 src = ipa_get_ith_jump_func (top, dst_fid);
2944 if (src->type == IPA_JF_CONST)
2945 {
2946 if (item->jftype == IPA_JF_PASS_THROUGH
2947 && item->value.pass_through.operation == NOP_EXPR)
2948 {
2949 item->jftype = IPA_JF_CONST;
2950 item->value.constant = src->value.constant.value;
2951 continue;
2952 }
2953 }
2954 else if (src->type == IPA_JF_PASS_THROUGH
2955 && src->value.pass_through.operation == NOP_EXPR)
2956 {
2957 if (item->jftype == IPA_JF_PASS_THROUGH
2958 || !item->value.load_agg.by_ref
2959 || src->value.pass_through.agg_preserved)
2960 item->value.pass_through.formal_id
2961 = src->value.pass_through.formal_id;
2962 }
2963 else if (src->type == IPA_JF_ANCESTOR)
2964 {
2965 if (item->jftype == IPA_JF_PASS_THROUGH)
2966 {
2967 if (!src->value.ancestor.offset)
2968 item->value.pass_through.formal_id
2969 = src->value.ancestor.formal_id;
2970 }
2971 else if (src->value.ancestor.agg_preserved)
2972 {
2973 gcc_checking_assert (item->value.load_agg.by_ref);
2974
2975 item->value.pass_through.formal_id
2976 = src->value.ancestor.formal_id;
2977 item->value.load_agg.offset
2978 += src->value.ancestor.offset;
2979 }
2980 }
2981
2982 if (item->value.pass_through.formal_id < 0)
2983 item->jftype = IPA_JF_UNKNOWN;
2984 }
2985 }
2986
2987 if (!top)
2988 {
2989 ipa_set_jf_unknown (dst);
2990 continue;
2991 }
2992
2993 if (dst->type == IPA_JF_ANCESTOR)
2994 {
2995 struct ipa_jump_func *src;
2996 int dst_fid = dst->value.ancestor.formal_id;
2997 class ipa_polymorphic_call_context *src_ctx
2998 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2999
3000 /* Variable number of arguments can cause havoc if we try to access
3001 one that does not exist in the inlined edge. So make sure we
3002 don't. */
3003 if (dst_fid >= ipa_get_cs_argument_count (top))
3004 {
3005 ipa_set_jf_unknown (dst);
3006 continue;
3007 }
3008
3009 src = ipa_get_ith_jump_func (top, dst_fid);
3010
3011 if (src_ctx && !src_ctx->useless_p ())
3012 {
3013 class ipa_polymorphic_call_context ctx = *src_ctx;
3014
3015 /* TODO: Make type preserved safe WRT contexts. */
3016 if (!ipa_get_jf_ancestor_type_preserved (dst))
3017 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3018 ctx.offset_by (dst->value.ancestor.offset);
3019 if (!ctx.useless_p ())
3020 {
3021 if (!dst_ctx)
3022 {
3023 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3024 count);
3025 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3026 }
3027
3028 dst_ctx->combine_with (ctx);
3029 }
3030 }
3031
3032 /* The parameter and argument in an ancestor jump function must be of pointer
3033 type, which means the access to the aggregate must be by reference. */
3034 gcc_assert (!src->agg.items || src->agg.by_ref);
3035
3036 if (src->agg.items && dst->value.ancestor.agg_preserved)
3037 {
3038 struct ipa_agg_jf_item *item;
3039 int j;
3040
3041 /* Currently we do not produce clobber aggregate jump functions,
3042 replace with merging when we do. */
3043 gcc_assert (!dst->agg.items);
3044
3045 dst->agg.items = vec_safe_copy (src->agg.items);
3046 dst->agg.by_ref = src->agg.by_ref;
3047 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3048 item->offset -= dst->value.ancestor.offset;
3049 }
3050
3051 if (src->type == IPA_JF_PASS_THROUGH
3052 && src->value.pass_through.operation == NOP_EXPR)
3053 {
3054 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3055 dst->value.ancestor.agg_preserved &=
3056 src->value.pass_through.agg_preserved;
3057 }
3058 else if (src->type == IPA_JF_ANCESTOR)
3059 {
3060 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3061 dst->value.ancestor.offset += src->value.ancestor.offset;
3062 dst->value.ancestor.agg_preserved &=
3063 src->value.ancestor.agg_preserved;
3064 }
3065 else
3066 ipa_set_jf_unknown (dst);
3067 }
3068 else if (dst->type == IPA_JF_PASS_THROUGH)
3069 {
3070 struct ipa_jump_func *src;
3071 /* We must check the range due to calls with a variable number of arguments,
3072 and we cannot combine jump functions with operations. */
3073 if (dst->value.pass_through.operation == NOP_EXPR
3074 && (top && dst->value.pass_through.formal_id
3075 < ipa_get_cs_argument_count (top)))
3076 {
3077 int dst_fid = dst->value.pass_through.formal_id;
3078 src = ipa_get_ith_jump_func (top, dst_fid);
3079 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3080 class ipa_polymorphic_call_context *src_ctx
3081 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3082
3083 if (src_ctx && !src_ctx->useless_p ())
3084 {
3085 class ipa_polymorphic_call_context ctx = *src_ctx;
3086
3087 /* TODO: Make type preserved safe WRT contexts. */
3088 if (!ipa_get_jf_pass_through_type_preserved (dst))
3089 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3090 if (!ctx.useless_p ())
3091 {
3092 if (!dst_ctx)
3093 {
3094 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3095 count);
3096 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3097 }
3098 dst_ctx->combine_with (ctx);
3099 }
3100 }
3101 switch (src->type)
3102 {
3103 case IPA_JF_UNKNOWN:
3104 ipa_set_jf_unknown (dst);
3105 break;
3106 case IPA_JF_CONST:
3107 ipa_set_jf_cst_copy (dst, src);
3108 break;
3109
3110 case IPA_JF_PASS_THROUGH:
3111 {
3112 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3113 enum tree_code operation;
3114 operation = ipa_get_jf_pass_through_operation (src);
3115
3116 if (operation == NOP_EXPR)
3117 {
3118 bool agg_p;
3119 agg_p = dst_agg_p
3120 && ipa_get_jf_pass_through_agg_preserved (src);
3121 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3122 }
3123 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3124 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3125 else
3126 {
3127 tree operand = ipa_get_jf_pass_through_operand (src);
3128 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3129 operation);
3130 }
3131 break;
3132 }
3133 case IPA_JF_ANCESTOR:
3134 {
3135 bool agg_p;
3136 agg_p = dst_agg_p
3137 && ipa_get_jf_ancestor_agg_preserved (src);
3138 ipa_set_ancestor_jf (dst,
3139 ipa_get_jf_ancestor_offset (src),
3140 ipa_get_jf_ancestor_formal_id (src),
3141 agg_p);
3142 break;
3143 }
3144 default:
3145 gcc_unreachable ();
3146 }
3147
3148 if (src->agg.items
3149 && (dst_agg_p || !src->agg.by_ref))
3150 {
3151 /* Currently we do not produce clobber aggregate jump
3152 functions, replace with merging when we do. */
3153 gcc_assert (!dst->agg.items);
3154
3155 dst->agg.by_ref = src->agg.by_ref;
3156 dst->agg.items = vec_safe_copy (src->agg.items);
3157 }
3158 }
3159 else
3160 ipa_set_jf_unknown (dst);
3161 }
3162 }
3163 }
3164
3165 /* If TARGET is an addr_expr of a function declaration, make it the
3166 (SPECULATIVE) destination of an indirect edge IE and return the edge.
3167 Otherwise, return NULL. */
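/* A hedged usage sketch (illustrative only): callers below typically invoke
   this as

     cs = ipa_make_edge_direct_to_target (ie, target, speculative);

   where TARGET is an ADDR_EXPR or FUNCTION_DECL obtained from jump functions
   or from a virtual table lookup; a NULL result means the edge was left
   indirect. */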
3168
3169 struct cgraph_edge *
3170 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3171 bool speculative)
3172 {
3173 struct cgraph_node *callee;
3174 bool unreachable = false;
3175
3176 if (TREE_CODE (target) == ADDR_EXPR)
3177 target = TREE_OPERAND (target, 0);
3178 if (TREE_CODE (target) != FUNCTION_DECL)
3179 {
3180 target = canonicalize_constructor_val (target, NULL);
3181 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3182 {
3183 /* Member pointer call that goes through a VMT lookup. */
3184 if (ie->indirect_info->member_ptr
3185 /* Or if the target is not an invariant expression and we do not
3186 know if it will evaluate to a function at runtime.
3187 This can happen when folding through &VAR, where &VAR
3188 is IP invariant, but VAR itself is not.
3189
3190 TODO: Revisit this when GCC 5 is branched. It seems that
3191 the member_ptr check is not needed and that we may try to fold
3192 the expression and see if VAR is readonly. */
3193 || !is_gimple_ip_invariant (target))
3194 {
3195 if (dump_enabled_p ())
3196 {
3197 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3198 "discovered direct call non-invariant %s\n",
3199 ie->caller->dump_name ());
3200 }
3201 return NULL;
3202 }
3203
3204
3205 if (dump_enabled_p ())
3206 {
3207 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3208 "discovered direct call to non-function in %s, "
3209 "making it __builtin_unreachable\n",
3210 ie->caller->dump_name ());
3211 }
3212
3213 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3214 callee = cgraph_node::get_create (target);
3215 unreachable = true;
3216 }
3217 else
3218 callee = cgraph_node::get (target);
3219 }
3220 else
3221 callee = cgraph_node::get (target);
3222
3223 /* Because may-edges are not explicitly represented and the vtable may be
3224 external, we may create the first reference to the object in the unit. */
3225 if (!callee || callee->inlined_to)
3226 {
3227
3228 /* We had better ensure we can refer to it.
3229 In the case of static functions we are out of luck, since we have already
3230 removed their bodies. In the case of public functions we may or may
3231 not introduce the reference. */
3232 if (!canonicalize_constructor_val (target, NULL)
3233 || !TREE_PUBLIC (target))
3234 {
3235 if (dump_file)
3236 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3237 "(%s -> %s) but cannot refer to it. Giving up.\n",
3238 ie->caller->dump_name (),
3239 ie->callee->dump_name ());
3240 return NULL;
3241 }
3242 callee = cgraph_node::get_create (target);
3243 }
3244
3245 /* If the edge is already speculative, check whether the new target agrees with the existing speculation. */
3246 if (speculative && ie->speculative)
3247 {
3248 struct cgraph_edge *e2;
3249 struct ipa_ref *ref;
3250 ie->speculative_call_info (e2, ie, ref);
3251 if (e2->callee->ultimate_alias_target ()
3252 != callee->ultimate_alias_target ())
3253 {
3254 if (dump_file)
3255 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
3256 "target (%s -> %s) but the call is already "
3257 "speculated to %s. Giving up.\n",
3258 ie->caller->dump_name (), callee->dump_name (),
3259 e2->callee->dump_name ());
3260 }
3261 else
3262 {
3263 if (dump_file)
3264 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
3265 "(%s -> %s) this agree with previous speculation.\n",
3266 ie->caller->dump_name (), callee->dump_name ());
3267 }
3268 return NULL;
3269 }
3270
3271 if (!dbg_cnt (devirt))
3272 return NULL;
3273
3274 ipa_check_create_node_params ();
3275
3276 /* We cannot make edges to inline clones. It is a bug that someone removed
3277 the cgraph node too early. */
3278 gcc_assert (!callee->inlined_to);
3279
3280 if (dump_file && !unreachable)
3281 {
3282 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3283 "(%s -> %s), for stmt ",
3284 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3285 speculative ? "speculative" : "known",
3286 ie->caller->dump_name (),
3287 callee->dump_name ());
3288 if (ie->call_stmt)
3289 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3290 else
3291 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3292 }
3293 if (dump_enabled_p ())
3294 {
3295 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3296 "converting indirect call in %s to direct call to %s\n",
3297 ie->caller->name (), callee->name ());
3298 }
3299 if (!speculative)
3300 {
3301 struct cgraph_edge *orig = ie;
3302 ie = ie->make_direct (callee);
3303 /* If we resolved a speculative edge, the cost is already up to date
3304 for the direct call (adjusted by inline_edge_duplication_hook). */
3305 if (ie == orig)
3306 {
3307 ipa_call_summary *es = ipa_call_summaries->get (ie);
3308 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3309 - eni_size_weights.call_cost);
3310 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3311 - eni_time_weights.call_cost);
3312 }
3313 }
3314 else
3315 {
3316 if (!callee->can_be_discarded_p ())
3317 {
3318 cgraph_node *alias;
3319 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3320 if (alias)
3321 callee = alias;
3322 }
3323 /* make_speculative will update ie's cost to direct call cost. */
3324 ie = ie->make_speculative
3325 (callee, ie->count.apply_scale (8, 10));
3326 }
3327
3328 return ie;
3329 }
3330
3331 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3332 CONSTRUCTOR and return it. Return NULL if the search fails for some
3333 reason. */
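/* A hedged illustration (hypothetical declarations, not from the original
   sources): for

     struct S { int a; void (*f) (void); };
     static const struct S cs = { 1, callback };

   a lookup in DECL_INITIAL (cs) with REQ_OFFSET equal to the bit position of
   the field f would typically return the ADDR_EXPR of callback, while an
   offset pointing into padding yields NULL. */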
3334
3335 static tree
3336 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3337 {
3338 tree type = TREE_TYPE (constructor);
3339 if (TREE_CODE (type) != ARRAY_TYPE
3340 && TREE_CODE (type) != RECORD_TYPE)
3341 return NULL;
3342
3343 unsigned ix;
3344 tree index, val;
3345 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3346 {
3347 HOST_WIDE_INT elt_offset;
3348 if (TREE_CODE (type) == ARRAY_TYPE)
3349 {
3350 offset_int off;
3351 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3352 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3353
3354 if (index)
3355 {
3356 if (TREE_CODE (index) == RANGE_EXPR)
3357 off = wi::to_offset (TREE_OPERAND (index, 0));
3358 else
3359 off = wi::to_offset (index);
3360 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3361 {
3362 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3363 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3364 off = wi::sext (off - wi::to_offset (low_bound),
3365 TYPE_PRECISION (TREE_TYPE (index)));
3366 }
3367 off *= wi::to_offset (unit_size);
3368 /* ??? Handle more than just the first index of a
3369 RANGE_EXPR. */
3370 }
3371 else
3372 off = wi::to_offset (unit_size) * ix;
3373
3374 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3375 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3376 continue;
3377 elt_offset = off.to_shwi ();
3378 }
3379 else if (TREE_CODE (type) == RECORD_TYPE)
3380 {
3381 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3382 if (DECL_BIT_FIELD (index))
3383 continue;
3384 elt_offset = int_bit_position (index);
3385 }
3386 else
3387 gcc_unreachable ();
3388
3389 if (elt_offset > req_offset)
3390 return NULL;
3391
3392 if (TREE_CODE (val) == CONSTRUCTOR)
3393 return find_constructor_constant_at_offset (val,
3394 req_offset - elt_offset);
3395
3396 if (elt_offset == req_offset
3397 && is_gimple_reg_type (TREE_TYPE (val))
3398 && is_gimple_ip_invariant (val))
3399 return val;
3400 }
3401 return NULL;
3402 }
3403
3404 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3405 invariant from a static constructor and if so, return it. Otherwise return
3406 NULL. */
3407
3408 static tree
3409 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3410 {
3411 if (by_ref)
3412 {
3413 if (TREE_CODE (scalar) != ADDR_EXPR)
3414 return NULL;
3415 scalar = TREE_OPERAND (scalar, 0);
3416 }
3417
3418 if (!VAR_P (scalar)
3419 || !is_global_var (scalar)
3420 || !TREE_READONLY (scalar)
3421 || !DECL_INITIAL (scalar)
3422 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3423 return NULL;
3424
3425 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3426 }
3427
3428 /* Retrieve a value from AGG, a set of known offset/value pairs for an
3429 aggregate, or from the static initializer of SCALAR (which can be NULL), for
3430 the given OFFSET, or return NULL if there is none. BY_REF specifies whether
3431 the value has to be passed by reference or by value. If FROM_GLOBAL_CONSTANT
3432 is non-NULL, then the boolean it points to is set to true if the value comes
3433 from an initializer of a constant. */
3434
3435 tree
3436 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3437 HOST_WIDE_INT offset, bool by_ref,
3438 bool *from_global_constant)
3439 {
3440 struct ipa_agg_value *item;
3441 int i;
3442
3443 if (scalar)
3444 {
3445 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3446 if (res)
3447 {
3448 if (from_global_constant)
3449 *from_global_constant = true;
3450 return res;
3451 }
3452 }
3453
3454 if (!agg
3455 || by_ref != agg->by_ref)
3456 return NULL;
3457
3458 FOR_EACH_VEC_ELT (agg->items, i, item)
3459 if (item->offset == offset)
3460 {
3461 /* Currently we do not have clobber values, return NULL for them once
3462 we do. */
3463 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3464 if (from_global_constant)
3465 *from_global_constant = false;
3466 return item->value;
3467 }
3468 return NULL;
3469 }
3470
3471 /* Remove a reference to SYMBOL from the list of references of a node given by
3472 reference description RDESC. Return true if the reference has been
3473 successfully found and removed. */
3474
3475 static bool
3476 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3477 {
3478 struct ipa_ref *to_del;
3479 struct cgraph_edge *origin;
3480
3481 origin = rdesc->cs;
3482 if (!origin)
3483 return false;
3484 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3485 origin->lto_stmt_uid);
3486 if (!to_del)
3487 return false;
3488
3489 to_del->remove_reference ();
3490 if (dump_file)
3491 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3492 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3493 return true;
3494 }
3495
3496 /* If JFUNC has a reference description with refcount different from
3497 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3498 NULL. JFUNC must be a constant jump function. */
3499
3500 static struct ipa_cst_ref_desc *
3501 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3502 {
3503 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3504 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3505 return rdesc;
3506 else
3507 return NULL;
3508 }
3509
3510 /* If the value of constant jump function JFUNC is an address of a function
3511 declaration, return the associated call graph node. Otherwise return
3512 NULL. */
3513
3514 static cgraph_node *
3515 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3516 {
3517 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3518 tree cst = ipa_get_jf_constant (jfunc);
3519 if (TREE_CODE (cst) != ADDR_EXPR
3520 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3521 return NULL;
3522
3523 return cgraph_node::get (TREE_OPERAND (cst, 0));
3524 }
3525
3526
3527 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3528 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3529 the edge specified in the rdesc. Return false if either the symbol or the
3530 reference could not be found, otherwise return true. */
3531
3532 static bool
3533 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3534 {
3535 struct ipa_cst_ref_desc *rdesc;
3536 if (jfunc->type == IPA_JF_CONST
3537 && (rdesc = jfunc_rdesc_usable (jfunc))
3538 && --rdesc->refcount == 0)
3539 {
3540 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3541 if (!symbol)
3542 return false;
3543
3544 return remove_described_reference (symbol, rdesc);
3545 }
3546 return true;
3547 }
3548
3549 /* Try to find a destination for indirect edge IE that corresponds to a simple
3550 call or a call of a member function pointer and where the destination is a
3551 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3552 the type of the parameter to which the result of JFUNC is passed. If it can
3553 be determined, return the newly direct edge, otherwise return NULL.
3554 NEW_ROOT and NEW_ROOT_INFO are the node and its info that the JFUNC lattices
3555 are relative to. */
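/* A simplified, assumed example of the situation handled here (hypothetical
   names): if the caller does

     struct cfg c;
     c.hook = my_hook;
     run (&c);

   and run performs an indirect call through c.hook, the aggregate jump
   function may identify my_hook as the destination, so the indirect edge can
   be turned into a direct (or possibly speculative) call to it. */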
3556
3557 static struct cgraph_edge *
3558 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3559 struct ipa_jump_func *jfunc, tree target_type,
3560 struct cgraph_node *new_root,
3561 class ipa_node_params *new_root_info)
3562 {
3563 struct cgraph_edge *cs;
3564 tree target;
3565 bool agg_contents = ie->indirect_info->agg_contents;
3566 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3567 if (agg_contents)
3568 {
3569 bool from_global_constant;
3570 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3571 new_root,
3572 &jfunc->agg);
3573 target = ipa_find_agg_cst_for_param (&agg, scalar,
3574 ie->indirect_info->offset,
3575 ie->indirect_info->by_ref,
3576 &from_global_constant);
3577 agg.release ();
3578 if (target
3579 && !from_global_constant
3580 && !ie->indirect_info->guaranteed_unmodified)
3581 return NULL;
3582 }
3583 else
3584 target = scalar;
3585 if (!target)
3586 return NULL;
3587 cs = ipa_make_edge_direct_to_target (ie, target);
3588
3589 if (cs && !agg_contents)
3590 {
3591 bool ok;
3592 gcc_checking_assert (cs->callee
3593 && (cs != ie
3594 || jfunc->type != IPA_JF_CONST
3595 || !cgraph_node_for_jfunc (jfunc)
3596 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3597 ok = try_decrement_rdesc_refcount (jfunc);
3598 gcc_checking_assert (ok);
3599 }
3600
3601 return cs;
3602 }
3603
3604 /* Return the target to be used in cases of impossible devirtualization. IE
3605 and target (the latter can be NULL) are dumped when dumping is enabled. */
3606
3607 tree
3608 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3609 {
3610 if (dump_file)
3611 {
3612 if (target)
3613 fprintf (dump_file,
3614 "Type inconsistent devirtualization: %s->%s\n",
3615 ie->caller->dump_name (),
3616 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3617 else
3618 fprintf (dump_file,
3619 "No devirtualization target in %s\n",
3620 ie->caller->dump_name ());
3621 }
3622 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3623 cgraph_node::get_create (new_target);
3624 return new_target;
3625 }
3626
3627 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3628 call based on a formal parameter which is described by jump function JFUNC
3629 and if it can be determined, make it direct and return the direct edge.
3630 Otherwise, return NULL. CTX describes the polymorphic context that the
3631 parameter the call is based on brings along with it. NEW_ROOT and
3632 NEW_ROOT_INFO are the node and its info that the JFUNC lattices are relative
3633 to. */
3634
3635 static struct cgraph_edge *
3636 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3637 struct ipa_jump_func *jfunc,
3638 class ipa_polymorphic_call_context ctx,
3639 struct cgraph_node *new_root,
3640 class ipa_node_params *new_root_info)
3641 {
3642 tree target = NULL;
3643 bool speculative = false;
3644
3645 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3646 return NULL;
3647
3648 gcc_assert (!ie->indirect_info->by_ref);
3649
3650 /* Try to do lookup via known virtual table pointer value. */
3651 if (!ie->indirect_info->vptr_changed
3652 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3653 {
3654 tree vtable;
3655 unsigned HOST_WIDE_INT offset;
3656 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3657 : NULL;
3658 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3659 new_root,
3660 &jfunc->agg);
3661 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3662 ie->indirect_info->offset,
3663 true);
3664 agg.release ();
3665 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3666 {
3667 bool can_refer;
3668 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3669 vtable, offset, &can_refer);
3670 if (can_refer)
3671 {
3672 if (!t
3673 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3674 || !possible_polymorphic_call_target_p
3675 (ie, cgraph_node::get (t)))
3676 {
3677 /* Do not speculate builtin_unreachable, it is stupid! */
3678 if (!ie->indirect_info->vptr_changed)
3679 target = ipa_impossible_devirt_target (ie, target);
3680 else
3681 target = NULL;
3682 }
3683 else
3684 {
3685 target = t;
3686 speculative = ie->indirect_info->vptr_changed;
3687 }
3688 }
3689 }
3690 }
3691
3692 ipa_polymorphic_call_context ie_context (ie);
3693 vec <cgraph_node *>targets;
3694 bool final;
3695
3696 ctx.offset_by (ie->indirect_info->offset);
3697 if (ie->indirect_info->vptr_changed)
3698 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3699 ie->indirect_info->otr_type);
3700 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3701 targets = possible_polymorphic_call_targets
3702 (ie->indirect_info->otr_type,
3703 ie->indirect_info->otr_token,
3704 ctx, &final);
3705 if (final && targets.length () <= 1)
3706 {
3707 speculative = false;
3708 if (targets.length () == 1)
3709 target = targets[0]->decl;
3710 else
3711 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3712 }
3713 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3714 && !ie->speculative && ie->maybe_hot_p ())
3715 {
3716 cgraph_node *n;
3717 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3718 ie->indirect_info->otr_token,
3719 ie->indirect_info->context);
3720 if (n)
3721 {
3722 target = n->decl;
3723 speculative = true;
3724 }
3725 }
3726
3727 if (target)
3728 {
3729 if (!possible_polymorphic_call_target_p
3730 (ie, cgraph_node::get_create (target)))
3731 {
3732 if (speculative)
3733 return NULL;
3734 target = ipa_impossible_devirt_target (ie, target);
3735 }
3736 return ipa_make_edge_direct_to_target (ie, target, speculative);
3737 }
3738 else
3739 return NULL;
3740 }
3741
3742 /* Update the param called notes associated with NODE when CS is being inlined,
3743 assuming NODE is (potentially indirectly) inlined into CS->callee.
3744 Moreover, if the callee is discovered to be constant, create a new cgraph
3745 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3746 unless NEW_EDGES is NULL. Return true iff new edges were created. */
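/* For example, if the inlined callee performs an indirect call through its
   second formal parameter and the jump function recorded at CS for that
   parameter is a simple pass-through of the caller's first parameter, the
   loop below rewrites the indirect call's param_index to 0 (the caller's
   parameter) and marks that parameter as used by an indirect call.  */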
3747
3748 static bool
3749 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3750 struct cgraph_node *node,
3751 vec<cgraph_edge *> *new_edges)
3752 {
3753 class ipa_edge_args *top;
3754 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3755 struct cgraph_node *new_root;
3756 class ipa_node_params *new_root_info, *inlined_node_info;
3757 bool res = false;
3758
3759 ipa_check_create_edge_args ();
3760 top = IPA_EDGE_REF (cs);
3761 new_root = cs->caller->inlined_to
3762 ? cs->caller->inlined_to : cs->caller;
3763 new_root_info = IPA_NODE_REF (new_root);
3764 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3765
3766 for (ie = node->indirect_calls; ie; ie = next_ie)
3767 {
3768 class cgraph_indirect_call_info *ici = ie->indirect_info;
3769 struct ipa_jump_func *jfunc;
3770 int param_index;
3771 cgraph_node *spec_target = NULL;
3772
3773 next_ie = ie->next_callee;
3774
3775 if (ici->param_index == -1)
3776 continue;
3777
3778 /* We must check range due to calls with variable number of arguments: */
3779 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3780 {
3781 ici->param_index = -1;
3782 continue;
3783 }
3784
3785 param_index = ici->param_index;
3786 jfunc = ipa_get_ith_jump_func (top, param_index);
3787
3788 if (ie->speculative)
3789 {
3790 struct cgraph_edge *de;
3791 struct ipa_ref *ref;
3792 ie->speculative_call_info (de, ie, ref);
3793 spec_target = de->callee;
3794 }
3795
3796 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3797 new_direct_edge = NULL;
3798 else if (ici->polymorphic)
3799 {
3800 ipa_polymorphic_call_context ctx;
3801 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3802 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3803 new_root,
3804 new_root_info);
3805 }
3806 else
3807 {
3808 tree target_type = ipa_get_type (inlined_node_info, param_index);
3809 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3810 target_type,
3811 new_root,
3812 new_root_info);
3813 }
3814
3815 /* If speculation was removed, then we need to do nothing. */
3816 if (new_direct_edge && new_direct_edge != ie
3817 && new_direct_edge->callee == spec_target)
3818 {
3819 new_direct_edge->indirect_inlining_edge = 1;
3820 top = IPA_EDGE_REF (cs);
3821 res = true;
3822 if (!new_direct_edge->speculative)
3823 continue;
3824 }
3825 else if (new_direct_edge)
3826 {
3827 new_direct_edge->indirect_inlining_edge = 1;
3828 if (new_edges)
3829 {
3830 new_edges->safe_push (new_direct_edge);
3831 res = true;
3832 }
3833 top = IPA_EDGE_REF (cs);
3834 /* If speculative edge was introduced we still need to update
3835 call info of the indirect edge. */
3836 if (!new_direct_edge->speculative)
3837 continue;
3838 }
3839 if (jfunc->type == IPA_JF_PASS_THROUGH
3840 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3841 {
3842 if (ici->agg_contents
3843 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3844 && !ici->polymorphic)
3845 ici->param_index = -1;
3846 else
3847 {
3848 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3849 if (ici->polymorphic
3850 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3851 ici->vptr_changed = true;
3852 ipa_set_param_used_by_indirect_call (new_root_info,
3853 ici->param_index, true);
3854 if (ici->polymorphic)
3855 ipa_set_param_used_by_polymorphic_call (new_root_info,
3856 ici->param_index, true);
3857 }
3858 }
3859 else if (jfunc->type == IPA_JF_ANCESTOR)
3860 {
3861 if (ici->agg_contents
3862 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3863 && !ici->polymorphic)
3864 ici->param_index = -1;
3865 else
3866 {
3867 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3868 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3869 if (ici->polymorphic
3870 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3871 ici->vptr_changed = true;
3872 ipa_set_param_used_by_indirect_call (new_root_info,
3873 ici->param_index, true);
3874 if (ici->polymorphic)
3875 ipa_set_param_used_by_polymorphic_call (new_root_info,
3876 ici->param_index, true);
3877 }
3878 }
3879 else
3880 /* Either we can find a destination for this edge now or never. */
3881 ici->param_index = -1;
3882 }
3883
3884 return res;
3885 }
3886
3887 /* Recursively traverse subtree of NODE (including node) made of inlined
3888 cgraph_edges when CS has been inlined and invoke
3889 update_indirect_edges_after_inlining on all nodes and
3890 update_jump_functions_after_inlining on all non-inlined edges that lead out
3891 of this subtree. Newly discovered indirect edges will be added to
3892 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3893 created. */
3894
3895 static bool
3896 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3897 struct cgraph_node *node,
3898 vec<cgraph_edge *> *new_edges)
3899 {
3900 struct cgraph_edge *e;
3901 bool res;
3902
3903 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3904
3905 for (e = node->callees; e; e = e->next_callee)
3906 if (!e->inline_failed)
3907 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3908 else
3909 update_jump_functions_after_inlining (cs, e);
3910 for (e = node->indirect_calls; e; e = e->next_callee)
3911 update_jump_functions_after_inlining (cs, e);
3912
3913 return res;
3914 }
3915
3916 /* Combine two controlled uses counts as done during inlining. */
3917
3918 static int
3919 combine_controlled_uses_counters (int c, int d)
3920 {
3921 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3922 return IPA_UNDESCRIBED_USE;
3923 else
3924 return c + d - 1;
3925 }
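/* For example, with three described uses of a constant in the caller (C == 3)
   and two in the inlined callee (D == 2), the call site that has just been
   inlined accounted for one of the caller's uses, so the combined count is
   3 + 2 - 1 == 4.  If either count is IPA_UNDESCRIBED_USE, nothing reliable
   is known and the result stays IPA_UNDESCRIBED_USE.  */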
3926
3927 /* Propagate number of controlled users from CS->callee to the new root of the
3928 tree of inlined nodes. */
3929
3930 static void
3931 propagate_controlled_uses (struct cgraph_edge *cs)
3932 {
3933 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3934 if (!args)
3935 return;
3936 struct cgraph_node *new_root = cs->caller->inlined_to
3937 ? cs->caller->inlined_to : cs->caller;
3938 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3939 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3940 int count, i;
3941
3942 if (!old_root_info)
3943 return;
3944
3945 count = MIN (ipa_get_cs_argument_count (args),
3946 ipa_get_param_count (old_root_info));
3947 for (i = 0; i < count; i++)
3948 {
3949 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3950 struct ipa_cst_ref_desc *rdesc;
3951
3952 if (jf->type == IPA_JF_PASS_THROUGH)
3953 {
3954 int src_idx, c, d;
3955 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3956 c = ipa_get_controlled_uses (new_root_info, src_idx);
3957 d = ipa_get_controlled_uses (old_root_info, i);
3958
3959 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3960 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3961 c = combine_controlled_uses_counters (c, d);
3962 ipa_set_controlled_uses (new_root_info, src_idx, c);
3963 if (c == 0 && new_root_info->ipcp_orig_node)
3964 {
3965 struct cgraph_node *n;
3966 struct ipa_ref *ref;
3967 tree t = new_root_info->known_csts[src_idx];
3968
3969 if (t && TREE_CODE (t) == ADDR_EXPR
3970 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3971 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3972 && (ref = new_root->find_reference (n, NULL, 0)))
3973 {
3974 if (dump_file)
3975 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3976 "reference from %s to %s.\n",
3977 new_root->dump_name (),
3978 n->dump_name ());
3979 ref->remove_reference ();
3980 }
3981 }
3982 }
3983 else if (jf->type == IPA_JF_CONST
3984 && (rdesc = jfunc_rdesc_usable (jf)))
3985 {
3986 int d = ipa_get_controlled_uses (old_root_info, i);
3987 int c = rdesc->refcount;
3988 rdesc->refcount = combine_controlled_uses_counters (c, d);
3989 if (rdesc->refcount == 0)
3990 {
3991 tree cst = ipa_get_jf_constant (jf);
3992 struct cgraph_node *n;
3993 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3994 && TREE_CODE (TREE_OPERAND (cst, 0))
3995 == FUNCTION_DECL);
3996 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3997 if (n)
3998 {
3999 struct cgraph_node *clone;
4000 bool ok;
4001 ok = remove_described_reference (n, rdesc);
4002 gcc_checking_assert (ok);
4003
4004 clone = cs->caller;
4005 while (clone->inlined_to
4006 && clone->ipcp_clone
4007 && clone != rdesc->cs->caller)
4008 {
4009 struct ipa_ref *ref;
4010 ref = clone->find_reference (n, NULL, 0);
4011 if (ref)
4012 {
4013 if (dump_file)
4014 fprintf (dump_file, "ipa-prop: Removing "
4015 "cloning-created reference "
4016 "from %s to %s.\n",
4017 clone->dump_name (),
4018 n->dump_name ());
4019 ref->remove_reference ();
4020 }
4021 clone = clone->callers->caller;
4022 }
4023 }
4024 }
4025 }
4026 }
4027
4028 for (i = ipa_get_param_count (old_root_info);
4029 i < ipa_get_cs_argument_count (args);
4030 i++)
4031 {
4032 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4033
4034 if (jf->type == IPA_JF_CONST)
4035 {
4036 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4037 if (rdesc)
4038 rdesc->refcount = IPA_UNDESCRIBED_USE;
4039 }
4040 else if (jf->type == IPA_JF_PASS_THROUGH)
4041 ipa_set_controlled_uses (new_root_info,
4042 jf->value.pass_through.formal_id,
4043 IPA_UNDESCRIBED_USE);
4044 }
4045 }
4046
4047 /* Update jump functions and call note functions on inlining the call site CS.
4048 CS is expected to lead to a node already cloned by
4049 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4050 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4051 created. */
4052
4053 bool
4054 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4055 vec<cgraph_edge *> *new_edges)
4056 {
4057 bool changed;
4058 /* Do nothing if the preparation phase has not been carried out yet
4059 (i.e. during early inlining). */
4060 if (!ipa_node_params_sum)
4061 return false;
4062 gcc_assert (ipa_edge_args_sum);
4063
4064 propagate_controlled_uses (cs);
4065 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4066 ipa_node_params_sum->remove (cs->callee);
4067
4068 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4069 if (args)
4070 {
4071 bool ok = true;
4072 if (args->jump_functions)
4073 {
4074 struct ipa_jump_func *jf;
4075 int i;
4076 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4077 if (jf->type == IPA_JF_CONST
4078 && ipa_get_jf_constant_rdesc (jf))
4079 {
4080 ok = false;
4081 break;
4082 }
4083 }
4084 if (ok)
4085 ipa_edge_args_sum->remove (cs);
4086 }
4087 if (ipcp_transformation_sum)
4088 ipcp_transformation_sum->remove (cs->callee);
4089
4090 return changed;
4091 }
4092
4093 /* Ensure that the array of edge argument infos is big enough to accommodate a
4094 structure for all edges, reallocating it if not. Also allocate the
4095 associated hash tables if they do not already exist. */
4096
4097 void
4098 ipa_check_create_edge_args (void)
4099 {
4100 if (!ipa_edge_args_sum)
4101 ipa_edge_args_sum
4102 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4103 ipa_edge_args_sum_t (symtab, true));
4104 if (!ipa_bits_hash_table)
4105 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4106 if (!ipa_vr_hash_table)
4107 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4108 }
4109
4110 /* Free all ipa_edge structures. */
4111
4112 void
4113 ipa_free_all_edge_args (void)
4114 {
4115 if (!ipa_edge_args_sum)
4116 return;
4117
4118 ggc_delete (ipa_edge_args_sum);
4119 ipa_edge_args_sum = NULL;
4120 }
4121
4122 /* Free all ipa_node_params structures. */
4123
4124 void
4125 ipa_free_all_node_params (void)
4126 {
4127 ggc_delete (ipa_node_params_sum);
4128 ipa_node_params_sum = NULL;
4129 }
4130
4131 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4132 tables if they do not already exist. */
4133
4134 void
4135 ipcp_transformation_initialize (void)
4136 {
4137 if (!ipa_bits_hash_table)
4138 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4139 if (!ipa_vr_hash_table)
4140 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4141 if (ipcp_transformation_sum == NULL)
4142 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4143 }
4144
4145 /* Release the IPA CP transformation summary. */
4146
4147 void
4148 ipcp_free_transformation_sum (void)
4149 {
4150 if (!ipcp_transformation_sum)
4151 return;
4152
4153 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4154 ggc_free (ipcp_transformation_sum);
4155 ipcp_transformation_sum = NULL;
4156 }
4157
4158 /* Set the aggregate replacements of NODE to be AGGVALS. */
4159
4160 void
4161 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4162 struct ipa_agg_replacement_value *aggvals)
4163 {
4164 ipcp_transformation_initialize ();
4165 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4166 s->agg_values = aggvals;
4167 }
4168
4169 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4170 count data structures accordingly. */
4171
4172 void
4173 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4174 {
4175 if (args->jump_functions)
4176 {
4177 struct ipa_jump_func *jf;
4178 int i;
4179 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4180 {
4181 struct ipa_cst_ref_desc *rdesc;
4182 try_decrement_rdesc_refcount (jf);
4183 if (jf->type == IPA_JF_CONST
4184 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4185 && rdesc->cs == cs)
4186 rdesc->cs = NULL;
4187 }
4188 }
4189 }
4190
4191 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4192 reference count data structures accordingly. */
4193
4194 void
4195 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4196 ipa_edge_args *old_args, ipa_edge_args *new_args)
4197 {
4198 unsigned int i;
4199
4200 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4201 if (old_args->polymorphic_call_contexts)
4202 new_args->polymorphic_call_contexts
4203 = vec_safe_copy (old_args->polymorphic_call_contexts);
4204
4205 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4206 {
4207 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4208 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4209
4210 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4211
4212 if (src_jf->type == IPA_JF_CONST)
4213 {
4214 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4215
4216 if (!src_rdesc)
4217 dst_jf->value.constant.rdesc = NULL;
4218 else if (src->caller == dst->caller)
4219 {
4220 struct ipa_ref *ref;
4221 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4222 gcc_checking_assert (n);
4223 ref = src->caller->find_reference (n, src->call_stmt,
4224 src->lto_stmt_uid);
4225 gcc_checking_assert (ref);
4226 dst->caller->clone_reference (ref, ref->stmt);
4227
4228 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4229 dst_rdesc->cs = dst;
4230 dst_rdesc->refcount = src_rdesc->refcount;
4231 dst_rdesc->next_duplicate = NULL;
4232 dst_jf->value.constant.rdesc = dst_rdesc;
4233 }
4234 else if (src_rdesc->cs == src)
4235 {
4236 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4237 dst_rdesc->cs = dst;
4238 dst_rdesc->refcount = src_rdesc->refcount;
4239 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4240 src_rdesc->next_duplicate = dst_rdesc;
4241 dst_jf->value.constant.rdesc = dst_rdesc;
4242 }
4243 else
4244 {
4245 struct ipa_cst_ref_desc *dst_rdesc;
4246 /* This can happen during inlining, when a JFUNC can refer to a
4247 reference taken in a function up in the tree of inline clones.
4248 We need to find the duplicate that refers to our tree of
4249 inline clones. */
4250
4251 gcc_assert (dst->caller->inlined_to);
4252 for (dst_rdesc = src_rdesc->next_duplicate;
4253 dst_rdesc;
4254 dst_rdesc = dst_rdesc->next_duplicate)
4255 {
4256 struct cgraph_node *top;
4257 top = dst_rdesc->cs->caller->inlined_to
4258 ? dst_rdesc->cs->caller->inlined_to
4259 : dst_rdesc->cs->caller;
4260 if (dst->caller->inlined_to == top)
4261 break;
4262 }
4263 gcc_assert (dst_rdesc);
4264 dst_jf->value.constant.rdesc = dst_rdesc;
4265 }
4266 }
4267 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4268 && src->caller == dst->caller)
4269 {
4270 struct cgraph_node *inline_root = dst->caller->inlined_to
4271 ? dst->caller->inlined_to : dst->caller;
4272 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4273 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4274
4275 int c = ipa_get_controlled_uses (root_info, idx);
4276 if (c != IPA_UNDESCRIBED_USE)
4277 {
4278 c++;
4279 ipa_set_controlled_uses (root_info, idx, c);
4280 }
4281 }
4282 }
4283 }
4284
4285 /* Analyze newly added function into callgraph. */
4286
4287 static void
4288 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4289 {
4290 if (node->has_gimple_body_p ())
4291 ipa_analyze_node (node);
4292 }
4293
4294 /* Hook that is called by summary when a node is duplicated. */
4295
4296 void
4297 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4298 ipa_node_params *old_info,
4299 ipa_node_params *new_info)
4300 {
4301 ipa_agg_replacement_value *old_av, *new_av;
4302
4303 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4304 new_info->lattices = NULL;
4305 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4306 new_info->known_csts = old_info->known_csts.copy ();
4307 new_info->known_contexts = old_info->known_contexts.copy ();
4308
4309 new_info->analysis_done = old_info->analysis_done;
4310 new_info->node_enqueued = old_info->node_enqueued;
4311 new_info->versionable = old_info->versionable;
4312
4313 old_av = ipa_get_agg_replacements_for_node (src);
4314 if (old_av)
4315 {
4316 new_av = NULL;
4317 while (old_av)
4318 {
4319 struct ipa_agg_replacement_value *v;
4320
4321 v = ggc_alloc<ipa_agg_replacement_value> ();
4322 memcpy (v, old_av, sizeof (*v));
4323 v->next = new_av;
4324 new_av = v;
4325 old_av = old_av->next;
4326 }
4327 ipa_set_node_agg_value_chain (dst, new_av);
4328 }
4329 }
4330
4331 /* Duplication of ipcp transformation summaries. */
4332
4333 void
4334 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4335 ipcp_transformation *src_trans,
4336 ipcp_transformation *dst_trans)
4337 {
4338 /* Avoid redundant work of duplicating vectors we will never use. */
4339 if (dst->inlined_to)
4340 return;
4341 dst_trans->bits = vec_safe_copy (src_trans->bits);
4342 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4343 ipa_agg_replacement_value *agg = src_trans->agg_values,
4344 **aggptr = &dst_trans->agg_values;
4345 while (agg)
4346 {
4347 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4348 **aggptr = *agg;
4349 agg = agg->next;
4350 aggptr = &(*aggptr)->next;
4351 }
4352 }
4353
4354 /* Register our cgraph hooks if they are not already there. */
4355
4356 void
4357 ipa_register_cgraph_hooks (void)
4358 {
4359 ipa_check_create_node_params ();
4360 ipa_check_create_edge_args ();
4361
4362 function_insertion_hook_holder =
4363 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4364 }
4365
4366 /* Unregister our cgraph hooks. */
4367
4368 static void
4369 ipa_unregister_cgraph_hooks (void)
4370 {
4371 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4372 function_insertion_hook_holder = NULL;
4373 }
4374
4375 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4376 longer needed after ipa-cp. */
4377
4378 void
4379 ipa_free_all_structures_after_ipa_cp (void)
4380 {
4381 if (!optimize && !in_lto_p)
4382 {
4383 ipa_free_all_edge_args ();
4384 ipa_free_all_node_params ();
4385 ipcp_sources_pool.release ();
4386 ipcp_cst_values_pool.release ();
4387 ipcp_poly_ctx_values_pool.release ();
4388 ipcp_agg_lattice_pool.release ();
4389 ipa_unregister_cgraph_hooks ();
4390 ipa_refdesc_pool.release ();
4391 }
4392 }
4393
4394 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4395 longer needed after indirect inlining. */
4396
4397 void
4398 ipa_free_all_structures_after_iinln (void)
4399 {
4400 ipa_free_all_edge_args ();
4401 ipa_free_all_node_params ();
4402 ipa_unregister_cgraph_hooks ();
4403 ipcp_sources_pool.release ();
4404 ipcp_cst_values_pool.release ();
4405 ipcp_poly_ctx_values_pool.release ();
4406 ipcp_agg_lattice_pool.release ();
4407 ipa_refdesc_pool.release ();
4408 }
4409
4410 /* Print ipa_tree_map data structures of function NODE in the
4411 callgraph to F. */
4412
4413 void
4414 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4415 {
4416 int i, count;
4417 class ipa_node_params *info;
4418
4419 if (!node->definition)
4420 return;
4421 info = IPA_NODE_REF (node);
4422 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4423 if (!info)
4424 {
4425 fprintf (f, " no params return\n");
4426 return;
4427 }
4428 count = ipa_get_param_count (info);
4429 for (i = 0; i < count; i++)
4430 {
4431 int c;
4432
4433 fprintf (f, " ");
4434 ipa_dump_param (f, info, i);
4435 if (ipa_is_param_used (info, i))
4436 fprintf (f, " used");
4437 if (ipa_is_param_used_by_ipa_predicates (info, i))
4438 fprintf (f, " used_by_ipa_predicates");
4439 if (ipa_is_param_used_by_indirect_call (info, i))
4440 fprintf (f, " used_by_indirect_call");
4441 if (ipa_is_param_used_by_polymorphic_call (info, i))
4442 fprintf (f, " used_by_polymorphic_call");
4443 c = ipa_get_controlled_uses (info, i);
4444 if (c == IPA_UNDESCRIBED_USE)
4445 fprintf (f, " undescribed_use");
4446 else
4447 fprintf (f, " controlled_uses=%i", c);
4448 fprintf (f, "\n");
4449 }
4450 }
4451
4452 /* Print ipa_tree_map data structures of all functions in the
4453 callgraph to F. */
4454
4455 void
4456 ipa_print_all_params (FILE * f)
4457 {
4458 struct cgraph_node *node;
4459
4460 fprintf (f, "\nFunction parameters:\n");
4461 FOR_EACH_FUNCTION (node)
4462 ipa_print_node_params (f, node);
4463 }
4464
4465 /* Dump the AV linked list. */
4466
4467 void
4468 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4469 {
4470 bool comma = false;
4471 fprintf (f, " Aggregate replacements:");
4472 for (; av; av = av->next)
4473 {
4474 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4475 av->index, av->offset);
4476 print_generic_expr (f, av->value);
4477 comma = true;
4478 }
4479 fprintf (f, "\n");
4480 }
4481
4482 /* Stream out jump function JUMP_FUNC to OB. */
4483
4484 static void
4485 ipa_write_jump_function (struct output_block *ob,
4486 struct ipa_jump_func *jump_func)
4487 {
4488 struct ipa_agg_jf_item *item;
4489 struct bitpack_d bp;
4490 int i, count;
4491 int flag = 0;
4492
4493 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4494 as well as WPA memory by handling them specially. */
4495 if (jump_func->type == IPA_JF_CONST
4496 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4497 flag = 1;
4498
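/* Encode the flag in the low bit of the streamed type; the reader splits it
   back apart, as ipa_read_jump_function below does:

     int val = streamer_read_uhwi (ib);
     bool flag = val & 1;
     jftype = (enum jump_func_type) (val / 2);

   so flagging ADDR_EXPR constants costs only one extra bit in the stream.  */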
4499 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4500 switch (jump_func->type)
4501 {
4502 case IPA_JF_UNKNOWN:
4503 break;
4504 case IPA_JF_CONST:
4505 gcc_assert (
4506 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4507 stream_write_tree (ob,
4508 flag
4509 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4510 : jump_func->value.constant.value, true);
4511 break;
4512 case IPA_JF_PASS_THROUGH:
4513 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4514 if (jump_func->value.pass_through.operation == NOP_EXPR)
4515 {
4516 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4517 bp = bitpack_create (ob->main_stream);
4518 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4519 streamer_write_bitpack (&bp);
4520 }
4521 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4522 == tcc_unary)
4523 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4524 else
4525 {
4526 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4527 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4528 }
4529 break;
4530 case IPA_JF_ANCESTOR:
4531 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4532 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4533 bp = bitpack_create (ob->main_stream);
4534 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4535 streamer_write_bitpack (&bp);
4536 break;
4537 default:
4538 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4539 }
4540
4541 count = vec_safe_length (jump_func->agg.items);
4542 streamer_write_uhwi (ob, count);
4543 if (count)
4544 {
4545 bp = bitpack_create (ob->main_stream);
4546 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4547 streamer_write_bitpack (&bp);
4548 }
4549
4550 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4551 {
4552 stream_write_tree (ob, item->type, true);
4553 streamer_write_uhwi (ob, item->offset);
4554 streamer_write_uhwi (ob, item->jftype);
4555 switch (item->jftype)
4556 {
4557 case IPA_JF_UNKNOWN:
4558 break;
4559 case IPA_JF_CONST:
4560 stream_write_tree (ob, item->value.constant, true);
4561 break;
4562 case IPA_JF_PASS_THROUGH:
4563 case IPA_JF_LOAD_AGG:
4564 streamer_write_uhwi (ob, item->value.pass_through.operation);
4565 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4566 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4567 != tcc_unary)
4568 stream_write_tree (ob, item->value.pass_through.operand, true);
4569 if (item->jftype == IPA_JF_LOAD_AGG)
4570 {
4571 stream_write_tree (ob, item->value.load_agg.type, true);
4572 streamer_write_uhwi (ob, item->value.load_agg.offset);
4573 bp = bitpack_create (ob->main_stream);
4574 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4575 streamer_write_bitpack (&bp);
4576 }
4577 break;
4578 default:
4579 fatal_error (UNKNOWN_LOCATION,
4580 "invalid jump function in LTO stream");
4581 }
4582 }
4583
4584 bp = bitpack_create (ob->main_stream);
4585 bp_pack_value (&bp, !!jump_func->bits, 1);
4586 streamer_write_bitpack (&bp);
4587 if (jump_func->bits)
4588 {
4589 streamer_write_widest_int (ob, jump_func->bits->value);
4590 streamer_write_widest_int (ob, jump_func->bits->mask);
4591 }
4592 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4593 streamer_write_bitpack (&bp);
4594 if (jump_func->m_vr)
4595 {
4596 streamer_write_enum (ob->main_stream, value_rang_type,
4597 VR_LAST, jump_func->m_vr->kind ());
4598 stream_write_tree (ob, jump_func->m_vr->min (), true);
4599 stream_write_tree (ob, jump_func->m_vr->max (), true);
4600 }
4601 }
4602
4603 /* Read in jump function JUMP_FUNC from IB. */
4604
4605 static void
4606 ipa_read_jump_function (class lto_input_block *ib,
4607 struct ipa_jump_func *jump_func,
4608 struct cgraph_edge *cs,
4609 class data_in *data_in,
4610 bool prevails)
4611 {
4612 enum jump_func_type jftype;
4613 enum tree_code operation;
4614 int i, count;
4615 int val = streamer_read_uhwi (ib);
4616 bool flag = val & 1;
4617
4618 jftype = (enum jump_func_type) (val / 2);
4619 switch (jftype)
4620 {
4621 case IPA_JF_UNKNOWN:
4622 ipa_set_jf_unknown (jump_func);
4623 break;
4624 case IPA_JF_CONST:
4625 {
4626 tree t = stream_read_tree (ib, data_in);
4627 if (flag && prevails)
4628 t = build_fold_addr_expr (t);
4629 ipa_set_jf_constant (jump_func, t, cs);
4630 }
4631 break;
4632 case IPA_JF_PASS_THROUGH:
4633 operation = (enum tree_code) streamer_read_uhwi (ib);
4634 if (operation == NOP_EXPR)
4635 {
4636 int formal_id = streamer_read_uhwi (ib);
4637 struct bitpack_d bp = streamer_read_bitpack (ib);
4638 bool agg_preserved = bp_unpack_value (&bp, 1);
4639 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4640 }
4641 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4642 {
4643 int formal_id = streamer_read_uhwi (ib);
4644 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4645 }
4646 else
4647 {
4648 tree operand = stream_read_tree (ib, data_in);
4649 int formal_id = streamer_read_uhwi (ib);
4650 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4651 operation);
4652 }
4653 break;
4654 case IPA_JF_ANCESTOR:
4655 {
4656 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4657 int formal_id = streamer_read_uhwi (ib);
4658 struct bitpack_d bp = streamer_read_bitpack (ib);
4659 bool agg_preserved = bp_unpack_value (&bp, 1);
4660 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4661 break;
4662 }
4663 default:
4664 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4665 }
4666
4667 count = streamer_read_uhwi (ib);
4668 if (prevails)
4669 vec_alloc (jump_func->agg.items, count);
4670 if (count)
4671 {
4672 struct bitpack_d bp = streamer_read_bitpack (ib);
4673 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4674 }
4675 for (i = 0; i < count; i++)
4676 {
4677 struct ipa_agg_jf_item item;
4678 item.type = stream_read_tree (ib, data_in);
4679 item.offset = streamer_read_uhwi (ib);
4680 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4681
4682 switch (item.jftype)
4683 {
4684 case IPA_JF_UNKNOWN:
4685 break;
4686 case IPA_JF_CONST:
4687 item.value.constant = stream_read_tree (ib, data_in);
4688 break;
4689 case IPA_JF_PASS_THROUGH:
4690 case IPA_JF_LOAD_AGG:
4691 operation = (enum tree_code) streamer_read_uhwi (ib);
4692 item.value.pass_through.operation = operation;
4693 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4694 if (TREE_CODE_CLASS (operation) == tcc_unary)
4695 item.value.pass_through.operand = NULL_TREE;
4696 else
4697 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4698 if (item.jftype == IPA_JF_LOAD_AGG)
4699 {
4700 struct bitpack_d bp;
4701 item.value.load_agg.type = stream_read_tree (ib, data_in);
4702 item.value.load_agg.offset = streamer_read_uhwi (ib);
4703 bp = streamer_read_bitpack (ib);
4704 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4705 }
4706 break;
4707 default:
4708 fatal_error (UNKNOWN_LOCATION,
4709 "invalid jump function in LTO stream");
4710 }
4711 if (prevails)
4712 jump_func->agg.items->quick_push (item);
4713 }
4714
4715 struct bitpack_d bp = streamer_read_bitpack (ib);
4716 bool bits_known = bp_unpack_value (&bp, 1);
4717 if (bits_known)
4718 {
4719 widest_int value = streamer_read_widest_int (ib);
4720 widest_int mask = streamer_read_widest_int (ib);
4721 if (prevails)
4722 ipa_set_jfunc_bits (jump_func, value, mask);
4723 }
4724 else
4725 jump_func->bits = NULL;
4726
4727 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4728 bool vr_known = bp_unpack_value (&vr_bp, 1);
4729 if (vr_known)
4730 {
4731 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4732 VR_LAST);
4733 tree min = stream_read_tree (ib, data_in);
4734 tree max = stream_read_tree (ib, data_in);
4735 if (prevails)
4736 ipa_set_jfunc_vr (jump_func, type, min, max);
4737 }
4738 else
4739 jump_func->m_vr = NULL;
4740 }
4741
4742 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4743 relevant to indirect inlining to OB. */
4744
4745 static void
4746 ipa_write_indirect_edge_info (struct output_block *ob,
4747 struct cgraph_edge *cs)
4748 {
4749 class cgraph_indirect_call_info *ii = cs->indirect_info;
4750 struct bitpack_d bp;
4751
4752 streamer_write_hwi (ob, ii->param_index);
4753 bp = bitpack_create (ob->main_stream);
4754 bp_pack_value (&bp, ii->polymorphic, 1);
4755 bp_pack_value (&bp, ii->agg_contents, 1);
4756 bp_pack_value (&bp, ii->member_ptr, 1);
4757 bp_pack_value (&bp, ii->by_ref, 1);
4758 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4759 bp_pack_value (&bp, ii->vptr_changed, 1);
4760 streamer_write_bitpack (&bp);
4761 if (ii->agg_contents || ii->polymorphic)
4762 streamer_write_hwi (ob, ii->offset);
4763 else
4764 gcc_assert (ii->offset == 0);
4765
4766 if (ii->polymorphic)
4767 {
4768 streamer_write_hwi (ob, ii->otr_token);
4769 stream_write_tree (ob, ii->otr_type, true);
4770 ii->context.stream_out (ob);
4771 }
4772 }
4773
4774 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4775 relevant to indirect inlining from IB. */
4776
4777 static void
4778 ipa_read_indirect_edge_info (class lto_input_block *ib,
4779 class data_in *data_in,
4780 struct cgraph_edge *cs,
4781 class ipa_node_params *info)
4782 {
4783 class cgraph_indirect_call_info *ii = cs->indirect_info;
4784 struct bitpack_d bp;
4785
4786 ii->param_index = (int) streamer_read_hwi (ib);
4787 bp = streamer_read_bitpack (ib);
4788 ii->polymorphic = bp_unpack_value (&bp, 1);
4789 ii->agg_contents = bp_unpack_value (&bp, 1);
4790 ii->member_ptr = bp_unpack_value (&bp, 1);
4791 ii->by_ref = bp_unpack_value (&bp, 1);
4792 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4793 ii->vptr_changed = bp_unpack_value (&bp, 1);
4794 if (ii->agg_contents || ii->polymorphic)
4795 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4796 else
4797 ii->offset = 0;
4798 if (ii->polymorphic)
4799 {
4800 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4801 ii->otr_type = stream_read_tree (ib, data_in);
4802 ii->context.stream_in (ib, data_in);
4803 }
4804 if (info && ii->param_index >= 0)
4805 {
4806 if (ii->polymorphic)
4807 ipa_set_param_used_by_polymorphic_call (info,
4808 ii->param_index , true);
4809 ipa_set_param_used_by_indirect_call (info,
4810 ii->param_index, true);
4811 }
4812 }
4813
4814 /* Stream out NODE info to OB. */
4815
4816 static void
4817 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4818 {
4819 int node_ref;
4820 lto_symtab_encoder_t encoder;
4821 class ipa_node_params *info = IPA_NODE_REF (node);
4822 int j;
4823 struct cgraph_edge *e;
4824 struct bitpack_d bp;
4825
4826 encoder = ob->decl_state->symtab_node_encoder;
4827 node_ref = lto_symtab_encoder_encode (encoder, node);
4828 streamer_write_uhwi (ob, node_ref);
4829
4830 streamer_write_uhwi (ob, ipa_get_param_count (info));
4831 for (j = 0; j < ipa_get_param_count (info); j++)
4832 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4833 bp = bitpack_create (ob->main_stream);
4834 gcc_assert (info->analysis_done
4835 || ipa_get_param_count (info) == 0);
4836 gcc_assert (!info->node_enqueued);
4837 gcc_assert (!info->ipcp_orig_node);
4838 for (j = 0; j < ipa_get_param_count (info); j++)
4839 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4840 streamer_write_bitpack (&bp);
4841 for (j = 0; j < ipa_get_param_count (info); j++)
4842 {
4843 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4844 stream_write_tree (ob, ipa_get_type (info, j), true);
4845 }
4846 for (e = node->callees; e; e = e->next_callee)
4847 {
4848 class ipa_edge_args *args = IPA_EDGE_REF (e);
4849
4850 if (!args)
4851 {
4852 streamer_write_uhwi (ob, 0);
4853 continue;
4854 }
4855
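/* As with jump function types above, the low bit of the streamed count says
   whether a polymorphic call context follows each jump function; the reader
   (ipa_read_edge_info) recovers it with "count & 1" and "count /= 2".  */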
4856 streamer_write_uhwi (ob,
4857 ipa_get_cs_argument_count (args) * 2
4858 + (args->polymorphic_call_contexts != NULL));
4859 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4860 {
4861 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4862 if (args->polymorphic_call_contexts != NULL)
4863 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4864 }
4865 }
4866 for (e = node->indirect_calls; e; e = e->next_callee)
4867 {
4868 class ipa_edge_args *args = IPA_EDGE_REF (e);
4869 if (!args)
4870 streamer_write_uhwi (ob, 0);
4871 else
4872 {
4873 streamer_write_uhwi (ob,
4874 ipa_get_cs_argument_count (args) * 2
4875 + (args->polymorphic_call_contexts != NULL));
4876 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4877 {
4878 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4879 if (args->polymorphic_call_contexts != NULL)
4880 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4881 }
4882 }
4883 ipa_write_indirect_edge_info (ob, e);
4884 }
4885 }
4886
4887 /* Stream in edge E from IB. */
4888
4889 static void
4890 ipa_read_edge_info (class lto_input_block *ib,
4891 class data_in *data_in,
4892 struct cgraph_edge *e, bool prevails)
4893 {
4894 int count = streamer_read_uhwi (ib);
4895 bool contexts_computed = count & 1;
4896
4897 count /= 2;
4898 if (!count)
4899 return;
4900 if (prevails && e->possibly_call_in_translation_unit_p ())
4901 {
4902 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4903 vec_safe_grow_cleared (args->jump_functions, count);
4904 if (contexts_computed)
4905 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4906 for (int k = 0; k < count; k++)
4907 {
4908 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4909 data_in, prevails);
4910 if (contexts_computed)
4911 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4912 (ib, data_in);
4913 }
4914 }
4915 else
4916 {
4917 for (int k = 0; k < count; k++)
4918 {
4919 struct ipa_jump_func dummy;
4920 ipa_read_jump_function (ib, &dummy, e,
4921 data_in, prevails);
4922 if (contexts_computed)
4923 {
4924 class ipa_polymorphic_call_context ctx;
4925 ctx.stream_in (ib, data_in);
4926 }
4927 }
4928 }
4929 }
4930
4931 /* Stream in NODE info from IB. */
4932
4933 static void
4934 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4935 class data_in *data_in)
4936 {
4937 int k;
4938 struct cgraph_edge *e;
4939 struct bitpack_d bp;
4940 bool prevails = node->prevailing_p ();
4941 class ipa_node_params *info = prevails
4942 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
4943
4944 int param_count = streamer_read_uhwi (ib);
4945 if (prevails)
4946 {
4947 ipa_alloc_node_params (node, param_count);
4948 for (k = 0; k < param_count; k++)
4949 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4950 if (ipa_get_param_count (info) != 0)
4951 info->analysis_done = true;
4952 info->node_enqueued = false;
4953 }
4954 else
4955 for (k = 0; k < param_count; k++)
4956 streamer_read_uhwi (ib);
4957
4958 bp = streamer_read_bitpack (ib);
4959 for (k = 0; k < param_count; k++)
4960 {
4961 bool used = bp_unpack_value (&bp, 1);
4962
4963 if (prevails)
4964 ipa_set_param_used (info, k, used);
4965 }
4966 for (k = 0; k < param_count; k++)
4967 {
4968 int nuses = streamer_read_hwi (ib);
4969 tree type = stream_read_tree (ib, data_in);
4970
4971 if (prevails)
4972 {
4973 ipa_set_controlled_uses (info, k, nuses);
4974 (*info->descriptors)[k].decl_or_type = type;
4975 }
4976 }
4977 for (e = node->callees; e; e = e->next_callee)
4978 ipa_read_edge_info (ib, data_in, e, prevails);
4979 for (e = node->indirect_calls; e; e = e->next_callee)
4980 {
4981 ipa_read_edge_info (ib, data_in, e, prevails);
4982 ipa_read_indirect_edge_info (ib, data_in, e, info);
4983 }
4984 }
4985
4986 /* Write jump functions for nodes in SET. */
4987
4988 void
4989 ipa_prop_write_jump_functions (void)
4990 {
4991 struct cgraph_node *node;
4992 struct output_block *ob;
4993 unsigned int count = 0;
4994 lto_symtab_encoder_iterator lsei;
4995 lto_symtab_encoder_t encoder;
4996
4997 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4998 return;
4999
5000 ob = create_output_block (LTO_section_jump_functions);
5001 encoder = ob->decl_state->symtab_node_encoder;
5002 ob->symbol = NULL;
5003 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5004 lsei_next_function_in_partition (&lsei))
5005 {
5006 node = lsei_cgraph_node (lsei);
5007 if (node->has_gimple_body_p ()
5008 && IPA_NODE_REF (node) != NULL)
5009 count++;
5010 }
5011
5012 streamer_write_uhwi (ob, count);
5013
5014 /* Process all of the functions. */
5015 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5016 lsei_next_function_in_partition (&lsei))
5017 {
5018 node = lsei_cgraph_node (lsei);
5019 if (node->has_gimple_body_p ()
5020 && IPA_NODE_REF (node) != NULL)
5021 ipa_write_node_info (ob, node);
5022 }
5023 streamer_write_char_stream (ob->main_stream, 0);
5024 produce_asm (ob, NULL);
5025 destroy_output_block (ob);
5026 }
5027
5028 /* Read section in file FILE_DATA of length LEN with data DATA. */
5029
5030 static void
5031 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5032 size_t len)
5033 {
5034 const struct lto_function_header *header =
5035 (const struct lto_function_header *) data;
5036 const int cfg_offset = sizeof (struct lto_function_header);
5037 const int main_offset = cfg_offset + header->cfg_size;
5038 const int string_offset = main_offset + header->main_size;
5039 class data_in *data_in;
5040 unsigned int i;
5041 unsigned int count;
5042
5043 lto_input_block ib_main ((const char *) data + main_offset,
5044 header->main_size, file_data->mode_table);
5045
5046 data_in =
5047 lto_data_in_create (file_data, (const char *) data + string_offset,
5048 header->string_size, vNULL);
5049 count = streamer_read_uhwi (&ib_main);
5050
5051 for (i = 0; i < count; i++)
5052 {
5053 unsigned int index;
5054 struct cgraph_node *node;
5055 lto_symtab_encoder_t encoder;
5056
5057 index = streamer_read_uhwi (&ib_main);
5058 encoder = file_data->symtab_node_encoder;
5059 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5060 index));
5061 gcc_assert (node->definition);
5062 ipa_read_node_info (&ib_main, node, data_in);
5063 }
5064 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5065 len);
5066 lto_data_in_delete (data_in);
5067 }
5068
5069 /* Read ipcp jump functions. */
5070
5071 void
5072 ipa_prop_read_jump_functions (void)
5073 {
5074 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5075 struct lto_file_decl_data *file_data;
5076 unsigned int j = 0;
5077
5078 ipa_check_create_node_params ();
5079 ipa_check_create_edge_args ();
5080 ipa_register_cgraph_hooks ();
5081
5082 while ((file_data = file_data_vec[j++]))
5083 {
5084 size_t len;
5085 const char *data
5086 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5087 &len);
5088 if (data)
5089 ipa_prop_read_section (file_data, data, len);
5090 }
5091 }
5092
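/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
   replacement values and, if present, the known value ranges and known bits
   of its parameters, to OB.  */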
5093 void
5094 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5095 {
5096 int node_ref;
5097 unsigned int count = 0;
5098 lto_symtab_encoder_t encoder;
5099 struct ipa_agg_replacement_value *aggvals, *av;
5100
5101 aggvals = ipa_get_agg_replacements_for_node (node);
5102 encoder = ob->decl_state->symtab_node_encoder;
5103 node_ref = lto_symtab_encoder_encode (encoder, node);
5104 streamer_write_uhwi (ob, node_ref);
5105
5106 for (av = aggvals; av; av = av->next)
5107 count++;
5108 streamer_write_uhwi (ob, count);
5109
5110 for (av = aggvals; av; av = av->next)
5111 {
5112 struct bitpack_d bp;
5113
5114 streamer_write_uhwi (ob, av->offset);
5115 streamer_write_uhwi (ob, av->index);
5116 stream_write_tree (ob, av->value, true);
5117
5118 bp = bitpack_create (ob->main_stream);
5119 bp_pack_value (&bp, av->by_ref, 1);
5120 streamer_write_bitpack (&bp);
5121 }
5122
5123 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5124 if (ts && vec_safe_length (ts->m_vr) > 0)
5125 {
5126 count = ts->m_vr->length ();
5127 streamer_write_uhwi (ob, count);
5128 for (unsigned i = 0; i < count; ++i)
5129 {
5130 struct bitpack_d bp;
5131 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5132 bp = bitpack_create (ob->main_stream);
5133 bp_pack_value (&bp, parm_vr->known, 1);
5134 streamer_write_bitpack (&bp);
5135 if (parm_vr->known)
5136 {
5137 streamer_write_enum (ob->main_stream, value_rang_type,
5138 VR_LAST, parm_vr->type);
5139 streamer_write_wide_int (ob, parm_vr->min);
5140 streamer_write_wide_int (ob, parm_vr->max);
5141 }
5142 }
5143 }
5144 else
5145 streamer_write_uhwi (ob, 0);
5146
5147 if (ts && vec_safe_length (ts->bits) > 0)
5148 {
5149 count = ts->bits->length ();
5150 streamer_write_uhwi (ob, count);
5151
5152 for (unsigned i = 0; i < count; ++i)
5153 {
5154 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5155 struct bitpack_d bp = bitpack_create (ob->main_stream);
5156 bp_pack_value (&bp, !!bits_jfunc, 1);
5157 streamer_write_bitpack (&bp);
5158 if (bits_jfunc)
5159 {
5160 streamer_write_widest_int (ob, bits_jfunc->value);
5161 streamer_write_widest_int (ob, bits_jfunc->mask);
5162 }
5163 }
5164 }
5165 else
5166 streamer_write_uhwi (ob, 0);
5167 }
5168
5169 /* Stream in the IPA-CP transformation summary (aggregate replacements,
5170 value ranges and known bits) for NODE from IB. */
5170
5171 static void
5172 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5173 data_in *data_in)
5174 {
5175 struct ipa_agg_replacement_value *aggvals = NULL;
5176 unsigned int count, i;
5177
5178 count = streamer_read_uhwi (ib);
5179 for (i = 0; i <count; i++)
5180 {
5181 struct ipa_agg_replacement_value *av;
5182 struct bitpack_d bp;
5183
5184 av = ggc_alloc<ipa_agg_replacement_value> ();
5185 av->offset = streamer_read_uhwi (ib);
5186 av->index = streamer_read_uhwi (ib);
5187 av->value = stream_read_tree (ib, data_in);
5188 bp = streamer_read_bitpack (ib);
5189 av->by_ref = bp_unpack_value (&bp, 1);
5190 av->next = aggvals;
5191 aggvals = av;
5192 }
5193 ipa_set_node_agg_value_chain (node, aggvals);
5194
5195 count = streamer_read_uhwi (ib);
5196 if (count > 0)
5197 {
5198 ipcp_transformation_initialize ();
5199 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5200 vec_safe_grow_cleared (ts->m_vr, count);
5201 for (i = 0; i < count; i++)
5202 {
5203 ipa_vr *parm_vr;
5204 parm_vr = &(*ts->m_vr)[i];
5205 struct bitpack_d bp;
5206 bp = streamer_read_bitpack (ib);
5207 parm_vr->known = bp_unpack_value (&bp, 1);
5208 if (parm_vr->known)
5209 {
5210 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5211 VR_LAST);
5212 parm_vr->min = streamer_read_wide_int (ib);
5213 parm_vr->max = streamer_read_wide_int (ib);
5214 }
5215 }
5216 }
5217 count = streamer_read_uhwi (ib);
5218 if (count > 0)
5219 {
5220 ipcp_transformation_initialize ();
5221 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5222 vec_safe_grow_cleared (ts->bits, count);
5223
5224 for (i = 0; i < count; i++)
5225 {
5226 struct bitpack_d bp = streamer_read_bitpack (ib);
5227 bool known = bp_unpack_value (&bp, 1);
5228 if (known)
5229 {
5230 const widest_int value = streamer_read_widest_int (ib);
5231 const widest_int mask = streamer_read_widest_int (ib);
5232 ipa_bits *bits
5233 = ipa_get_ipa_bits_for_value (value, mask);
5234 (*ts->bits)[i] = bits;
5235 }
5236 }
5237 }
5238 }
5239
5240 /* Write the IPA-CP transformation summaries of all nodes in the set. */
5241
5242 void
5243 ipcp_write_transformation_summaries (void)
5244 {
5245 struct cgraph_node *node;
5246 struct output_block *ob;
5247 unsigned int count = 0;
5248 lto_symtab_encoder_iterator lsei;
5249 lto_symtab_encoder_t encoder;
5250
5251 ob = create_output_block (LTO_section_ipcp_transform);
5252 encoder = ob->decl_state->symtab_node_encoder;
5253 ob->symbol = NULL;
5254 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5255 lsei_next_function_in_partition (&lsei))
5256 {
5257 node = lsei_cgraph_node (lsei);
5258 if (node->has_gimple_body_p ())
5259 count++;
5260 }
5261
5262 streamer_write_uhwi (ob, count);
5263
5264 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5265 lsei_next_function_in_partition (&lsei))
5266 {
5267 node = lsei_cgraph_node (lsei);
5268 if (node->has_gimple_body_p ())
5269 write_ipcp_transformation_info (ob, node);
5270 }
5271 streamer_write_char_stream (ob->main_stream, 0);
5272 produce_asm (ob, NULL);
5273 destroy_output_block (ob);
5274 }
5275
5276 /* Read replacements section in file FILE_DATA of length LEN with data
5277 DATA. */
5278
5279 static void
5280 read_replacements_section (struct lto_file_decl_data *file_data,
5281 const char *data,
5282 size_t len)
5283 {
5284 const struct lto_function_header *header =
5285 (const struct lto_function_header *) data;
5286 const int cfg_offset = sizeof (struct lto_function_header);
5287 const int main_offset = cfg_offset + header->cfg_size;
5288 const int string_offset = main_offset + header->main_size;
5289 class data_in *data_in;
5290 unsigned int i;
5291 unsigned int count;
5292
5293 lto_input_block ib_main ((const char *) data + main_offset,
5294 header->main_size, file_data->mode_table);
5295
5296 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5297 header->string_size, vNULL);
5298 count = streamer_read_uhwi (&ib_main);
5299
5300 for (i = 0; i < count; i++)
5301 {
5302 unsigned int index;
5303 struct cgraph_node *node;
5304 lto_symtab_encoder_t encoder;
5305
5306 index = streamer_read_uhwi (&ib_main);
5307 encoder = file_data->symtab_node_encoder;
5308 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5309 index));
5310 gcc_assert (node->definition);
5311 read_ipcp_transformation_info (&ib_main, node, data_in);
5312 }
5313 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5314 len);
5315 lto_data_in_delete (data_in);
5316 }
5317
5318 /* Read IPA-CP transformation summaries, including aggregate replacements. */
5319
5320 void
5321 ipcp_read_transformation_summaries (void)
5322 {
5323 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5324 struct lto_file_decl_data *file_data;
5325 unsigned int j = 0;
5326
5327 while ((file_data = file_data_vec[j++]))
5328 {
5329 size_t len;
5330 const char *data
5331 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5332 &len);
5333 if (data)
5334 read_replacements_section (file_data, data, len);
5335 }
5336 }
5337
5338 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5339 NODE. */
5340
5341 static void
5342 adjust_agg_replacement_values (struct cgraph_node *node,
5343 struct ipa_agg_replacement_value *aggval)
5344 {
5345 struct ipa_agg_replacement_value *v;
5346
5347 if (!node->clone.param_adjustments)
5348 return;
5349
5350 auto_vec<int, 16> new_indices;
5351 node->clone.param_adjustments->get_updated_indices (&new_indices);
5352 for (v = aggval; v; v = v->next)
5353 {
5354 gcc_checking_assert (v->index >= 0);
5355
5356 if ((unsigned) v->index < new_indices.length ())
5357 v->index = new_indices[v->index];
5358 else
5359 /* This can happen if we know about a constant passed by reference by
5360 an argument which is never actually used for anything, let alone
5361 loading that constant. */
5362 v->index = -1;
5363 }
5364 }
5365
5366 /* Dominator walker driving the ipcp modification phase. */
5367
5368 class ipcp_modif_dom_walker : public dom_walker
5369 {
5370 public:
5371 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5372 vec<ipa_param_descriptor, va_gc> *descs,
5373 struct ipa_agg_replacement_value *av,
5374 bool *sc, bool *cc)
5375 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5376 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5377
5378 virtual edge before_dom_children (basic_block);
5379
5380 private:
5381 struct ipa_func_body_info *m_fbi;
5382 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5383 struct ipa_agg_replacement_value *m_aggval;
5384 bool *m_something_changed, *m_cfg_changed;
5385 };
5386
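/* Replace loads from aggregates passed to (or pointed to by) parameters with
   the constants IPA-CP has recorded for them.  For example, if the summary
   says that the memory pointed to by the first parameter always contains the
   constant 4 at a particular offset, a load of matching offset and size from
   that parameter is rewritten into an assignment of 4.  */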
5387 edge
5388 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5389 {
5390 gimple_stmt_iterator gsi;
5391 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5392 {
5393 struct ipa_agg_replacement_value *v;
5394 gimple *stmt = gsi_stmt (gsi);
5395 tree rhs, val, t;
5396 HOST_WIDE_INT offset;
5397 poly_int64 size;
5398 int index;
5399 bool by_ref, vce;
5400
5401 if (!gimple_assign_load_p (stmt))
5402 continue;
5403 rhs = gimple_assign_rhs1 (stmt);
5404 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5405 continue;
5406
5407 vce = false;
5408 t = rhs;
5409 while (handled_component_p (t))
5410 {
5411 /* V_C_E can do things like convert an array of integers to one
5412 bigger integer and similar things we do not handle below. */
5413 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5414 {
5415 vce = true;
5416 break;
5417 }
5418 t = TREE_OPERAND (t, 0);
5419 }
5420 if (vce)
5421 continue;
5422
5423 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5424 &offset, &size, &by_ref))
5425 continue;
5426 for (v = m_aggval; v; v = v->next)
5427 if (v->index == index
5428 && v->offset == offset)
5429 break;
5430 if (!v
5431 || v->by_ref != by_ref
5432 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5433 size))
5434 continue;
5435
5436 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5437 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5438 {
5439 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5440 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5441 else if (TYPE_SIZE (TREE_TYPE (rhs))
5442 == TYPE_SIZE (TREE_TYPE (v->value)))
5443 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5444 else
5445 {
5446 if (dump_file)
5447 {
5448 fprintf (dump_file, " const ");
5449 print_generic_expr (dump_file, v->value);
5450 fprintf (dump_file, " can't be converted to type of ");
5451 print_generic_expr (dump_file, rhs);
5452 fprintf (dump_file, "\n");
5453 }
5454 continue;
5455 }
5456 }
5457 else
5458 val = v->value;
5459
5460 if (dump_file && (dump_flags & TDF_DETAILS))
5461 {
5462 fprintf (dump_file, "Modifying stmt:\n ");
5463 print_gimple_stmt (dump_file, stmt, 0);
5464 }
5465 gimple_assign_set_rhs_from_tree (&gsi, val);
5466 update_stmt (stmt);
5467
5468 if (dump_file && (dump_flags & TDF_DETAILS))
5469 {
5470 fprintf (dump_file, "into:\n ");
5471 print_gimple_stmt (dump_file, stmt, 0);
5472 fprintf (dump_file, "\n");
5473 }
5474
5475 *m_something_changed = true;
5476 if (maybe_clean_eh_stmt (stmt)
5477 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5478 *m_cfg_changed = true;
5479 }
5480 return NULL;
5481 }
5482
5483 /* Return true if we have recorded VALUE and MASK information for PARM.
5484 If so, set VALUE and MASK accordingly. */
5485
5486 bool
5487 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5488 {
5489 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5490 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5491 if (!ts || vec_safe_length (ts->bits) == 0)
5492 return false;
5493
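 /* Determine the position of PARM among the formal parameters.  */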
5494 int i = 0;
5495 for (tree p = DECL_ARGUMENTS (current_function_decl);
5496 p != parm; p = DECL_CHAIN (p))
5497 {
5498 i++;
5499 /* Ignore static chain. */
5500 if (!p)
5501 return false;
5502 }
5503
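 /* For clones with adjusted parameters, TS->bits is indexed by the
    positions of the original function's parameters, so translate the
    index accordingly.  */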
5504 if (cnode->clone.param_adjustments)
5505 {
5506 i = cnode->clone.param_adjustments->get_original_index (i);
5507 if (i < 0)
5508 return false;
5509 }
5510
5511 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5512 if (!bits[i])
5513 return false;
5514 *mask = bits[i]->mask;
5515 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5516 return true;
5517 }
5518
5519
5520 /* Update the known-bits information of formal parameters as described in
5521 ipcp_transformation. */
5522
5523 static void
5524 ipcp_update_bits (struct cgraph_node *node)
5525 {
5526 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5527
5528 if (!ts || vec_safe_length (ts->bits) == 0)
5529 return;
5530 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5531 unsigned count = bits.length ();
5532 if (!count)
5533 return;
5534
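 /* TS->bits is indexed by the parameter positions of the original
    function; for clones with adjusted parameters, obtain the mapping to
    the positions of the surviving parameters of NODE's declaration.  */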
5535 auto_vec<int, 16> new_indices;
5536 bool need_remapping = false;
5537 if (node->clone.param_adjustments)
5538 {
5539 node->clone.param_adjustments->get_updated_indices (&new_indices);
5540 need_remapping = true;
5541 }
5542 auto_vec <tree, 16> parm_decls;
5543 push_function_arg_decls (&parm_decls, node->decl);
5544
5545 for (unsigned i = 0; i < count; ++i)
5546 {
5547 tree parm;
5548 if (need_remapping)
5549 {
5550 if (i >= new_indices.length ())
5551 continue;
5552 int idx = new_indices[i];
5553 if (idx < 0)
5554 continue;
5555 parm = parm_decls[idx];
5556 }
5557 else
5558 parm = parm_decls[i];
5559 gcc_checking_assert (parm);
5560
5561
5562 if (!bits[i]
5563 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5564 || POINTER_TYPE_P (TREE_TYPE (parm)))
5565 || !is_gimple_reg (parm))
5566 continue;
5567
5568 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5569 if (!ddef)
5570 continue;
5571
5572 if (dump_file)
5573 {
5574 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5575 print_hex (bits[i]->mask, dump_file);
5576 fprintf (dump_file, "\n");
5577 }
5578
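 /* For integral parameters, record the possibly nonzero bits (mask or'ed
    with value) on the default-def SSA name; for pointers, the mask/value
    pair encodes known alignment and misalignment.  */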
5579 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5580 {
5581 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5582 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5583
5584 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5585 | wide_int::from (bits[i]->value, prec, sgn);
5586 set_nonzero_bits (ddef, nonzero_bits);
5587 }
5588 else
5589 {
5590 unsigned tem = bits[i]->mask.to_uhwi ();
5591 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5592 unsigned align = tem & -tem;
5593 unsigned misalign = bitpos & (align - 1);
5594
5595 if (align > 1)
5596 {
5597 if (dump_file)
5598 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5599
5600 unsigned old_align, old_misalign;
5601 struct ptr_info_def *pi = get_ptr_info (ddef);
5602 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5603
5604 if (old_known
5605 && old_align > align)
5606 {
5607 if (dump_file)
5608 {
5609 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5610 if ((old_misalign & (align - 1)) != misalign)
5611 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5612 old_misalign, misalign);
5613 }
5614 continue;
5615 }
5616
5617 if (old_known
5618 && ((misalign & (old_align - 1)) != old_misalign)
5619 && dump_file)
5620 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5621 old_misalign, misalign);
5622
5623 set_ptr_info_alignment (pi, align, misalign);
5624 }
5625 }
5626 }
5627 }
5628
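/* Return true if this ipa_vr describes a range of EXPR_TYPE that cannot
   contain zero, i.e. it is either ~[0, 0] or the unsigned range
   [1, MAX].  */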
5629 bool
5630 ipa_vr::nonzero_p (tree expr_type) const
5631 {
5632 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5633 return true;
5634
5635 unsigned prec = TYPE_PRECISION (expr_type);
5636 return (type == VR_RANGE
5637 && TYPE_UNSIGNED (expr_type)
5638 && wi::eq_p (min, wi::one (prec))
5639 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5640 }
5641
5642 /* Update the value ranges of formal parameters as described in
5643 ipcp_transformation. */
5644
5645 static void
5646 ipcp_update_vr (struct cgraph_node *node)
5647 {
5648 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5649 if (!ts || vec_safe_length (ts->m_vr) == 0)
5650 return;
5651 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5652 unsigned count = vr.length ();
5653 if (!count)
5654 return;
5655
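 /* As in ipcp_update_bits, map the original parameter positions used by
    TS->m_vr to the positions of the parameters of a possibly adjusted
    clone.  */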
5656 auto_vec<int, 16> new_indices;
5657 bool need_remapping = false;
5658 if (node->clone.param_adjustments)
5659 {
5660 node->clone.param_adjustments->get_updated_indices (&new_indices);
5661 need_remapping = true;
5662 }
5663 auto_vec <tree, 16> parm_decls;
5664 push_function_arg_decls (&parm_decls, node->decl);
5665
5666 for (unsigned i = 0; i < count; ++i)
5667 {
5668 tree parm;
5669 int remapped_idx;
5670 if (need_remapping)
5671 {
5672 if (i >= new_indices.length ())
5673 continue;
5674 remapped_idx = new_indices[i];
5675 if (remapped_idx < 0)
5676 continue;
5677 }
5678 else
5679 remapped_idx = i;
5680
5681 parm = parm_decls[remapped_idx];
5682
5683 gcc_checking_assert (parm);
5684 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5685
5686 if (!ddef || !is_gimple_reg (parm))
5687 continue;
5688
5689 if (vr[i].known
5690 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5691 {
5692 tree type = TREE_TYPE (ddef);
5693 unsigned prec = TYPE_PRECISION (type);
5694 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5695 {
5696 if (dump_file)
5697 {
5698 fprintf (dump_file, "Setting value range of param %u "
5699 "(now %i) ", i, remapped_idx);
5700 fprintf (dump_file, "%s[",
5701 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5702 print_decs (vr[i].min, dump_file);
5703 fprintf (dump_file, ", ");
5704 print_decs (vr[i].max, dump_file);
5705 fprintf (dump_file, "]\n");
5706 }
5707 set_range_info (ddef, vr[i].type,
5708 wide_int_storage::from (vr[i].min, prec,
5709 TYPE_SIGN (type)),
5710 wide_int_storage::from (vr[i].max, prec,
5711 TYPE_SIGN (type)));
5712 }
5713 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5714 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5715 {
5716 if (dump_file)
5717 fprintf (dump_file, "Setting nonnull for %u\n", i);
5718 set_ptr_nonnull (ddef);
5719 }
5720 }
5721 }
5722 }
5723
5724 /* IPCP transformation phase: update known bits and value ranges of formal parameters and replace loads from aggregate parameters with the constants recorded for them by IPA-CP. */
5725
5726 unsigned int
5727 ipcp_transform_function (struct cgraph_node *node)
5728 {
5729 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5730 struct ipa_func_body_info fbi;
5731 struct ipa_agg_replacement_value *aggval;
5732 int param_count;
5733 bool cfg_changed = false, something_changed = false;
5734
5735 gcc_checking_assert (cfun);
5736 gcc_checking_assert (current_function_decl);
5737
5738 if (dump_file)
5739 fprintf (dump_file, "Modification phase of node %s\n",
5740 node->dump_name ());
5741
5742 ipcp_update_bits (node);
5743 ipcp_update_vr (node);
5744 aggval = ipa_get_agg_replacements_for_node (node);
5745 if (!aggval)
5746 return 0;
5747 param_count = count_formal_params (node->decl);
5748 if (param_count == 0)
5749 return 0;
5750 adjust_agg_replacement_values (node, aggval);
5751 if (dump_file)
5752 ipa_dump_agg_replacement_values (dump_file, aggval);
5753
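 /* Set up the function body info used by the aggregate load analysis in
    the dominator walk below.  */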
5754 fbi.node = node;
5755 fbi.info = NULL;
5756 fbi.bb_infos = vNULL;
5757 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5758 fbi.param_count = param_count;
5759 fbi.aa_walk_budget = param_ipa_max_aa_steps;
5760
5761 vec_safe_grow_cleared (descriptors, param_count);
5762 ipa_populate_param_decls (node, *descriptors);
5763 calculate_dominance_info (CDI_DOMINATORS);
5764 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5765 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5766
5767 int i;
5768 struct ipa_bb_info *bi;
5769 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5770 free_ipa_bb_info (bi);
5771 fbi.bb_infos.release ();
5772 free_dominance_info (CDI_DOMINATORS);
5773
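 /* The recorded aggregate values, bits and value ranges have now been
    applied; drop them from the transformation summary.  */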
5774 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5775 s->agg_values = NULL;
5776 s->bits = NULL;
5777 s->m_vr = NULL;
5778
5779 vec_free (descriptors);
5780
5781 if (!something_changed)
5782 return 0;
5783
5784 if (cfg_changed)
5785 delete_unreachable_blocks_update_callgraph (node, false);
5786
5787 return TODO_update_ssa_only_virtuals;
5788 }
5789
5790
5791 /* Return true if OTHER describes the same aggregate value. */
5792 bool
5793 ipa_agg_value::equal_to (const ipa_agg_value &other)
5794 {
5795 return offset == other.offset
5796 && operand_equal_p (value, other.value, 0);
5797 }
5798 #include "gt-ipa-prop.h"