]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-prop.c
[Ada] Remove Determine_License
[thirdparty/gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Per-node summary holding IPA-CP transformation data.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
63
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  /* Hash P by mixing the low HOST_WIDE_INT parts of its value and mask.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  /* Two entries are interchangeable iff both value and mask are equal.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }

  /* Empty slots are represented by null pointers.  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }

  /* Deleted slots are marked with the otherwise-invalid pointer value 1.  */
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }

  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  /* Hash P by its kind and the trees forming its min/max bounds.  */
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }

  /* Defer equality to value_range::equal_p.  */
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->equal_p (*b);
  }

  /* Empty slots are represented by null pointers.  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }

  /* Deleted slots are marked with the otherwise-invalid pointer value 1.  */
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }

  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
152
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
172
173 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
174 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
175
176 static bool
177 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
178 {
179 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
180
181 if (!fs_opts)
182 return false;
183 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
184 }
185
186 /* Return index of the formal whose tree is PTREE in function which corresponds
187 to INFO. */
188
189 static int
190 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
191 tree ptree)
192 {
193 int i, count;
194
195 count = vec_safe_length (descriptors);
196 for (i = 0; i < count; i++)
197 if ((*descriptors)[i].decl_or_type == ptree)
198 return i;
199
200 return -1;
201 }
202
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  Return -1 if PTREE is not a parameter of that function.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
211
212 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
213 NODE. */
214
215 static void
216 ipa_populate_param_decls (struct cgraph_node *node,
217 vec<ipa_param_descriptor, va_gc> &descriptors)
218 {
219 tree fndecl;
220 tree fnargs;
221 tree parm;
222 int param_num;
223
224 fndecl = node->decl;
225 gcc_assert (gimple_has_body_p (fndecl));
226 fnargs = DECL_ARGUMENTS (fndecl);
227 param_num = 0;
228 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
229 {
230 descriptors[param_num].decl_or_type = parm;
231 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
232 descriptors[param_num].move_cost = cost;
233 /* Watch overflow, move_cost is a bitfield. */
234 gcc_checking_assert (cost == descriptors[param_num].move_cost);
235 param_num++;
236 }
237 }
238
239 /* Return how many formal parameters FNDECL has. */
240
241 int
242 count_formal_params (tree fndecl)
243 {
244 tree parm;
245 int count = 0;
246 gcc_assert (gimple_has_body_p (fndecl));
247
248 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
249 count++;
250
251 return count;
252 }
253
/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE: the parameter index and, when the
   descriptor holds a decl or type, its generic-tree form.  (The previous
   comment here described a different function and was stale.)  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
268
269 /* If necessary, allocate vector of parameter descriptors in info of NODE.
270 Return true if they were allocated, false if not. */
271
272 static bool
273 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
274 {
275 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
276
277 if (!info->descriptors && param_count)
278 {
279 vec_safe_grow_cleared (info->descriptors, param_count);
280 return true;
281 }
282 else
283 return false;
284 }
285
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Does nothing if the descriptors already exist.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  /* ipa_alloc_node_params returns true only when it actually allocated the
     vector, so the decls get populated exactly once.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
299
300 /* Print the jump functions associated with call graph edge CS to file F. */
301
302 static void
303 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
304 {
305 int i, count;
306
307 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
308 for (i = 0; i < count; i++)
309 {
310 struct ipa_jump_func *jump_func;
311 enum jump_func_type type;
312
313 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
314 type = jump_func->type;
315
316 fprintf (f, " param %d: ", i);
317 if (type == IPA_JF_UNKNOWN)
318 fprintf (f, "UNKNOWN\n");
319 else if (type == IPA_JF_CONST)
320 {
321 tree val = jump_func->value.constant.value;
322 fprintf (f, "CONST: ");
323 print_generic_expr (f, val);
324 if (TREE_CODE (val) == ADDR_EXPR
325 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
326 {
327 fprintf (f, " -> ");
328 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
329 }
330 fprintf (f, "\n");
331 }
332 else if (type == IPA_JF_PASS_THROUGH)
333 {
334 fprintf (f, "PASS THROUGH: ");
335 fprintf (f, "%d, op %s",
336 jump_func->value.pass_through.formal_id,
337 get_tree_code_name(jump_func->value.pass_through.operation));
338 if (jump_func->value.pass_through.operation != NOP_EXPR)
339 {
340 fprintf (f, " ");
341 print_generic_expr (f, jump_func->value.pass_through.operand);
342 }
343 if (jump_func->value.pass_through.agg_preserved)
344 fprintf (f, ", agg_preserved");
345 fprintf (f, "\n");
346 }
347 else if (type == IPA_JF_ANCESTOR)
348 {
349 fprintf (f, "ANCESTOR: ");
350 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
351 jump_func->value.ancestor.formal_id,
352 jump_func->value.ancestor.offset);
353 if (jump_func->value.ancestor.agg_preserved)
354 fprintf (f, ", agg_preserved");
355 fprintf (f, "\n");
356 }
357
358 if (jump_func->agg.items)
359 {
360 struct ipa_agg_jf_item *item;
361 int j;
362
363 fprintf (f, " Aggregate passed by %s:\n",
364 jump_func->agg.by_ref ? "reference" : "value");
365 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
366 {
367 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
368 item->offset);
369 fprintf (f, "type: ");
370 print_generic_expr (f, item->type);
371 fprintf (f, ", ");
372 if (item->jftype == IPA_JF_PASS_THROUGH)
373 fprintf (f, "PASS THROUGH: %d,",
374 item->value.pass_through.formal_id);
375 else if (item->jftype == IPA_JF_LOAD_AGG)
376 {
377 fprintf (f, "LOAD AGG: %d",
378 item->value.pass_through.formal_id);
379 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
380 item->value.load_agg.offset,
381 item->value.load_agg.by_ref ? "reference"
382 : "value");
383 }
384
385 if (item->jftype == IPA_JF_PASS_THROUGH
386 || item->jftype == IPA_JF_LOAD_AGG)
387 {
388 fprintf (f, " op %s",
389 get_tree_code_name (item->value.pass_through.operation));
390 if (item->value.pass_through.operation != NOP_EXPR)
391 {
392 fprintf (f, " ");
393 print_generic_expr (f, item->value.pass_through.operand);
394 }
395 }
396 else if (item->jftype == IPA_JF_CONST)
397 {
398 fprintf (f, "CONST: ");
399 print_generic_expr (f, item->value.constant);
400 }
401 else if (item->jftype == IPA_JF_UNKNOWN)
402 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
403 tree_to_uhwi (TYPE_SIZE (item->type)));
404 fprintf (f, "\n");
405 }
406 }
407
408 class ipa_polymorphic_call_context *ctx
409 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
410 if (ctx && !ctx->useless_p ())
411 {
412 fprintf (f, " Context: ");
413 ctx->dump (dump_file);
414 }
415
416 if (jump_func->bits)
417 {
418 fprintf (f, " value: ");
419 print_hex (jump_func->bits->value, f);
420 fprintf (f, ", mask: ");
421 print_hex (jump_func->bits->mask, f);
422 fprintf (f, "\n");
423 }
424 else
425 fprintf (f, " Unknown bits\n");
426
427 if (jump_func->m_vr)
428 {
429 fprintf (f, " VR ");
430 fprintf (f, "%s[",
431 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
432 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
433 fprintf (f, ", ");
434 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
435 fprintf (f, "]\n");
436 }
437 else
438 fprintf (f, " Unknown VR\n");
439 }
440 }
441
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Covers both direct callees and indirect call sites.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  /* Direct calls first.  */
  for (cs = node->callees; cs; cs = cs->next_callee)
    {

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* Then indirect call sites, including the description of what the call
     depends on (aggregate contents, member pointer, polymorphic type).  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
496
497 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
498
499 void
500 ipa_print_all_jump_functions (FILE *f)
501 {
502 struct cgraph_node *node;
503
504 fprintf (f, "\nJump functions:\n");
505 FOR_EACH_FUNCTION (node)
506 {
507 ipa_print_node_jump_functions (f, node);
508 }
509 }
510
/* Set JFUNC to be a jump function that conveys no information at all
   ("know really nothing").  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
518
/* Set DST to be a copy of another constant jump function SRC (to be used by
   jump function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
531
/* Set JFUNC to be a constant jump function with value CONSTANT, taken by call
   graph edge CS.  For addresses of functions a reference descriptor is
   allocated so that cloning/removal can keep reference counts accurate.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
555
/* Set JFUNC to be a simple pass-through jump function of formal parameter
   FORMAL_ID, i.e. a NOP_EXPR with no operand.  AGG_PRESERVED says whether
   aggregate contents of the parameter are known to be preserved.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
567
/* Set JFUNC to be a unary pass through jump function: OPERATION applied to
   formal parameter FORMAL_ID, with no extra operand.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function: binary OPERATION
   applied to formal parameter FORMAL_ID and constant OPERAND.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
592
/* Set JFUNC to be an ancestor jump function: the value is formal parameter
   FORMAL_ID with OFFSET added to it.  AGG_PRESERVED says whether aggregate
   contents are known to be preserved.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
604
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
614
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
629
630 /* Return true if STMT can modify a virtual method table pointer.
631
632 This function makes special assumptions about both constructors and
633 destructors which are all the functions that are allowed to alter the VMT
634 pointers. It assumes that destructors begin with assignment into all VMT
635 pointers and that constructors essentially look in the following way:
636
637 1) The very first thing they do is that they call constructors of ancestor
638 sub-objects that have them.
639
640 2) Then VMT pointers of this and all its ancestors is set to new values
641 corresponding to the type corresponding to the constructor.
642
643 3) Only afterwards, other stuff such as constructor of member sub-objects
644 and the code written by the user is run. Only this may include calling
645 virtual functions, directly or indirectly.
646
647 There is no way to call a constructor of an ancestor sub-object in any
648 other way.
649
650 This means that we do not have to care whether constructors get the correct
651 type information because they will always change it (in fact, if we define
652 the type to be given by the VMT pointer, it is undefined).
653
654 The most important fact to derive from the above is that if, for some
655 statement in the section 3, we try to detect whether the dynamic type has
656 changed, we can safely ignore all calls as we examine the function body
657 backwards until we reach statements in section 2 because these calls cannot
658 be ancestor constructors or destructors (if the input is not bogus) and so
659 do not change the dynamic type (this holds true only for automatically
660 allocated objects but at the moment we devirtualize only these). We then
661 must detect that statements in section 2 change the dynamic type and can try
662 to derive the new type. That is enough and we can stop, we will never see
663 the calls into constructors of sub-objects in this code. Therefore we can
664 safely ignore all call statements that we traverse.
665 */
666
667 static bool
668 stmt_may_be_vtbl_ptr_store (gimple *stmt)
669 {
670 if (is_gimple_call (stmt))
671 return false;
672 if (gimple_clobber_p (stmt))
673 return false;
674 else if (is_gimple_assign (stmt))
675 {
676 tree lhs = gimple_assign_lhs (stmt);
677
678 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
679 {
680 if (flag_strict_aliasing
681 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
682 return false;
683
684 if (TREE_CODE (lhs) == COMPONENT_REF
685 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
686 return false;
687 /* In the future we might want to use get_ref_base_and_extent to find
688 if there is a field corresponding to the offset and if so, proceed
689 almost like if it was a component ref. */
690 }
691 }
692 return true;
693 }
694
695 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
696 to check whether a particular statement may modify the virtual table
697 pointerIt stores its result into DATA, which points to a
698 prop_type_change_info structure. */
699
700 static bool
701 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
702 {
703 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
704 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
705
706 if (stmt_may_be_vtbl_ptr_store (stmt))
707 {
708 tci->type_maybe_changed = true;
709 return true;
710 }
711 else
712 return false;
713 }
714
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of an method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
768
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Restrict the walk to the VMT-pointer-sized access at OFFSET.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* NOTE(review): unlike the parm_*_preserved_p walkers below, the walk
     budget is not decremented here after a successful walk -- confirm this
     is intended.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
821
822 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
823 If it is, return true. ARG is the object itself (not a pointer
824 to it, unless dereferenced). BASE is the base of the memory access as
825 returned by get_ref_base_and_extent, as is the offset. */
826
827 static bool
828 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
829 tree comp_type, gcall *call,
830 HOST_WIDE_INT offset)
831 {
832 if (!flag_devirtualize)
833 return false;
834
835 if (TREE_CODE (base) == MEM_REF
836 && !param_type_may_change_p (current_function_decl,
837 TREE_OPERAND (base, 0),
838 call))
839 return false;
840 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
841 call, offset);
842 }
843
844 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
845 SSA name (its dereference will become the base and the offset is assumed to
846 be zero). */
847
848 static bool
849 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
850 gcall *call)
851 {
852 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
853 if (!flag_devirtualize
854 || !POINTER_TYPE_P (TREE_TYPE (arg)))
855 return false;
856
857 if (!param_type_may_change_p (current_function_decl, arg, call))
858 return false;
859
860 arg = build2 (MEM_REF, ptr_type_node, arg,
861 build_int_cst (ptr_type_node, 0));
862
863 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
864 call, 0);
865 }
866
867 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
868 boolean variable pointed to by DATA. */
869
870 static bool
871 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
872 void *data)
873 {
874 bool *b = (bool *) data;
875 *b = true;
876 return true;
877 }
878
879 /* Find the nearest valid aa status for parameter specified by INDEX that
880 dominates BB. */
881
882 static struct ipa_param_aa_status *
883 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
884 int index)
885 {
886 while (true)
887 {
888 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
889 if (!bb)
890 return NULL;
891 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
892 if (!bi->param_aa_statuses.is_empty ()
893 && bi->param_aa_statuses[index].valid)
894 return &bi->param_aa_statuses[index];
895 }
896 }
897
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared status must not carry any modification flags.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating valid status, if there is one.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
926
927 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
928 a value known not to be modified in this function before reaching the
929 statement STMT. FBI holds information about the function we have so far
930 gathered but do not survive the summary building stage. */
931
932 static bool
933 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
934 gimple *stmt, tree parm_load)
935 {
936 struct ipa_param_aa_status *paa;
937 bool modified = false;
938 ao_ref refd;
939
940 tree base = get_base_address (parm_load);
941 gcc_assert (TREE_CODE (base) == PARM_DECL);
942 if (TREE_READONLY (base))
943 return true;
944
945 gcc_checking_assert (fbi);
946 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
947 if (paa->parm_modified)
948 return false;
949
950 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
951 ao_ref_init (&refd, parm_load);
952 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
953 &modified, NULL, NULL,
954 fbi->aa_walk_budget + 1);
955 if (walked < 0)
956 {
957 modified = true;
958 if (fbi)
959 fbi->aa_walk_budget = 0;
960 }
961 else if (fbi)
962 fbi->aa_walk_budget -= walked;
963 if (paa && modified)
964 paa->parm_modified = true;
965 return !modified;
966 }
967
968 /* If STMT is an assignment that loads a value from an parameter declaration,
969 return the index of the parameter in ipa_node_params which has not been
970 modified. Otherwise return -1. */
971
972 static int
973 load_from_unmodified_param (struct ipa_func_body_info *fbi,
974 vec<ipa_param_descriptor, va_gc> *descriptors,
975 gimple *stmt)
976 {
977 int index;
978 tree op1;
979
980 if (!gimple_assign_single_p (stmt))
981 return -1;
982
983 op1 = gimple_assign_rhs1 (stmt);
984 if (TREE_CODE (op1) != PARM_DECL)
985 return -1;
986
987 index = ipa_get_param_decl_index_1 (descriptors, op1);
988 if (index < 0
989 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
990 return -1;
991
992 return index;
993 }
994
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Walk was aborted: assume the worst and disable further walking.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a positive result so repeated queries stay cheap.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1028
1029 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1030 is known to be unmodified in this function before reaching call statement
1031 CALL into which it is passed. FBI describes the function body. */
1032
1033 static bool
1034 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1035 gimple *call, tree parm)
1036 {
1037 bool modified = false;
1038 ao_ref refd;
1039
1040 /* It's unnecessary to calculate anything about memory contnets for a const
1041 function because it is not goin to use it. But do not cache the result
1042 either. Also, no such calculations for non-pointers. */
1043 if (!gimple_vuse (call)
1044 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1045 return false;
1046
1047 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1048 gimple_bb (call),
1049 index);
1050 if (paa->pt_modified)
1051 return false;
1052
1053 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1054 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1055 &modified, NULL, NULL,
1056 fbi->aa_walk_budget + 1);
1057 if (walked < 0)
1058 {
1059 fbi->aa_walk_budget = 0;
1060 modified = true;
1061 }
1062 else
1063 fbi->aa_walk_budget -= walked;
1064 if (modified)
1065 paa->pt_modified = true;
1066 return !modified;
1067 }
1068
1069 /* Return true if we can prove that OP is a memory reference loading
1070 data from an aggregate passed as a parameter.
1071
1072 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1073 false if it cannot prove that the value has not been modified before the
1074 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1075 if it cannot prove the value has not been modified, in that case it will
1076 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1077
1078 INFO and PARMS_AINFO describe parameters of the current function (but the
1079 latter can be NULL), STMT is the load statement. If function returns true,
1080 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1081 within the aggregate and whether it is a load from a value passed by
1082 reference respectively. */
1083
1084 bool
1085 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1086 vec<ipa_param_descriptor, va_gc> *descriptors,
1087 gimple *stmt, tree op, int *index_p,
1088 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1089 bool *by_ref_p, bool *guaranteed_unmodified)
1090 {
1091 int index;
1092 HOST_WIDE_INT size;
1093 bool reverse;
1094 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1095
1096 if (!base)
1097 return false;
1098
1099 if (DECL_P (base))
1100 {
1101 int index = ipa_get_param_decl_index_1 (descriptors, base);
1102 if (index >= 0
1103 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1104 {
1105 *index_p = index;
1106 *by_ref_p = false;
1107 if (size_p)
1108 *size_p = size;
1109 if (guaranteed_unmodified)
1110 *guaranteed_unmodified = true;
1111 return true;
1112 }
1113 return false;
1114 }
1115
1116 if (TREE_CODE (base) != MEM_REF
1117 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1118 || !integer_zerop (TREE_OPERAND (base, 1)))
1119 return false;
1120
1121 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1122 {
1123 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1124 index = ipa_get_param_decl_index_1 (descriptors, parm);
1125 }
1126 else
1127 {
1128 /* This branch catches situations where a pointer parameter is not a
1129 gimple register, for example:
1130
1131 void hip7(S*) (struct S * p)
1132 {
1133 void (*<T2e4>) (struct S *) D.1867;
1134 struct S * p.1;
1135
1136 <bb 2>:
1137 p.1_1 = p;
1138 D.1867_2 = p.1_1->f;
1139 D.1867_2 ();
1140 gdp = &p;
1141 */
1142
1143 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1144 index = load_from_unmodified_param (fbi, descriptors, def);
1145 }
1146
1147 if (index >= 0)
1148 {
1149 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1150 if (!data_preserved && !guaranteed_unmodified)
1151 return false;
1152
1153 *index_p = index;
1154 *by_ref_p = true;
1155 if (size_p)
1156 *size_p = size;
1157 if (guaranteed_unmodified)
1158 *guaranteed_unmodified = data_preserved;
1159 return true;
1160 }
1161 return false;
1162 }
1163
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  An offset of -1 marks a
     direct (non-aggregate) load from the PARM_DECL itself.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  /* The load is expected to cover the whole accessed type.  */
  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      /* Loads by value only make sense from aggregate-typed parameters.  */
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1224
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First try to find the parameter (if any) that OP1 ultimately comes
     from; TC_SSA is the SSA name whose aggregate pass-through status will
     be queried below.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1 and 2: simple or arithmetic pass-through.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    /* The second operand must be an interprocedural invariant, and
	       for non-comparisons the result type must match the operand
	       type.  */
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	  /* FALLTHRU - the default label is empty, nothing more to do.  */
	default:;
	}
      return;
    }

  /* Case 3: taking the address of a component of a parameter - an ancestor
     jump function.  The RHS must be &something->field.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1365
1366 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1367 it looks like:
1368
1369 iftmp.1_3 = &obj_2(D)->D.1762;
1370
1371 The base of the MEM_REF must be a default definition SSA NAME of a
1372 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1373 whole MEM_REF expression is returned and the offset calculated from any
1374 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1375 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1376
1377 static tree
1378 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1379 {
1380 HOST_WIDE_INT size;
1381 tree expr, parm, obj;
1382 bool reverse;
1383
1384 if (!gimple_assign_single_p (assign))
1385 return NULL_TREE;
1386 expr = gimple_assign_rhs1 (assign);
1387
1388 if (TREE_CODE (expr) != ADDR_EXPR)
1389 return NULL_TREE;
1390 expr = TREE_OPERAND (expr, 0);
1391 obj = expr;
1392 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1393
1394 offset_int mem_offset;
1395 if (!expr
1396 || TREE_CODE (expr) != MEM_REF
1397 || !mem_ref_offset (expr).is_constant (&mem_offset))
1398 return NULL_TREE;
1399 parm = TREE_OPERAND (expr, 0);
1400 if (TREE_CODE (parm) != SSA_NAME
1401 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1402 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1403 return NULL_TREE;
1404
1405 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1406 *obj_p = obj;
1407 return expr;
1408 }
1409
1410
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  /* The PHI must merge exactly one null pointer and one non-default SSA
     name of record pointer type (TMP).  */
  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must be computed as &parm->component in a block with a single
     predecessor (the block with the null check).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor must end with exactly "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1491
1492 /* Inspect the given TYPE and return true iff it has the same structure (the
1493 same number of fields of the same types) as a C++ member pointer. If
1494 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1495 corresponding fields there. */
1496
1497 static bool
1498 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1499 {
1500 tree fld;
1501
1502 if (TREE_CODE (type) != RECORD_TYPE)
1503 return false;
1504
1505 fld = TYPE_FIELDS (type);
1506 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1507 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1508 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1509 return false;
1510
1511 if (method_ptr)
1512 *method_ptr = fld;
1513
1514 fld = DECL_CHAIN (fld);
1515 if (!fld || INTEGRAL_TYPE_P (fld)
1516 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1517 return false;
1518 if (delta)
1519 *delta = fld;
1520
1521 if (DECL_CHAIN (fld))
1522 return false;
1523
1524 return true;
1525 }
1526
1527 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1528 return the rhs of its defining statement, and this statement is stored in
1529 *RHS_STMT. Otherwise return RHS as it is. */
1530
1531 static inline tree
1532 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1533 {
1534 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1535 {
1536 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1537
1538 if (gimple_assign_single_p (def_stmt))
1539 rhs = gimple_assign_rhs1 (def_stmt);
1540 else
1541 break;
1542 *rhs_stmt = def_stmt;
1543 }
1544 return rhs;
1545 }
1546
/* Simple linked list, describing known contents of an aggregate before
   a call.  Nodes are kept sorted ascendingly by OFFSET (see
   add_to_agg_contents_list).  */

struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
1563
1564 /* Add an aggregate content item into a linked list of
1565 ipa_known_agg_contents_list structure, in which all elements
1566 are sorted ascendingly by offset. */
1567
1568 static inline void
1569 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1570 struct ipa_known_agg_contents_list *item)
1571 {
1572 struct ipa_known_agg_contents_list *list = *plist;
1573
1574 for (; list; list = list->next)
1575 {
1576 if (list->offset >= item->offset)
1577 break;
1578
1579 plist = &list->next;
1580 }
1581
1582 item->next = list;
1583 *plist = item;
1584 }
1585
1586 /* Check whether a given aggregate content is clobbered by certain element in
1587 a linked list of ipa_known_agg_contents_list. */
1588
1589 static inline bool
1590 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1591 struct ipa_known_agg_contents_list *item)
1592 {
1593 for (; list; list = list->next)
1594 {
1595 if (list->offset >= item->offset)
1596 return list->offset < item->offset + item->size;
1597
1598 if (list->offset + list->size > item->offset)
1599 return true;
1600 }
1601
1602 return false;
1603 }
1604
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.
	     A non-negative offset means it is loaded from an aggregate
	     passed in that parameter, otherwise it is a (possibly
	     arithmetic) pass-through of the parameter itself.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Offsets in the jump function are relative to the argument.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1651
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	return;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
	{
	  /* A plain interprocedural constant.  */
	  agg_value->pass_through.operand = rhs1;
	  return;
	}
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	 tcc_binary, this subtleness is somewhat misleading.

	 Since tcc_unary is widely used in IPA-CP code to check an operation
	 with one operand, here we only allow tcc_unary operation to avoid
	 possible problem.  Then we can use (opclass == tcc_unary) or not to
	 distinguish unary and binary.  */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
	gimple *rhs1_stmt = stmt;
	gimple *rhs2_stmt = stmt;
	tree rhs2 = gimple_assign_rhs2 (stmt);

	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	if (is_gimple_ip_invariant (rhs2))
	  {
	    agg_value->pass_through.operand = rhs2;
	    stmt = rhs1_stmt;
	  }
	else if (is_gimple_ip_invariant (rhs1))
	  {
	    /* Canonicalize so that the variable operand ends up in RHS1:
	       swap a comparison or rely on commutativity.  */
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      code = swap_tree_comparison (code);
	    else if (!commutative_tree_code (code))
	      return;

	    agg_value->pass_through.operand = rhs1;
	    stmt = rhs2_stmt;
	    rhs1 = rhs2;
	  }
	else
	  return;

	if (TREE_CODE_CLASS (code) != tcc_comparison
	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
	  return;
      }
      break;

    default:
      return;
    }

  /* Map the variable operand back to a formal parameter, either directly
     (default-def SSA name) or through a load from a parameter or an
     aggregate passed in one.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
1794
1795 /* If STMT is a memory store to the object whose address is BASE, extract
1796 information (offset, size, and value) into CONTENT, and return true,
1797 otherwise we conservatively assume the whole object is modified with
1798 unknown content, and return false. CHECK_REF means that access to object
1799 is expected to be in form of MEM_REF expression. */
1800
1801 static bool
1802 extract_mem_content (struct ipa_func_body_info *fbi,
1803 gimple *stmt, tree base, bool check_ref,
1804 struct ipa_known_agg_contents_list *content)
1805 {
1806 HOST_WIDE_INT lhs_offset, lhs_size;
1807 bool reverse;
1808
1809 if (!is_gimple_assign (stmt))
1810 return false;
1811
1812 tree lhs = gimple_assign_lhs (stmt);
1813 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1814 &reverse);
1815 if (!lhs_base)
1816 return false;
1817
1818 if (check_ref)
1819 {
1820 if (TREE_CODE (lhs_base) != MEM_REF
1821 || TREE_OPERAND (lhs_base, 0) != base
1822 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1823 return false;
1824 }
1825 else if (lhs_base != base)
1826 return false;
1827
1828 content->offset = lhs_offset;
1829 content->size = lhs_size;
1830 content->type = TREE_TYPE (lhs);
1831 content->next = NULL;
1832
1833 analyze_agg_content_value (fbi, &content->value, stmt);
1834 return true;
1835 }
1836
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  /* A pointer in an SSA name: track stores through that pointer,
	     covering the pointed-to type's size.  */
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  /* Address of a declaration: track direct stores into it.  */
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      /* An aggregate passed by value.  */
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip over PHIs whose arguments do not affect the reference.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* Bail out on the first clobbering statement we cannot analyze.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
1983
1984
1985 /* Return the Ith param type of callee associated with call graph
1986 edge E. */
1987
1988 tree
1989 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1990 {
1991 int n;
1992 tree type = (e->callee
1993 ? TREE_TYPE (e->callee->decl)
1994 : gimple_call_fntype (e->call_stmt));
1995 tree t = TYPE_ARG_TYPES (type);
1996
1997 for (n = 0; n < i; n++)
1998 {
1999 if (!t)
2000 break;
2001 t = TREE_CHAIN (t);
2002 }
2003 if (t)
2004 return TREE_VALUE (t);
2005 if (!e->callee)
2006 return NULL;
2007 t = DECL_ARGUMENTS (e->callee->decl);
2008 for (n = 0; n < i; n++)
2009 {
2010 if (!t)
2011 return NULL;
2012 t = TREE_CHAIN (t);
2013 }
2014 if (t)
2015 return TREE_TYPE (t);
2016 return NULL;
2017 }
2018
2019 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2020 allocated structure or a previously existing one shared with other jump
2021 functions and/or transformation summaries. */
2022
2023 ipa_bits *
2024 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2025 {
2026 ipa_bits tmp;
2027 tmp.value = value;
2028 tmp.mask = mask;
2029
2030 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2031 if (*slot)
2032 return *slot;
2033
2034 ipa_bits *res = ggc_alloc<ipa_bits> ();
2035 res->value = value;
2036 res->mask = mask;
2037 *slot = res;
2038
2039 return res;
2040 }
2041
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
   a hash table in order to avoid creating multiple same ipa_bits
   structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
2051
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  /* INSERT either finds an existing equal range or reserves a fresh slot.  */
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  /* Allocate a GC-managed copy and remember it in the reserved slot.  */
  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
2068
2069 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2070 equiv set. Use hash table in order to avoid creating multiple same copies of
2071 value_ranges. */
2072
2073 static value_range *
2074 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2075 {
2076 value_range tmp (min, max, kind);
2077 return ipa_get_value_range (&tmp);
2078 }
2079
2080 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2081 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2082 same value_range structures. */
2083
2084 static void
2085 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2086 tree min, tree max)
2087 {
2088 jf->m_vr = ipa_get_value_range (type, min, max);
2089 }
2090
2091 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2092 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2093
2094 static void
2095 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2096 {
2097 jf->m_vr = ipa_get_value_range (tmp);
2098 }
2099
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  /* For these calls the vectors stay allocated but every jump function is
     left in its cleared (IPA_JF_UNKNOWN) state.  */
  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* Record the polymorphic call context of pointer arguments.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: for pointers only
	 known-nonzero is tracked (as an anti-range around zero).  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  /* For non-pointers, take the SSA range info and convert it to
	     the type of the formal parameter.  */
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: nonzero bits for integers, alignment-derived
	 value/mask for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Finally, classify the jump function itself: constant,
	 pass-through, ancestor or (below) aggregate contents.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  /* Drop the context vector again if no argument had a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2277
2278 /* Compute jump functions for all edges - both direct and indirect - outgoing
2279 from BB. */
2280
2281 static void
2282 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2283 {
2284 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2285 int i;
2286 struct cgraph_edge *cs;
2287
2288 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2289 {
2290 struct cgraph_node *callee = cs->callee;
2291
2292 if (callee)
2293 {
2294 callee = callee->ultimate_alias_target ();
2295 /* We do not need to bother analyzing calls to unknown functions
2296 unless they may become known during lto/whopr. */
2297 if (!callee->definition && !flag_lto)
2298 continue;
2299 }
2300 ipa_compute_jump_functions_for_edge (fbi, cs);
2301 }
2302 }
2303
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load can either be a COMPONENT_REF of a MEM_REF (explicit field
     access, remember the field) or a plain MEM_REF at some offset.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type is shaped like a member
     pointer record, yielding its pfn and delta fields.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* Field-access form: the MEM_REF offset must be zero and the field
	 must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Offset form: the MEM_REF offset must match the field position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
							      : NULL_TREE;
}
2355
2356 /* Returns true iff T is an SSA_NAME defined by a statement. */
2357
2358 static bool
2359 ipa_is_ssa_with_stmt_def (tree t)
2360 {
2361 if (TREE_CODE (t) == SSA_NAME
2362 && !SSA_NAME_IS_DEFAULT_DEF (t))
2363 return true;
2364 else
2365 return false;
2366 }
2367
2368 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2369 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2370 indirect call graph edge.
2371 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2372
2373 static struct cgraph_edge *
2374 ipa_note_param_call (struct cgraph_node *node, int param_index,
2375 gcall *stmt, bool polymorphic)
2376 {
2377 struct cgraph_edge *cs;
2378
2379 cs = node->get_edge (stmt);
2380 cs->indirect_info->param_index = param_index;
2381 cs->indirect_info->agg_contents = 0;
2382 cs->indirect_info->member_ptr = 0;
2383 cs->indirect_info->guaranteed_unmodified = 0;
2384 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2385 param_index, true);
2386 if (cs->indirect_info->polymorphic || polymorphic)
2387 ipa_set_param_used_by_polymorphic_call
2388 (IPA_NODE_REF (node), param_index, true);
2389 return cs;
2390 }
2391
2392 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2393 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2394 intermediate information about each formal parameter. Currently it checks
2395 whether the call calls a pointer that is a formal parameter and if so, the
2396 parameter is marked with the called flag and an indirect call graph edge
2397 describing the call is created. This is very simple for ordinary pointers
2398 represented in SSA but not-so-nice when it comes to member pointers. The
2399 ugly part of this function does nothing more than trying to match the
2400 pattern of such a call. An example of such a pattern is the gimple dump
2401 below, the call is on the last line:
2402
2403 <bb 2>:
2404 f$__delta_5 = f.__delta;
2405 f$__pfn_24 = f.__pfn;
2406
2407 or
2408 <bb 2>:
2409 f$__delta_5 = MEM[(struct *)&f];
2410 f$__pfn_24 = MEM[(struct *)&f + 4B];
2411
2412 and a few lines below:
2413
2414 <bb 5>
2415 D.2496_3 = (int) f$__pfn_24;
2416 D.2497_4 = D.2496_3 & 1;
2417 if (D.2497_4 != 0)
2418 goto <bb 3>;
2419 else
2420 goto <bb 4>;
2421
2422 <bb 6>:
2423 D.2500_7 = (unsigned int) f$__delta_5;
2424 D.2501_8 = &S + D.2500_7;
2425 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2426 D.2503_10 = *D.2502_9;
2427 D.2504_12 = f$__pfn_24 + -1;
2428 D.2505_13 = (unsigned int) D.2504_12;
2429 D.2506_14 = D.2503_10 + D.2505_13;
2430 D.2507_15 = *D.2506_14;
2431 iftmp.11_16 = (String:: *) D.2507_15;
2432
2433 <bb 7>:
2434 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2435 D.2500_19 = (unsigned int) f$__delta_5;
2436 D.2508_20 = &S + D.2500_19;
2437 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2438
2439 Such patterns are results of simple calls to a member pointer:
2440
2441 int doprinting (int (MyString::* f)(int) const)
2442 {
2443 MyString S ("somestring");
2444
2445 return (S.*f)(4);
2446 }
2447
2448 Moreover, the function also looks for called pointers loaded from aggregates
2449 passed by value or reference. */
2450
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The simple case: the called SSA name is the incoming value of a
     formal parameter itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* The target may also be loaded from an aggregate passed to this
     function, by value or by reference.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one of the two PHI arguments must be the pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be a BIT_AND_EXPR with 1.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the pfn before the bit test.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  /* The tested value must be loaded from the same member pointer
     parameter; which field holds the virtualness bit depends on the
     target.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2588
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* OBJ is the incoming value of a parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type of the object may have changed between
	 function entry and the call.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      /* OBJ may also be an ancestor (a base at some offset) of a
	 parameter.  */
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  /* Record the call as a polymorphic call through the parameter.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2645
2646 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2647 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2648 containing intermediate information about each formal parameter. */
2649
2650 static void
2651 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2652 {
2653 tree target = gimple_call_fn (call);
2654
2655 if (!target
2656 || (TREE_CODE (target) != SSA_NAME
2657 && !virtual_method_call_p (target)))
2658 return;
2659
2660 struct cgraph_edge *cs = fbi->node->get_edge (call);
2661 /* If we previously turned the call into a direct call, there is
2662 no need to analyze. */
2663 if (cs && !cs->indirect_unknown_callee)
2664 return;
2665
2666 if (cs->indirect_info->polymorphic && flag_devirtualize)
2667 {
2668 tree instance;
2669 tree target = gimple_call_fn (call);
2670 ipa_polymorphic_call_context context (current_function_decl,
2671 target, call, &instance);
2672
2673 gcc_checking_assert (cs->indirect_info->otr_type
2674 == obj_type_ref_class (target));
2675 gcc_checking_assert (cs->indirect_info->otr_token
2676 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2677
2678 cs->indirect_info->vptr_changed
2679 = !context.get_dynamic_type (instance,
2680 OBJ_TYPE_REF_OBJECT (target),
2681 obj_type_ref_class (target), call,
2682 &fbi->aa_walk_budget);
2683 cs->indirect_info->context = context;
2684 }
2685
2686 if (TREE_CODE (target) == SSA_NAME)
2687 ipa_analyze_indirect_call_uses (fbi, call, target);
2688 else if (virtual_method_call_p (target))
2689 ipa_analyze_virtual_call_uses (fbi, call, target);
2690 }
2691
2692
2693 /* Analyze the call statement STMT with respect to formal parameters (described
2694 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2695 formal parameters are called. */
2696
2697 static void
2698 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2699 {
2700 if (is_gimple_call (stmt))
2701 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2702 }
2703
2704 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2705 If OP is a parameter declaration, mark it as used in the info structure
2706 passed in DATA. */
2707
2708 static bool
2709 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2710 {
2711 class ipa_node_params *info = (class ipa_node_params *) data;
2712
2713 op = get_base_address (op);
2714 if (op
2715 && TREE_CODE (op) == PARM_DECL)
2716 {
2717 int index = ipa_get_param_decl_index (info, op);
2718 gcc_assert (index >= 0);
2719 ipa_set_param_used (info, index, true);
2720 }
2721
2722 return false;
2723 }
2724
2725 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2726 the findings in various structures of the associated ipa_node_params
2727 structure, such as parameter flags, notes etc. FBI holds various data about
2728 the function being analyzed. */
2729
2730 static void
2731 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2732 {
2733 gimple_stmt_iterator gsi;
2734 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2735 {
2736 gimple *stmt = gsi_stmt (gsi);
2737
2738 if (is_gimple_debug (stmt))
2739 continue;
2740
2741 ipa_analyze_stmt_uses (fbi, stmt);
2742 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2743 visit_ref_for_mod_analysis,
2744 visit_ref_for_mod_analysis,
2745 visit_ref_for_mod_analysis);
2746 }
2747 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2748 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2749 visit_ref_for_mod_analysis,
2750 visit_ref_for_mod_analysis,
2751 visit_ref_for_mod_analysis);
2752 }
2753
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count uses as "controlled" only when every non-debug use
		 is an argument of a call; any other use makes the uses
		 undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
2798
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  /* Release the vectors of outgoing call graph edges and per-parameter
     alias-analysis statuses; BI itself is owned by the caller.  */
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
2807
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body information of the node being analyzed.  */
  struct ipa_func_body_info *m_fbi;
};
2821
/* Analyze parameter uses and compute jump functions in BB; returning NULL
   continues the walk along all outgoing edges.  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2829
2830 /* Release body info FBI. */
2831
2832 void
2833 ipa_release_body_info (struct ipa_func_body_info *fbi)
2834 {
2835 int i;
2836 struct ipa_bb_info *bi;
2837
2838 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2839 free_ipa_bb_info (bi);
2840 fbi->bb_infos.release ();
2841 }
2842
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF_GET_CREATE (node);

  /* The analysis is performed at most once per node.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Analysis is forbidden; conservatively mark all parameters as used
	 with undescribable uses.  */
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  /* Shared budget for alias-analysis walks over this function body.  */
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  /* Distribute the outgoing edges (both direct and indirect) into the
     per-basic-block infos of the blocks containing their call statements.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
2902
2903 /* Update the jump functions associated with call graph edge E when the call
2904 graph edge CS is being inlined, assuming that E->caller is already (possibly
2905 indirectly) inlined into CS->callee and that E has not been inlined. */
2906
2907 static void
2908 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2909 struct cgraph_edge *e)
2910 {
2911 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2912 class ipa_edge_args *args = IPA_EDGE_REF (e);
2913 if (!args)
2914 return;
2915 int count = ipa_get_cs_argument_count (args);
2916 int i;
2917
2918 for (i = 0; i < count; i++)
2919 {
2920 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2921 class ipa_polymorphic_call_context *dst_ctx
2922 = ipa_get_ith_polymorhic_call_context (args, i);
2923
2924 if (dst->agg.items)
2925 {
2926 struct ipa_agg_jf_item *item;
2927 int j;
2928
2929 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2930 {
2931 int dst_fid;
2932 struct ipa_jump_func *src;
2933
2934 if (item->jftype != IPA_JF_PASS_THROUGH
2935 && item->jftype != IPA_JF_LOAD_AGG)
2936 continue;
2937
2938 dst_fid = item->value.pass_through.formal_id;
2939 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
2940 {
2941 item->jftype = IPA_JF_UNKNOWN;
2942 continue;
2943 }
2944
2945 item->value.pass_through.formal_id = -1;
2946 src = ipa_get_ith_jump_func (top, dst_fid);
2947 if (src->type == IPA_JF_CONST)
2948 {
2949 if (item->jftype == IPA_JF_PASS_THROUGH
2950 && item->value.pass_through.operation == NOP_EXPR)
2951 {
2952 item->jftype = IPA_JF_CONST;
2953 item->value.constant = src->value.constant.value;
2954 continue;
2955 }
2956 }
2957 else if (src->type == IPA_JF_PASS_THROUGH
2958 && src->value.pass_through.operation == NOP_EXPR)
2959 {
2960 if (item->jftype == IPA_JF_PASS_THROUGH
2961 || !item->value.load_agg.by_ref
2962 || src->value.pass_through.agg_preserved)
2963 item->value.pass_through.formal_id
2964 = src->value.pass_through.formal_id;
2965 }
2966 else if (src->type == IPA_JF_ANCESTOR)
2967 {
2968 if (item->jftype == IPA_JF_PASS_THROUGH)
2969 {
2970 if (!src->value.ancestor.offset)
2971 item->value.pass_through.formal_id
2972 = src->value.ancestor.formal_id;
2973 }
2974 else if (src->value.ancestor.agg_preserved)
2975 {
2976 gcc_checking_assert (item->value.load_agg.by_ref);
2977
2978 item->value.pass_through.formal_id
2979 = src->value.ancestor.formal_id;
2980 item->value.load_agg.offset
2981 += src->value.ancestor.offset;
2982 }
2983 }
2984
2985 if (item->value.pass_through.formal_id < 0)
2986 item->jftype = IPA_JF_UNKNOWN;
2987 }
2988 }
2989
2990 if (!top)
2991 {
2992 ipa_set_jf_unknown (dst);
2993 continue;
2994 }
2995
2996 if (dst->type == IPA_JF_ANCESTOR)
2997 {
2998 struct ipa_jump_func *src;
2999 int dst_fid = dst->value.ancestor.formal_id;
3000 class ipa_polymorphic_call_context *src_ctx
3001 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3002
3003 /* Variable number of arguments can cause havoc if we try to access
3004 one that does not exist in the inlined edge. So make sure we
3005 don't. */
3006 if (dst_fid >= ipa_get_cs_argument_count (top))
3007 {
3008 ipa_set_jf_unknown (dst);
3009 continue;
3010 }
3011
3012 src = ipa_get_ith_jump_func (top, dst_fid);
3013
3014 if (src_ctx && !src_ctx->useless_p ())
3015 {
3016 class ipa_polymorphic_call_context ctx = *src_ctx;
3017
3018 /* TODO: Make type preserved safe WRT contexts. */
3019 if (!ipa_get_jf_ancestor_type_preserved (dst))
3020 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3021 ctx.offset_by (dst->value.ancestor.offset);
3022 if (!ctx.useless_p ())
3023 {
3024 if (!dst_ctx)
3025 {
3026 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3027 count);
3028 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3029 }
3030
3031 dst_ctx->combine_with (ctx);
3032 }
3033 }
3034
3035 /* Parameter and argument in ancestor jump function must be pointer
3036 type, which means access to aggregate must be by-reference. */
3037 gcc_assert (!src->agg.items || src->agg.by_ref);
3038
3039 if (src->agg.items && dst->value.ancestor.agg_preserved)
3040 {
3041 struct ipa_agg_jf_item *item;
3042 int j;
3043
3044 /* Currently we do not produce clobber aggregate jump functions,
3045 replace with merging when we do. */
3046 gcc_assert (!dst->agg.items);
3047
3048 dst->agg.items = vec_safe_copy (src->agg.items);
3049 dst->agg.by_ref = src->agg.by_ref;
3050 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3051 item->offset -= dst->value.ancestor.offset;
3052 }
3053
3054 if (src->type == IPA_JF_PASS_THROUGH
3055 && src->value.pass_through.operation == NOP_EXPR)
3056 {
3057 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3058 dst->value.ancestor.agg_preserved &=
3059 src->value.pass_through.agg_preserved;
3060 }
3061 else if (src->type == IPA_JF_ANCESTOR)
3062 {
3063 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3064 dst->value.ancestor.offset += src->value.ancestor.offset;
3065 dst->value.ancestor.agg_preserved &=
3066 src->value.ancestor.agg_preserved;
3067 }
3068 else
3069 ipa_set_jf_unknown (dst);
3070 }
3071 else if (dst->type == IPA_JF_PASS_THROUGH)
3072 {
3073 struct ipa_jump_func *src;
3074 /* We must check range due to calls with variable number of arguments
3075 and we cannot combine jump functions with operations. */
3076 if (dst->value.pass_through.operation == NOP_EXPR
3077 && (top && dst->value.pass_through.formal_id
3078 < ipa_get_cs_argument_count (top)))
3079 {
3080 int dst_fid = dst->value.pass_through.formal_id;
3081 src = ipa_get_ith_jump_func (top, dst_fid);
3082 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3083 class ipa_polymorphic_call_context *src_ctx
3084 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3085
3086 if (src_ctx && !src_ctx->useless_p ())
3087 {
3088 class ipa_polymorphic_call_context ctx = *src_ctx;
3089
3090 /* TODO: Make type preserved safe WRT contexts. */
3091 if (!ipa_get_jf_pass_through_type_preserved (dst))
3092 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3093 if (!ctx.useless_p ())
3094 {
3095 if (!dst_ctx)
3096 {
3097 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3098 count);
3099 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3100 }
3101 dst_ctx->combine_with (ctx);
3102 }
3103 }
3104 switch (src->type)
3105 {
3106 case IPA_JF_UNKNOWN:
3107 ipa_set_jf_unknown (dst);
3108 break;
3109 case IPA_JF_CONST:
3110 ipa_set_jf_cst_copy (dst, src);
3111 break;
3112
3113 case IPA_JF_PASS_THROUGH:
3114 {
3115 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3116 enum tree_code operation;
3117 operation = ipa_get_jf_pass_through_operation (src);
3118
3119 if (operation == NOP_EXPR)
3120 {
3121 bool agg_p;
3122 agg_p = dst_agg_p
3123 && ipa_get_jf_pass_through_agg_preserved (src);
3124 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3125 }
3126 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3127 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3128 else
3129 {
3130 tree operand = ipa_get_jf_pass_through_operand (src);
3131 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3132 operation);
3133 }
3134 break;
3135 }
3136 case IPA_JF_ANCESTOR:
3137 {
3138 bool agg_p;
3139 agg_p = dst_agg_p
3140 && ipa_get_jf_ancestor_agg_preserved (src);
3141 ipa_set_ancestor_jf (dst,
3142 ipa_get_jf_ancestor_offset (src),
3143 ipa_get_jf_ancestor_formal_id (src),
3144 agg_p);
3145 break;
3146 }
3147 default:
3148 gcc_unreachable ();
3149 }
3150
3151 if (src->agg.items
3152 && (dst_agg_p || !src->agg.by_ref))
3153 {
3154 /* Currently we do not produce clobber aggregate jump
3155 functions, replace with merging when we do. */
3156 gcc_assert (!dst->agg.items);
3157
3158 dst->agg.by_ref = src->agg.by_ref;
3159 dst->agg.items = vec_safe_copy (src->agg.items);
3160 }
3161 }
3162 else
3163 ipa_set_jf_unknown (dst);
3164 }
3165 }
3166 }
3167
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */
3171
3172 struct cgraph_edge *
3173 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3174 bool speculative)
3175 {
3176 struct cgraph_node *callee;
3177 bool unreachable = false;
3178
3179 if (TREE_CODE (target) == ADDR_EXPR)
3180 target = TREE_OPERAND (target, 0);
3181 if (TREE_CODE (target) != FUNCTION_DECL)
3182 {
3183 target = canonicalize_constructor_val (target, NULL);
3184 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3185 {
3186 /* Member pointer call that goes through a VMT lookup. */
3187 if (ie->indirect_info->member_ptr
3188 /* Or if target is not an invariant expression and we do not
3189 know if it will evaulate to function at runtime.
3190 This can happen when folding through &VAR, where &VAR
3191 is IP invariant, but VAR itself is not.
3192
3193 TODO: Revisit this when GCC 5 is branched. It seems that
3194 member_ptr check is not needed and that we may try to fold
3195 the expression and see if VAR is readonly. */
3196 || !is_gimple_ip_invariant (target))
3197 {
3198 if (dump_enabled_p ())
3199 {
3200 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3201 "discovered direct call non-invariant %s\n",
3202 ie->caller->dump_name ());
3203 }
3204 return NULL;
3205 }
3206
3207
3208 if (dump_enabled_p ())
3209 {
3210 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3211 "discovered direct call to non-function in %s, "
3212 "making it __builtin_unreachable\n",
3213 ie->caller->dump_name ());
3214 }
3215
3216 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3217 callee = cgraph_node::get_create (target);
3218 unreachable = true;
3219 }
3220 else
3221 callee = cgraph_node::get (target);
3222 }
3223 else
3224 callee = cgraph_node::get (target);
3225
3226 /* Because may-edges are not explicitely represented and vtable may be external,
3227 we may create the first reference to the object in the unit. */
3228 if (!callee || callee->inlined_to)
3229 {
3230
3231 /* We are better to ensure we can refer to it.
3232 In the case of static functions we are out of luck, since we already
3233 removed its body. In the case of public functions we may or may
3234 not introduce the reference. */
3235 if (!canonicalize_constructor_val (target, NULL)
3236 || !TREE_PUBLIC (target))
3237 {
3238 if (dump_file)
3239 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3240 "(%s -> %s) but cannot refer to it. Giving up.\n",
3241 ie->caller->dump_name (),
3242 ie->callee->dump_name ());
3243 return NULL;
3244 }
3245 callee = cgraph_node::get_create (target);
3246 }
3247
3248 /* If the edge is already speculated. */
3249 if (speculative && ie->speculative)
3250 {
3251 if (dump_file)
3252 {
3253 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3254 if (!e2)
3255 {
3256 if (dump_file)
3257 fprintf (dump_file, "ipa-prop: Discovered call to a "
3258 "speculative target (%s -> %s) but the call is "
3259 "already speculated to different target. "
3260 "Giving up.\n",
3261 ie->caller->dump_name (), callee->dump_name ());
3262 }
3263 else
3264 {
3265 if (dump_file)
3266 fprintf (dump_file,
3267 "ipa-prop: Discovered call to a speculative target "
3268 "(%s -> %s) this agree with previous speculation.\n",
3269 ie->caller->dump_name (), callee->dump_name ());
3270 }
3271 }
3272 return NULL;
3273 }
3274
3275 if (!dbg_cnt (devirt))
3276 return NULL;
3277
3278 ipa_check_create_node_params ();
3279
3280 /* We cannot make edges to inline clones. It is bug that someone removed
3281 the cgraph node too early. */
3282 gcc_assert (!callee->inlined_to);
3283
3284 if (dump_file && !unreachable)
3285 {
3286 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3287 "(%s -> %s), for stmt ",
3288 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3289 speculative ? "speculative" : "known",
3290 ie->caller->dump_name (),
3291 callee->dump_name ());
3292 if (ie->call_stmt)
3293 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3294 else
3295 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3296 }
3297 if (dump_enabled_p ())
3298 {
3299 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3300 "converting indirect call in %s to direct call to %s\n",
3301 ie->caller->dump_name (), callee->dump_name ());
3302 }
3303 if (!speculative)
3304 {
3305 struct cgraph_edge *orig = ie;
3306 ie = cgraph_edge::make_direct (ie, callee);
3307 /* If we resolved speculative edge the cost is already up to date
3308 for direct call (adjusted by inline_edge_duplication_hook). */
3309 if (ie == orig)
3310 {
3311 ipa_call_summary *es = ipa_call_summaries->get (ie);
3312 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3313 - eni_size_weights.call_cost);
3314 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3315 - eni_time_weights.call_cost);
3316 }
3317 }
3318 else
3319 {
3320 if (!callee->can_be_discarded_p ())
3321 {
3322 cgraph_node *alias;
3323 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3324 if (alias)
3325 callee = alias;
3326 }
3327 /* make_speculative will update ie's cost to direct call cost. */
3328 ie = ie->make_speculative
3329 (callee, ie->count.apply_scale (8, 10));
3330 }
3331
3332 return ie;
3333 }
3334
3335 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3336 CONSTRUCTOR and return it. Return NULL if the search fails for some
3337 reason. */
3338
3339 static tree
3340 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3341 {
3342 tree type = TREE_TYPE (constructor);
3343 if (TREE_CODE (type) != ARRAY_TYPE
3344 && TREE_CODE (type) != RECORD_TYPE)
3345 return NULL;
3346
3347 unsigned ix;
3348 tree index, val;
3349 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3350 {
3351 HOST_WIDE_INT elt_offset;
3352 if (TREE_CODE (type) == ARRAY_TYPE)
3353 {
3354 offset_int off;
3355 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3356 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3357
3358 if (index)
3359 {
3360 if (TREE_CODE (index) == RANGE_EXPR)
3361 off = wi::to_offset (TREE_OPERAND (index, 0));
3362 else
3363 off = wi::to_offset (index);
3364 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3365 {
3366 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3367 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3368 off = wi::sext (off - wi::to_offset (low_bound),
3369 TYPE_PRECISION (TREE_TYPE (index)));
3370 }
3371 off *= wi::to_offset (unit_size);
3372 /* ??? Handle more than just the first index of a
3373 RANGE_EXPR. */
3374 }
3375 else
3376 off = wi::to_offset (unit_size) * ix;
3377
3378 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3379 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3380 continue;
3381 elt_offset = off.to_shwi ();
3382 }
3383 else if (TREE_CODE (type) == RECORD_TYPE)
3384 {
3385 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3386 if (DECL_BIT_FIELD (index))
3387 continue;
3388 elt_offset = int_bit_position (index);
3389 }
3390 else
3391 gcc_unreachable ();
3392
3393 if (elt_offset > req_offset)
3394 return NULL;
3395
3396 if (TREE_CODE (val) == CONSTRUCTOR)
3397 return find_constructor_constant_at_offset (val,
3398 req_offset - elt_offset);
3399
3400 if (elt_offset == req_offset
3401 && is_gimple_reg_type (TREE_TYPE (val))
3402 && is_gimple_ip_invariant (val))
3403 return val;
3404 }
3405 return NULL;
3406 }
3407
3408 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3409 invariant from a static constructor and if so, return it. Otherwise return
3410 NULL. */
3411
3412 static tree
3413 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3414 {
3415 if (by_ref)
3416 {
3417 if (TREE_CODE (scalar) != ADDR_EXPR)
3418 return NULL;
3419 scalar = TREE_OPERAND (scalar, 0);
3420 }
3421
3422 if (!VAR_P (scalar)
3423 || !is_global_var (scalar)
3424 || !TREE_READONLY (scalar)
3425 || !DECL_INITIAL (scalar)
3426 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3427 return NULL;
3428
3429 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3430 }
3431
3432 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3433 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3434 return NULL if there is none. BY_REF specifies whether the value has to be
3435 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3436 the boolean it points to is set to true if the value comes from an
3437 initializer of a constant. */
3438
3439 tree
3440 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3441 HOST_WIDE_INT offset, bool by_ref,
3442 bool *from_global_constant)
3443 {
3444 struct ipa_agg_value *item;
3445 int i;
3446
3447 if (scalar)
3448 {
3449 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3450 if (res)
3451 {
3452 if (from_global_constant)
3453 *from_global_constant = true;
3454 return res;
3455 }
3456 }
3457
3458 if (!agg
3459 || by_ref != agg->by_ref)
3460 return NULL;
3461
3462 FOR_EACH_VEC_ELT (agg->items, i, item)
3463 if (item->offset == offset)
3464 {
3465 /* Currently we do not have clobber values, return NULL for them once
3466 we do. */
3467 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3468 if (from_global_constant)
3469 *from_global_constant = false;
3470 return item->value;
3471 }
3472 return NULL;
3473 }
3474
3475 /* Remove a reference to SYMBOL from the list of references of a node given by
3476 reference description RDESC. Return true if the reference has been
3477 successfully found and removed. */
3478
3479 static bool
3480 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3481 {
3482 struct ipa_ref *to_del;
3483 struct cgraph_edge *origin;
3484
3485 origin = rdesc->cs;
3486 if (!origin)
3487 return false;
3488 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3489 origin->lto_stmt_uid);
3490 if (!to_del)
3491 return false;
3492
3493 to_del->remove_reference ();
3494 if (dump_file)
3495 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3496 origin->caller->dump_name (), symbol->dump_name ());
3497 return true;
3498 }
3499
3500 /* If JFUNC has a reference description with refcount different from
3501 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3502 NULL. JFUNC must be a constant jump function. */
3503
3504 static struct ipa_cst_ref_desc *
3505 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3506 {
3507 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3508 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3509 return rdesc;
3510 else
3511 return NULL;
3512 }
3513
3514 /* If the value of constant jump function JFUNC is an address of a function
3515 declaration, return the associated call graph node. Otherwise return
3516 NULL. */
3517
3518 static cgraph_node *
3519 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3520 {
3521 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3522 tree cst = ipa_get_jf_constant (jfunc);
3523 if (TREE_CODE (cst) != ADDR_EXPR
3524 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3525 return NULL;
3526
3527 return cgraph_node::get (TREE_OPERAND (cst, 0));
3528 }
3529
3530
3531 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3532 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3533 the edge specified in the rdesc. Return false if either the symbol or the
3534 reference could not be found, otherwise return true. */
3535
3536 static bool
3537 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3538 {
3539 struct ipa_cst_ref_desc *rdesc;
3540 if (jfunc->type == IPA_JF_CONST
3541 && (rdesc = jfunc_rdesc_usable (jfunc))
3542 && --rdesc->refcount == 0)
3543 {
3544 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3545 if (!symbol)
3546 return false;
3547
3548 return remove_described_reference (symbol, rdesc);
3549 }
3550 return true;
3551 }
3552
3553 /* Try to find a destination for indirect edge IE that corresponds to a simple
3554 call or a call of a member function pointer and where the destination is a
3555 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3556 the type of the parameter to which the result of JFUNC is passed. If it can
3557 be determined, return the newly direct edge, otherwise return NULL.
3558 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3559 relative to. */
3560
static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee address is loaded from an aggregate; look it up among
	 the known aggregate contents of the jump function.  */
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      target = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      agg.release ();
      /* Unless the value comes from a global constant initializer or the
	 aggregate is guaranteed unmodified, it cannot be trusted.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function no longer describes a use of the constant;
	 release the associated reference-descriptor count.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3607
3608 /* Return the target to be used in cases of impossible devirtualization. IE
3609 and target (the latter can be NULL) are dumped when dumping is enabled. */
3610
3611 tree
3612 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3613 {
3614 if (dump_file)
3615 {
3616 if (target)
3617 fprintf (dump_file,
3618 "Type inconsistent devirtualization: %s->%s\n",
3619 ie->caller->dump_name (),
3620 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3621 else
3622 fprintf (dump_file,
3623 "No devirtualization target in %s\n",
3624 ie->caller->dump_name ());
3625 }
3626 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3627 cgraph_node::get_create (new_target);
3628 return new_target;
3629 }
3630
3631 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3632 call based on a formal parameter which is described by jump function JFUNC
3633 and if it can be determined, make it direct and return the direct edge.
3634 Otherwise, return NULL. CTX describes the polymorphic context that the
3635 parameter the call is based on brings along with it. NEW_ROOT and
3636 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3637 to. */
3638
static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      /* Look for the vtable pointer among the known aggregate contents.  */
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the lookup result is only
		     a speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: combine the incoming context with the call site's own
     context and enumerate possible polymorphic call targets.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A complete target list with at most one entry is authoritative;
	 it overrides any earlier speculative result.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* No certain target; for hot calls, try a speculative guess.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      /* Sanity-check the chosen target against the set of possible ones.  */
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3745
3746 /* Update the param called notes associated with NODE when CS is being inlined,
3747 assuming NODE is (potentially indirectly) inlined into CS->callee.
3748 Moreover, if the callee is discovered to be constant, create a new cgraph
3749 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3750 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3751
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* After inlining, indirect calls of NODE are expressed relative to the
     root of the inline tree.  */
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = IPA_NODE_REF (new_root);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* IE may be turned into a direct edge below; fetch the successor
	 first.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);
      cgraph_node *spec_target = NULL;

      /* FIXME: This may need updating for multiple calls.  */
      if (ie->speculative)
	spec_target = ie->first_speculative_call_target ()->callee;

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making edges direct may have invalidated the edge summary;
	     re-fetch it.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The call remains indirect; translate its parameter index through the
	 jump function so it refers to a parameter of the new root.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      /* Re-map to the ancestor's parameter and accumulate the
		 offset.  */
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3886
3887 /* Recursively traverse subtree of NODE (including node) made of inlined
3888 cgraph_edges when CS has been inlined and invoke
3889 update_indirect_edges_after_inlining on all nodes and
3890 update_jump_functions_after_inlining on all non-inlined edges that lead out
3891 of this subtree. Newly discovered indirect edges will be added to
3892 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3893 created. */
3894
3895 static bool
3896 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3897 struct cgraph_node *node,
3898 vec<cgraph_edge *> *new_edges)
3899 {
3900 struct cgraph_edge *e;
3901 bool res;
3902
3903 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3904
3905 for (e = node->callees; e; e = e->next_callee)
3906 if (!e->inline_failed)
3907 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3908 else
3909 update_jump_functions_after_inlining (cs, e);
3910 for (e = node->indirect_calls; e; e = e->next_callee)
3911 update_jump_functions_after_inlining (cs, e);
3912
3913 return res;
3914 }
3915
3916 /* Combine two controlled uses counts as done during inlining. */
3917
3918 static int
3919 combine_controlled_uses_counters (int c, int d)
3920 {
3921 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3922 return IPA_UNDESCRIBED_USE;
3923 else
3924 return c + d - 1;
3925 }
3926
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */
3929
static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  /* Only arguments that correspond to formal parameters of the callee can
     be described; the rest is handled after this loop.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  /* Fold the callee's use count of parameter I into the use count of
	     the caller parameter it passes through.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All uses are accounted for; a function reference created
		 during cloning is no longer needed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  /* Combine the reference descriptor's count with the callee's use
	     count of the corresponding parameter.  */
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the inline chain and drop the cloning-created
		     references held by intermediate IPA-CP clones.  */
		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments in excess of the callee's formal parameters cannot be
     tracked; mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
4046
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */
4052
4053 bool
4054 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4055 vec<cgraph_edge *> *new_edges)
4056 {
4057 bool changed;
4058 /* Do nothing if the preparation phase has not been carried out yet
4059 (i.e. during early inlining). */
4060 if (!ipa_node_params_sum)
4061 return false;
4062 gcc_assert (ipa_edge_args_sum);
4063
4064 propagate_controlled_uses (cs);
4065 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4066 ipa_node_params_sum->remove (cs->callee);
4067
4068 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4069 if (args)
4070 {
4071 bool ok = true;
4072 if (args->jump_functions)
4073 {
4074 struct ipa_jump_func *jf;
4075 int i;
4076 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4077 if (jf->type == IPA_JF_CONST
4078 && ipa_get_jf_constant_rdesc (jf))
4079 {
4080 ok = false;
4081 break;
4082 }
4083 }
4084 if (ok)
4085 ipa_edge_args_sum->remove (cs);
4086 }
4087 if (ipcp_transformation_sum)
4088 ipcp_transformation_sum->remove (cs->callee);
4089
4090 return changed;
4091 }
4092
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */
4096
4097 void
4098 ipa_check_create_edge_args (void)
4099 {
4100 if (!ipa_edge_args_sum)
4101 ipa_edge_args_sum
4102 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4103 ipa_edge_args_sum_t (symtab, true));
4104 if (!ipa_bits_hash_table)
4105 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4106 if (!ipa_vr_hash_table)
4107 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4108 }
4109
4110 /* Free all ipa_edge structures. */
4111
4112 void
4113 ipa_free_all_edge_args (void)
4114 {
4115 if (!ipa_edge_args_sum)
4116 return;
4117
4118 ggc_delete (ipa_edge_args_sum);
4119 ipa_edge_args_sum = NULL;
4120 }
4121
4122 /* Free all ipa_node_params structures. */
4123
4124 void
4125 ipa_free_all_node_params (void)
4126 {
4127 ggc_delete (ipa_node_params_sum);
4128 ipa_node_params_sum = NULL;
4129 }
4130
4131 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4132 tables if they do not already exist. */
4133
4134 void
4135 ipcp_transformation_initialize (void)
4136 {
4137 if (!ipa_bits_hash_table)
4138 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4139 if (!ipa_vr_hash_table)
4140 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4141 if (ipcp_transformation_sum == NULL)
4142 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4143 }
4144
4145 /* Release the IPA CP transformation summary. */
4146
4147 void
4148 ipcp_free_transformation_sum (void)
4149 {
4150 if (!ipcp_transformation_sum)
4151 return;
4152
4153 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4154 ggc_free (ipcp_transformation_sum);
4155 ipcp_transformation_sum = NULL;
4156 }
4157
4158 /* Set the aggregate replacements of NODE to be AGGVALS. */
4159
4160 void
4161 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4162 struct ipa_agg_replacement_value *aggvals)
4163 {
4164 ipcp_transformation_initialize ();
4165 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4166 s->agg_values = aggvals;
4167 }
4168
4169 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4170 count data structures accordingly. */
4171
4172 void
4173 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4174 {
4175 if (args->jump_functions)
4176 {
4177 struct ipa_jump_func *jf;
4178 int i;
4179 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4180 {
4181 struct ipa_cst_ref_desc *rdesc;
4182 try_decrement_rdesc_refcount (jf);
4183 if (jf->type == IPA_JF_CONST
4184 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4185 && rdesc->cs == cs)
4186 rdesc->cs = NULL;
4187 }
4188 }
4189 }
4190
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The vec copy above was shallow, so the aggregate item vector is
	 still shared; give the new jump function its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller: clone the underlying
		 IPA reference and give the new edge a fresh, independent
		 reference description.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* The source refdesc describes the source edge itself; chain
		 the new refdesc into its list of duplicates.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      /* Walk the duplicate chain looking for the refdesc whose edge
		 lives in the same inline-clone tree as DST.  */
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A duplicated pass-through within the same caller constitutes
	     one more described use of the corresponding parameter of the
	     inline root, unless its uses are not tracked at all.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
4284
4285 /* Analyze newly added function into callgraph. */
4286
4287 static void
4288 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4289 {
4290 if (node->has_gimple_body_p ())
4291 ipa_analyze_node (node);
4292 }
4293
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  /* Copy the per-parameter data; lattices are not duplicated and start out
     NULL in the copy.  */
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the chain of aggregate replacement values.  Note that
	 prepending each copy reverses the order relative to the original
	 chain (presumably consumers do not depend on the order — verify if
	 that ever matters).  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
4330
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  /* Deep-copy the aggregate replacement chain in original order, using a
     pointer-to-pointer cursor into the destination list.  The struct copy
     also copies the NEXT field, so the final element terminates the new
     chain just as it did the old one.  */
  ipa_agg_replacement_value *agg = src_trans->agg_values,
			    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
4353
4354 /* Register our cgraph hooks if they are not already there. */
4355
4356 void
4357 ipa_register_cgraph_hooks (void)
4358 {
4359 ipa_check_create_node_params ();
4360 ipa_check_create_edge_args ();
4361
4362 function_insertion_hook_holder =
4363 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4364 }
4365
/* Unregister our cgraph hooks so that they are no longer invoked.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
4374
4375 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4376 longer needed after ipa-cp. */
4377
4378 void
4379 ipa_free_all_structures_after_ipa_cp (void)
4380 {
4381 if (!optimize && !in_lto_p)
4382 {
4383 ipa_free_all_edge_args ();
4384 ipa_free_all_node_params ();
4385 ipcp_sources_pool.release ();
4386 ipcp_cst_values_pool.release ();
4387 ipcp_poly_ctx_values_pool.release ();
4388 ipcp_agg_lattice_pool.release ();
4389 ipa_unregister_cgraph_hooks ();
4390 ipa_refdesc_pool.release ();
4391 }
4392 }
4393
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike
   ipa_free_all_structures_after_ipa_cp, this frees unconditionally.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4409
4410 /* Print ipa_tree_map data structures of all functions in the
4411 callgraph to F. */
4412
4413 void
4414 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4415 {
4416 int i, count;
4417 class ipa_node_params *info;
4418
4419 if (!node->definition)
4420 return;
4421 info = IPA_NODE_REF (node);
4422 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4423 if (!info)
4424 {
4425 fprintf (f, " no params return\n");
4426 return;
4427 }
4428 count = ipa_get_param_count (info);
4429 for (i = 0; i < count; i++)
4430 {
4431 int c;
4432
4433 fprintf (f, " ");
4434 ipa_dump_param (f, info, i);
4435 if (ipa_is_param_used (info, i))
4436 fprintf (f, " used");
4437 if (ipa_is_param_used_by_ipa_predicates (info, i))
4438 fprintf (f, " used_by_ipa_predicates");
4439 if (ipa_is_param_used_by_indirect_call (info, i))
4440 fprintf (f, " used_by_indirect_call");
4441 if (ipa_is_param_used_by_polymorphic_call (info, i))
4442 fprintf (f, " used_by_polymorphic_call");
4443 c = ipa_get_controlled_uses (info, i);
4444 if (c == IPA_UNDESCRIBED_USE)
4445 fprintf (f, " undescribed_use");
4446 else
4447 fprintf (f, " controlled_uses=%i", c);
4448 fprintf (f, "\n");
4449 }
4450 }
4451
4452 /* Print ipa_tree_map data structures of all functions in the
4453 callgraph to F. */
4454
4455 void
4456 ipa_print_all_params (FILE * f)
4457 {
4458 struct cgraph_node *node;
4459
4460 fprintf (f, "\nFunction parameters:\n");
4461 FOR_EACH_FUNCTION (node)
4462 ipa_print_node_params (f, node);
4463 }
4464
4465 /* Dump the AV linked list. */
4466
4467 void
4468 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4469 {
4470 bool comma = false;
4471 fprintf (f, " Aggregate replacements:");
4472 for (; av; av = av->next)
4473 {
4474 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4475 av->index, av->offset);
4476 print_generic_expr (f, av->value);
4477 comma = true;
4478 }
4479 fprintf (f, "\n");
4480 }
4481
/* Stream out jump function JUMP_FUNC to OB.  The field order here defines
   the wire format and must stay in sync with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* The type and the ADDR_EXPR flag share one number; the reader decodes
     them with division and modulo by two.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      /* For an ADDR_EXPR only the operand is streamed; the reader re-wraps
	 it.  */
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus the agg_preserved bit.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	/* Unary operations have no extra operand to stream.  */
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function entries, length-prefixed; the by_ref bit is
     only streamed when there is at least one entry.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  /* Optional known-bits information.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Optional value-range information.  */
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4602
/* Read in jump function JUMP_FUNC from IB.  This is the exact inverse of
   ipa_write_jump_function and must decode fields in the same order.  When
   PREVAILS is false the stream is still consumed to stay in sync, but the
   decoded data is mostly discarded.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  /* The low bit of VAL flags a constant streamed as the operand of an
     ADDR_EXPR; the remaining bits encode the jump function type.  */
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  /* Re-wrap the streamed operand in an ADDR_EXPR.  */
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function entries; the by_ref bit is only present when
     there is at least one entry.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  /* Optional known-bits information.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Optional value-range information.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4741
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* The flag bits must be packed in exactly this order;
     ipa_read_indirect_edge_info unpacks them in the same sequence.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and streamed) for aggregate or
     polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4773
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  INFO may be NULL (e.g. for
   non-prevailing nodes), in which case parameter-use flags are not
   updated.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  /* Unpack the flag bits in exactly the order they were packed by
     ipa_write_indirect_edge_info.  */
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  /* Record which caller parameter feeds this indirect (and possibly
     polymorphic) call.  */
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index , true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
4813
/* Stream out NODE info to OB: per-parameter data followed by the jump
   functions (and optional polymorphic call contexts) of each outgoing call
   edge, then of each indirect call edge.  Must stay in sync with
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0)<;
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      /* A zero record means the edge carries no argument information.  */
      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      /* The argument count is doubled; the low bit records whether a
	 polymorphic call context follows each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      /* Indirect edges additionally carry indirect call information.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4886
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  /* The count was streamed doubled, with the low bit recording whether a
     polymorphic call context follows each jump function.  */
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      /* The data must still be decoded to keep the stream position in
	 sync, but the results are thrown away.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
4930
/* Stream in NODE info from IB.  The inverse of ipa_write_node_info; a
   non-prevailing node's data is consumed but discarded.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails
				? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Consume the streamed move costs to stay in sync.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* Per-parameter "used" bits.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  /* Per-parameter controlled-use counts and types.  */
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
4985
4986 /* Write jump functions for nodes in SET. */
4987
4988 void
4989 ipa_prop_write_jump_functions (void)
4990 {
4991 struct cgraph_node *node;
4992 struct output_block *ob;
4993 unsigned int count = 0;
4994 lto_symtab_encoder_iterator lsei;
4995 lto_symtab_encoder_t encoder;
4996
4997 if (!ipa_node_params_sum || !ipa_edge_args_sum)
4998 return;
4999
5000 ob = create_output_block (LTO_section_jump_functions);
5001 encoder = ob->decl_state->symtab_node_encoder;
5002 ob->symbol = NULL;
5003 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5004 lsei_next_function_in_partition (&lsei))
5005 {
5006 node = lsei_cgraph_node (lsei);
5007 if (node->has_gimple_body_p ()
5008 && IPA_NODE_REF (node) != NULL)
5009 count++;
5010 }
5011
5012 streamer_write_uhwi (ob, count);
5013
5014 /* Process all of the functions. */
5015 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5016 lsei_next_function_in_partition (&lsei))
5017 {
5018 node = lsei_cgraph_node (lsei);
5019 if (node->has_gimple_body_p ()
5020 && IPA_NODE_REF (node) != NULL)
5021 ipa_write_node_info (ob, node);
5022 }
5023 streamer_write_char_stream (ob->main_stream, 0);
5024 produce_asm (ob, NULL);
5025 destroy_output_block (ob);
5026 }
5027
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Section layout: header, CFG part, main stream, string table.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  /* The section is a length-prefixed sequence of per-node records.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5068
5069 /* Read ipcp jump functions. */
5070
5071 void
5072 ipa_prop_read_jump_functions (void)
5073 {
5074 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5075 struct lto_file_decl_data *file_data;
5076 unsigned int j = 0;
5077
5078 ipa_check_create_node_params ();
5079 ipa_check_create_edge_args ();
5080 ipa_register_cgraph_hooks ();
5081
5082 while ((file_data = file_data_vec[j++]))
5083 {
5084 size_t len;
5085 const char *data
5086 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5087 &len);
5088 if (data)
5089 ipa_prop_read_section (file_data, data, len);
5090 }
5091 }
5092
/* Stream out the IPA-CP transformation summary of NODE (aggregate
   replacement values, parameter value ranges and known bits) to OB.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Length-prefix the chain of aggregate replacement values.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  /* Parameter value ranges, if present; otherwise a zero count.  */
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Per-parameter known-bits information, if present; otherwise a zero
     count.  */
  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
5168
/* Stream in the aggregate value replacement chain, the known value ranges and
   the known bits for NODE from IB, using the string/tree tables in DATA_IN.
   The layout must mirror write_ipcp_transformation_info exactly.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  /* Aggregate replacement values.  Entries are prepended, so the resulting
     chain is in reverse stream order.  */
  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Known value ranges of parameters; the transformation summary is only
     created when there is at least one entry.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known bits of parameters; entries without a streamed value/mask pair are
     left as the zero-cleared (null) elements of the grown vector.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
5239
5240 /* Write all aggregate replacement for nodes in set. */
5241
5242 void
5243 ipcp_write_transformation_summaries (void)
5244 {
5245 struct cgraph_node *node;
5246 struct output_block *ob;
5247 unsigned int count = 0;
5248 lto_symtab_encoder_iterator lsei;
5249 lto_symtab_encoder_t encoder;
5250
5251 ob = create_output_block (LTO_section_ipcp_transform);
5252 encoder = ob->decl_state->symtab_node_encoder;
5253 ob->symbol = NULL;
5254 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5255 lsei_next_function_in_partition (&lsei))
5256 {
5257 node = lsei_cgraph_node (lsei);
5258 if (node->has_gimple_body_p ())
5259 count++;
5260 }
5261
5262 streamer_write_uhwi (ob, count);
5263
5264 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5265 lsei_next_function_in_partition (&lsei))
5266 {
5267 node = lsei_cgraph_node (lsei);
5268 if (node->has_gimple_body_p ())
5269 write_ipcp_transformation_info (ob, node);
5270 }
5271 streamer_write_char_stream (ob->main_stream, 0);
5272 produce_asm (ob, NULL);
5273 destroy_output_block (ob);
5274 }
5275
5276 /* Read replacements section in file FILE_DATA of length LEN with data
5277 DATA. */
5278
5279 static void
5280 read_replacements_section (struct lto_file_decl_data *file_data,
5281 const char *data,
5282 size_t len)
5283 {
5284 const struct lto_function_header *header =
5285 (const struct lto_function_header *) data;
5286 const int cfg_offset = sizeof (struct lto_function_header);
5287 const int main_offset = cfg_offset + header->cfg_size;
5288 const int string_offset = main_offset + header->main_size;
5289 class data_in *data_in;
5290 unsigned int i;
5291 unsigned int count;
5292
5293 lto_input_block ib_main ((const char *) data + main_offset,
5294 header->main_size, file_data->mode_table);
5295
5296 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5297 header->string_size, vNULL);
5298 count = streamer_read_uhwi (&ib_main);
5299
5300 for (i = 0; i < count; i++)
5301 {
5302 unsigned int index;
5303 struct cgraph_node *node;
5304 lto_symtab_encoder_t encoder;
5305
5306 index = streamer_read_uhwi (&ib_main);
5307 encoder = file_data->symtab_node_encoder;
5308 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5309 index));
5310 gcc_assert (node->definition);
5311 read_ipcp_transformation_info (&ib_main, node, data_in);
5312 }
5313 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5314 len);
5315 lto_data_in_delete (data_in);
5316 }
5317
5318 /* Read IPA-CP aggregate replacements. */
5319
5320 void
5321 ipcp_read_transformation_summaries (void)
5322 {
5323 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5324 struct lto_file_decl_data *file_data;
5325 unsigned int j = 0;
5326
5327 while ((file_data = file_data_vec[j++]))
5328 {
5329 size_t len;
5330 const char *data
5331 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5332 &len);
5333 if (data)
5334 read_replacements_section (file_data, data, len);
5335 }
5336 }
5337
5338 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5339 NODE. */
5340
5341 static void
5342 adjust_agg_replacement_values (struct cgraph_node *node,
5343 struct ipa_agg_replacement_value *aggval)
5344 {
5345 struct ipa_agg_replacement_value *v;
5346
5347 if (!node->clone.param_adjustments)
5348 return;
5349
5350 auto_vec<int, 16> new_indices;
5351 node->clone.param_adjustments->get_updated_indices (&new_indices);
5352 for (v = aggval; v; v = v->next)
5353 {
5354 gcc_checking_assert (v->index >= 0);
5355
5356 if ((unsigned) v->index < new_indices.length ())
5357 v->index = new_indices[v->index];
5358 else
5359 /* This can happen if we know about a constant passed by reference by
5360 an argument which is never actually used for anything, let alone
5361 loading that constant. */
5362 v->index = -1;
5363 }
5364 }
5365
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being walked, DESCS are its parameter
     descriptors and AV the chain of aggregate replacement values to apply.
     *SC is set when any statement is modified, *CC when the CFG changes.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Body info of the function being modified.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Chain of known aggregate replacement values.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags: statement changed / CFG changed.  */
  bool *m_something_changed, *m_cfg_changed;
};
5386
/* For every load in BB that reads from an aggregate passed in a parameter and
   matches one of the recorded replacement values, substitute the known
   constant for the load.  Returns NULL (no edge redirection).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only consider loads of register-type values.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Identify which parameter and which offset within its aggregate the
	 load reads from, then look for a matching recorded value.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      /* The replacement must match in by-ref-ness and in exact size.  */
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  /* NOTE(review): this compares TYPE_SIZE trees by pointer identity,
	     which presumably relies on size trees being shared -- confirm.  */
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a possibly-trapping load with a constant may make EH
	 cleanups and their edges dead.  */
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5482
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  /* Determine the position of PARM in the current function's argument
     list.  */
  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  /* In a clone, translate the index through the parameter adjustments so it
     matches the indexing of the bits vector.  */
  if (cnode->clone.param_adjustments)
    {
      i = cnode->clone.param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
5518
5519
/* Update bits info of formal parameters of NODE as described in its
   ipcp_transformation summary: for integral parameters record nonzero bits on
   the default-def SSA name, for pointer parameters record alignment info.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* The bits vector is indexed by pre-clone parameter indices; when NODE is a
     clone with adjusted parameters, map them to the current indices.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Only integral and pointer SSA parameters with recorded bits are
	 interesting.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in the mask are unknown, bits in value are known; their
	     union over-approximates which bits may be nonzero.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: derive alignment from the lowest set bit of the
	     unknown-bits mask and misalignment from the known value.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Do not weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5628
5629 bool
5630 ipa_vr::nonzero_p (tree expr_type) const
5631 {
5632 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5633 return true;
5634
5635 unsigned prec = TYPE_PRECISION (expr_type);
5636 return (type == VR_RANGE
5637 && TYPE_UNSIGNED (expr_type)
5638 && wi::eq_p (min, wi::one (prec))
5639 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5640 }
5641
/* Update value range of formal parameters of NODE as described in its
   ipcp_transformation summary: set range info on integral default-def SSA
   names and nonnull info on pointer ones.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* The vr vector is indexed by pre-clone parameter indices; when NODE is a
     clone with adjusted parameters, map them to the current indices.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      /* Range info lives on SSA names; skip parameters without a default
	 definition or which are not SSA registers.  */
      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5723
/* IPCP transformation phase doing propagation of aggregate values.  Applies
   the node's transformation summary (bits, value ranges, aggregate constant
   replacements) to the function body of NODE.  Returns a TODO flag set, or 0
   when nothing changed.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Bits and value-range info are applied unconditionally; the rest of this
     function deals only with aggregate replacements.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function-body-analysis context for the dominator walk.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
5789
5790
5791 /* Return true if OTHER describes same agg value. */
5792 bool
5793 ipa_agg_value::equal_to (const ipa_agg_value &other)
5794 {
5795 return offset == other.offset
5796 && operand_equal_p (value, other.value, 0);
5797 }
5798 #include "gt-ipa-prop.h"