]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Move size time tables from GGC to heap
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
8d9254fc 2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
c7b6a758 55#include "options.h"
ae7a23a3 56#include "symtab-clones.h"
771578a0 57
dd912cb8
ML
58/* Function summary where the parameter infos are actually stored. */
59ipa_node_params_t *ipa_node_params_sum = NULL;
9d3e0adc
ML
60
61function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
62
6fe906a3
MJ
63/* Edge summary for IPA-CP edge information. */
64ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 65
86cd0334
MJ
66/* Traits for a hash table for reusing already existing ipa_bits. */
67
68struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
69{
70 typedef ipa_bits *value_type;
71 typedef ipa_bits *compare_type;
72 static hashval_t
73 hash (const ipa_bits *p)
74 {
75 hashval_t t = (hashval_t) p->value.to_shwi ();
76 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
77 }
78 static bool
79 equal (const ipa_bits *a, const ipa_bits *b)
80 {
81 return a->value == b->value && a->mask == b->mask;
82 }
7ca50de0 83 static const bool empty_zero_p = true;
86cd0334
MJ
84 static void
85 mark_empty (ipa_bits *&p)
86 {
87 p = NULL;
88 }
89 static bool
90 is_empty (const ipa_bits *p)
91 {
92 return p == NULL;
93 }
94 static bool
95 is_deleted (const ipa_bits *p)
96 {
97 return p == reinterpret_cast<const ipa_bits *> (1);
98 }
99 static void
100 mark_deleted (ipa_bits *&p)
101 {
102 p = reinterpret_cast<ipa_bits *> (1);
103 }
104};
105
106/* Hash table for avoid repeated allocations of equal ipa_bits. */
107static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
108
109/* Traits for a hash table for reusing value_ranges used for IPA. Note that
110 the equiv bitmap is not hashed and is expected to be NULL. */
111
028d81b1 112struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
86cd0334 113{
028d81b1
AH
114 typedef value_range *value_type;
115 typedef value_range *compare_type;
86cd0334 116 static hashval_t
028d81b1 117 hash (const value_range *p)
59b2c134 118 {
54994253 119 inchash::hash hstate (p->kind ());
74ca1c01
ML
120 inchash::add_expr (p->min (), hstate);
121 inchash::add_expr (p->max (), hstate);
59b2c134
JJ
122 return hstate.end ();
123 }
86cd0334 124 static bool
028d81b1 125 equal (const value_range *a, const value_range *b)
86cd0334 126 {
a8662390
ML
127 return (a->equal_p (*b)
128 && types_compatible_p (a->type (), b->type ()));
86cd0334 129 }
7ca50de0 130 static const bool empty_zero_p = true;
86cd0334 131 static void
028d81b1 132 mark_empty (value_range *&p)
86cd0334
MJ
133 {
134 p = NULL;
135 }
136 static bool
028d81b1 137 is_empty (const value_range *p)
86cd0334
MJ
138 {
139 return p == NULL;
140 }
141 static bool
028d81b1 142 is_deleted (const value_range *p)
86cd0334 143 {
028d81b1 144 return p == reinterpret_cast<const value_range *> (1);
86cd0334
MJ
145 }
146 static void
028d81b1 147 mark_deleted (value_range *&p)
86cd0334 148 {
028d81b1 149 p = reinterpret_cast<value_range *> (1);
86cd0334
MJ
150 }
151};
152
153/* Hash table for avoid repeated allocations of equal value_ranges. */
154static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
155
771578a0 156/* Holders of ipa cgraph hooks: */
40982661 157static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 158
4502fe8d
MJ
159/* Description of a reference to an IPA constant. */
160struct ipa_cst_ref_desc
161{
162 /* Edge that corresponds to the statement which took the reference. */
163 struct cgraph_edge *cs;
164 /* Linked list of duplicates created when call graph edges are cloned. */
165 struct ipa_cst_ref_desc *next_duplicate;
166 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
167 if out of control. */
168 int refcount;
169};
170
171/* Allocation pool for reference descriptions. */
172
fb0b2914 173static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
fcb87c50 174 ("IPA-PROP ref descriptions");
4502fe8d 175
5fe8e757
MJ
176/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
177 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
178
179static bool
180ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
181{
67348ccc 182 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
183
184 if (!fs_opts)
185 return false;
2bf86c84 186 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
187}
188
be95e2b9
MJ
189/* Return index of the formal whose tree is PTREE in function which corresponds
190 to INFO. */
191
d044dd17 192static int
f65f1ae3
MJ
193ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
194 tree ptree)
518dc859
RL
195{
196 int i, count;
197
f65f1ae3 198 count = vec_safe_length (descriptors);
518dc859 199 for (i = 0; i < count; i++)
f65f1ae3 200 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
201 return i;
202
203 return -1;
204}
205
d044dd17
MJ
206/* Return index of the formal whose tree is PTREE in function which corresponds
207 to INFO. */
208
209int
99b1c316 210ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
d044dd17
MJ
211{
212 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
213}
214
215/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
216 NODE. */
be95e2b9 217
f8e2a1ed
MJ
218static void
219ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 220 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
221{
222 tree fndecl;
223 tree fnargs;
224 tree parm;
225 int param_num;
3e293154 226
67348ccc 227 fndecl = node->decl;
0e8853ee 228 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
229 fnargs = DECL_ARGUMENTS (fndecl);
230 param_num = 0;
910ad8de 231 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 232 {
209ca542 233 descriptors[param_num].decl_or_type = parm;
40a777e8
JH
234 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
235 descriptors[param_num].move_cost = cost;
236 /* Watch overflow, move_cost is a bitfield. */
237 gcc_checking_assert (cost == descriptors[param_num].move_cost);
518dc859
RL
238 param_num++;
239 }
240}
241
3f84bf08
MJ
242/* Return how many formal parameters FNDECL has. */
243
fd29c024 244int
310bc633 245count_formal_params (tree fndecl)
3f84bf08
MJ
246{
247 tree parm;
248 int count = 0;
0e8853ee 249 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 250
910ad8de 251 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
252 count++;
253
254 return count;
255}
256
0e8853ee
JH
257/* Return the declaration of Ith formal parameter of the function corresponding
258 to INFO. Note there is no setter function as this array is built just once
259 using ipa_initialize_node_params. */
260
261void
99b1c316 262ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
0e8853ee
JH
263{
264 fprintf (file, "param #%i", i);
f65f1ae3 265 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
266 {
267 fprintf (file, " ");
ef6cb4c7 268 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
269 }
270}
271
159f01f8
MJ
272/* If necessary, allocate vector of parameter descriptors in info of NODE.
273 Return true if they were allocated, false if not. */
0e8853ee 274
159f01f8 275static bool
0e8853ee
JH
276ipa_alloc_node_params (struct cgraph_node *node, int param_count)
277{
6cf67b62 278 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
0e8853ee 279
f65f1ae3 280 if (!info->descriptors && param_count)
159f01f8 281 {
cb3874dc 282 vec_safe_grow_cleared (info->descriptors, param_count, true);
159f01f8
MJ
283 return true;
284 }
285 else
286 return false;
0e8853ee
JH
287}
288
f8e2a1ed
MJ
289/* Initialize the ipa_node_params structure associated with NODE by counting
290 the function parameters, creating the descriptors and populating their
291 param_decls. */
be95e2b9 292
f8e2a1ed
MJ
293void
294ipa_initialize_node_params (struct cgraph_node *node)
295{
6cf67b62 296 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
f8e2a1ed 297
159f01f8
MJ
298 if (!info->descriptors
299 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
300 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
301}
302
749aa96d
MJ
303/* Print the jump functions associated with call graph edge CS to file F. */
304
305static void
306ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
307{
308 int i, count;
309
310 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
311 for (i = 0; i < count; i++)
312 {
313 struct ipa_jump_func *jump_func;
314 enum jump_func_type type;
315
316 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
317 type = jump_func->type;
318
319 fprintf (f, " param %d: ", i);
320 if (type == IPA_JF_UNKNOWN)
321 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
322 else if (type == IPA_JF_CONST)
323 {
4502fe8d 324 tree val = jump_func->value.constant.value;
749aa96d 325 fprintf (f, "CONST: ");
ef6cb4c7 326 print_generic_expr (f, val);
749aa96d
MJ
327 if (TREE_CODE (val) == ADDR_EXPR
328 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
329 {
330 fprintf (f, " -> ");
ef6cb4c7 331 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
332 }
333 fprintf (f, "\n");
334 }
749aa96d
MJ
335 else if (type == IPA_JF_PASS_THROUGH)
336 {
337 fprintf (f, "PASS THROUGH: ");
8b7773a4 338 fprintf (f, "%d, op %s",
749aa96d 339 jump_func->value.pass_through.formal_id,
5806f481 340 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 341 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
342 {
343 fprintf (f, " ");
ef6cb4c7 344 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
345 }
346 if (jump_func->value.pass_through.agg_preserved)
347 fprintf (f, ", agg_preserved");
3ea6239f 348 fprintf (f, "\n");
749aa96d
MJ
349 }
350 else if (type == IPA_JF_ANCESTOR)
351 {
352 fprintf (f, "ANCESTOR: ");
16998094 353 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
354 jump_func->value.ancestor.formal_id,
355 jump_func->value.ancestor.offset);
8b7773a4
MJ
356 if (jump_func->value.ancestor.agg_preserved)
357 fprintf (f, ", agg_preserved");
3ea6239f 358 fprintf (f, "\n");
749aa96d 359 }
8b7773a4
MJ
360
361 if (jump_func->agg.items)
362 {
363 struct ipa_agg_jf_item *item;
364 int j;
365
366 fprintf (f, " Aggregate passed by %s:\n",
367 jump_func->agg.by_ref ? "reference" : "value");
eb270950 368 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
8b7773a4
MJ
369 {
370 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
371 item->offset);
eb270950
FX
372 fprintf (f, "type: ");
373 print_generic_expr (f, item->type);
374 fprintf (f, ", ");
375 if (item->jftype == IPA_JF_PASS_THROUGH)
376 fprintf (f, "PASS THROUGH: %d,",
377 item->value.pass_through.formal_id);
378 else if (item->jftype == IPA_JF_LOAD_AGG)
379 {
380 fprintf (f, "LOAD AGG: %d",
381 item->value.pass_through.formal_id);
382 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
383 item->value.load_agg.offset,
384 item->value.load_agg.by_ref ? "reference"
385 : "value");
386 }
387
388 if (item->jftype == IPA_JF_PASS_THROUGH
389 || item->jftype == IPA_JF_LOAD_AGG)
390 {
391 fprintf (f, " op %s",
392 get_tree_code_name (item->value.pass_through.operation));
393 if (item->value.pass_through.operation != NOP_EXPR)
394 {
395 fprintf (f, " ");
396 print_generic_expr (f, item->value.pass_through.operand);
397 }
398 }
399 else if (item->jftype == IPA_JF_CONST)
8b7773a4 400 {
eb270950
FX
401 fprintf (f, "CONST: ");
402 print_generic_expr (f, item->value.constant);
8b7773a4 403 }
eb270950
FX
404 else if (item->jftype == IPA_JF_UNKNOWN)
405 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
406 tree_to_uhwi (TYPE_SIZE (item->type)));
8b7773a4
MJ
407 fprintf (f, "\n");
408 }
409 }
44210a96 410
99b1c316 411 class ipa_polymorphic_call_context *ctx
44210a96
MJ
412 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
413 if (ctx && !ctx->useless_p ())
414 {
415 fprintf (f, " Context: ");
416 ctx->dump (dump_file);
417 }
04be694e 418
86cd0334 419 if (jump_func->bits)
209ca542 420 {
86cd0334
MJ
421 fprintf (f, " value: ");
422 print_hex (jump_func->bits->value, f);
423 fprintf (f, ", mask: ");
424 print_hex (jump_func->bits->mask, f);
209ca542
PK
425 fprintf (f, "\n");
426 }
427 else
428 fprintf (f, " Unknown bits\n");
8bc5448f 429
86cd0334 430 if (jump_func->m_vr)
8bc5448f
KV
431 {
432 fprintf (f, " VR ");
433 fprintf (f, "%s[",
54994253
AH
434 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
435 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
8bc5448f 436 fprintf (f, ", ");
54994253 437 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
8bc5448f
KV
438 fprintf (f, "]\n");
439 }
440 else
441 fprintf (f, " Unknown VR\n");
749aa96d
MJ
442 }
443}
444
445
be95e2b9
MJ
446/* Print the jump functions of all arguments on all call graph edges going from
447 NODE to file F. */
448
518dc859 449void
3e293154 450ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 451{
3e293154 452 struct cgraph_edge *cs;
518dc859 453
464d0118 454 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
3e293154
MJ
455 for (cs = node->callees; cs; cs = cs->next_callee)
456 {
3e293154 457
464d0118
ML
458 fprintf (f, " callsite %s -> %s : \n",
459 node->dump_name (),
460 cs->callee->dump_name ());
0302955a
JH
461 if (!ipa_edge_args_info_available_for_edge_p (cs))
462 fprintf (f, " no arg info\n");
463 else
464 ipa_print_node_jump_functions_for_edge (f, cs);
749aa96d 465 }
518dc859 466
9de04252 467 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 468 {
99b1c316 469 class cgraph_indirect_call_info *ii;
3e293154 470
9de04252
MJ
471 ii = cs->indirect_info;
472 if (ii->agg_contents)
c13bc3d9 473 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 474 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 475 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
476 ii->param_index, ii->offset,
477 ii->by_ref ? "by reference" : "by_value");
478 else
85942f45
JH
479 fprintf (f, " indirect %s callsite, calling param %i, "
480 "offset " HOST_WIDE_INT_PRINT_DEC,
481 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
482 ii->offset);
9de04252 483
749aa96d
MJ
484 if (cs->call_stmt)
485 {
9de04252 486 fprintf (f, ", for stmt ");
749aa96d 487 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 488 }
749aa96d 489 else
9de04252 490 fprintf (f, "\n");
ba392339
JH
491 if (ii->polymorphic)
492 ii->context.dump (f);
0302955a
JH
493 if (!ipa_edge_args_info_available_for_edge_p (cs))
494 fprintf (f, " no arg info\n");
495 else
496 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
497 }
498}
499
500/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 501
3e293154
MJ
502void
503ipa_print_all_jump_functions (FILE *f)
504{
505 struct cgraph_node *node;
506
ca30a539 507 fprintf (f, "\nJump functions:\n");
65c70e6b 508 FOR_EACH_FUNCTION (node)
3e293154
MJ
509 {
510 ipa_print_node_jump_functions (f, node);
511 }
512}
513
04be694e
MJ
514/* Set jfunc to be a know-really nothing jump function. */
515
516static void
517ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
518{
519 jfunc->type = IPA_JF_UNKNOWN;
04be694e
MJ
520}
521
b8f6e610
MJ
522/* Set JFUNC to be a copy of another jmp (to be used by jump function
523 combination code). The two functions will share their rdesc. */
524
525static void
526ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
527 struct ipa_jump_func *src)
528
529{
530 gcc_checking_assert (src->type == IPA_JF_CONST);
531 dst->type = IPA_JF_CONST;
532 dst->value.constant = src->value.constant;
533}
534
7b872d9e
MJ
535/* Set JFUNC to be a constant jmp function. */
536
537static void
4502fe8d
MJ
538ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
539 struct cgraph_edge *cs)
7b872d9e
MJ
540{
541 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
542 jfunc->value.constant.value = unshare_expr_without_location (constant);
543
544 if (TREE_CODE (constant) == ADDR_EXPR
545 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
546 {
547 struct ipa_cst_ref_desc *rdesc;
4502fe8d 548
601f3293 549 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
550 rdesc->cs = cs;
551 rdesc->next_duplicate = NULL;
552 rdesc->refcount = 1;
553 jfunc->value.constant.rdesc = rdesc;
554 }
555 else
556 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
557}
558
559/* Set JFUNC to be a simple pass-through jump function. */
560static void
8b7773a4 561ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 562 bool agg_preserved)
7b872d9e
MJ
563{
564 jfunc->type = IPA_JF_PASS_THROUGH;
565 jfunc->value.pass_through.operand = NULL_TREE;
566 jfunc->value.pass_through.formal_id = formal_id;
567 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 568 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
569}
570
a2b4c188
KV
571/* Set JFUNC to be an unary pass through jump function. */
572
573static void
574ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
575 enum tree_code operation)
576{
577 jfunc->type = IPA_JF_PASS_THROUGH;
578 jfunc->value.pass_through.operand = NULL_TREE;
579 jfunc->value.pass_through.formal_id = formal_id;
580 jfunc->value.pass_through.operation = operation;
581 jfunc->value.pass_through.agg_preserved = false;
582}
7b872d9e
MJ
583/* Set JFUNC to be an arithmetic pass through jump function. */
584
585static void
586ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
587 tree operand, enum tree_code operation)
588{
589 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 590 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
591 jfunc->value.pass_through.formal_id = formal_id;
592 jfunc->value.pass_through.operation = operation;
8b7773a4 593 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
594}
595
596/* Set JFUNC to be an ancestor jump function. */
597
598static void
599ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 600 int formal_id, bool agg_preserved)
7b872d9e
MJ
601{
602 jfunc->type = IPA_JF_ANCESTOR;
603 jfunc->value.ancestor.formal_id = formal_id;
604 jfunc->value.ancestor.offset = offset;
8b7773a4 605 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
606}
607
8aab5218
MJ
608/* Get IPA BB information about the given BB. FBI is the context of analyzis
609 of this function body. */
610
611static struct ipa_bb_info *
56b40062 612ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
613{
614 gcc_checking_assert (fbi);
615 return &fbi->bb_infos[bb->index];
616}
617
f65cf2b7
MJ
618/* Structure to be passed in between detect_type_change and
619 check_stmt_for_type_change. */
620
11478306 621struct prop_type_change_info
f65cf2b7 622{
290ebcb7
MJ
623 /* Offset into the object where there is the virtual method pointer we are
624 looking for. */
625 HOST_WIDE_INT offset;
626 /* The declaration or SSA_NAME pointer of the base that we are checking for
627 type change. */
628 tree object;
f65cf2b7
MJ
629 /* Set to true if dynamic type change has been detected. */
630 bool type_maybe_changed;
631};
632
633/* Return true if STMT can modify a virtual method table pointer.
634
635 This function makes special assumptions about both constructors and
636 destructors which are all the functions that are allowed to alter the VMT
637 pointers. It assumes that destructors begin with assignment into all VMT
638 pointers and that constructors essentially look in the following way:
639
640 1) The very first thing they do is that they call constructors of ancestor
641 sub-objects that have them.
642
643 2) Then VMT pointers of this and all its ancestors is set to new values
644 corresponding to the type corresponding to the constructor.
645
646 3) Only afterwards, other stuff such as constructor of member sub-objects
647 and the code written by the user is run. Only this may include calling
648 virtual functions, directly or indirectly.
649
650 There is no way to call a constructor of an ancestor sub-object in any
651 other way.
652
653 This means that we do not have to care whether constructors get the correct
654 type information because they will always change it (in fact, if we define
655 the type to be given by the VMT pointer, it is undefined).
656
657 The most important fact to derive from the above is that if, for some
658 statement in the section 3, we try to detect whether the dynamic type has
659 changed, we can safely ignore all calls as we examine the function body
660 backwards until we reach statements in section 2 because these calls cannot
661 be ancestor constructors or destructors (if the input is not bogus) and so
662 do not change the dynamic type (this holds true only for automatically
663 allocated objects but at the moment we devirtualize only these). We then
664 must detect that statements in section 2 change the dynamic type and can try
665 to derive the new type. That is enough and we can stop, we will never see
666 the calls into constructors of sub-objects in this code. Therefore we can
667 safely ignore all call statements that we traverse.
668 */
669
670static bool
355fe088 671stmt_may_be_vtbl_ptr_store (gimple *stmt)
f65cf2b7
MJ
672{
673 if (is_gimple_call (stmt))
674 return false;
70f633c5
JH
675 if (gimple_clobber_p (stmt))
676 return false;
f65cf2b7
MJ
677 else if (is_gimple_assign (stmt))
678 {
679 tree lhs = gimple_assign_lhs (stmt);
680
0004f992
MJ
681 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
682 {
683 if (flag_strict_aliasing
684 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
685 return false;
686
687 if (TREE_CODE (lhs) == COMPONENT_REF
688 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 689 return false;
450aa0ee 690 /* In the future we might want to use get_ref_base_and_extent to find
0004f992
MJ
691 if there is a field corresponding to the offset and if so, proceed
692 almost like if it was a component ref. */
693 }
f65cf2b7
MJ
694 }
695 return true;
696}
697
3b97a5c7
MJ
698/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
699 to check whether a particular statement may modify the virtual table
700 pointerIt stores its result into DATA, which points to a
11478306 701 prop_type_change_info structure. */
f65cf2b7
MJ
702
703static bool
704check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
705{
355fe088 706 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 707 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
708
709 if (stmt_may_be_vtbl_ptr_store (stmt))
710 {
711 tci->type_maybe_changed = true;
712 return true;
713 }
714 else
715 return false;
716}
717
058d0a90
JH
718/* See if ARG is PARAM_DECl describing instance passed by pointer
719 or reference in FUNCTION. Return false if the dynamic type may change
720 in between beggining of the function until CALL is invoked.
290ebcb7 721
058d0a90 722 Generally functions are not allowed to change type of such instances,
67914693 723 but they call destructors. We assume that methods cannot destroy the THIS
058d0a90
JH
724 pointer. Also as a special cases, constructor and destructors may change
725 type of the THIS pointer. */
726
727static bool
355fe088 728param_type_may_change_p (tree function, tree arg, gimple *call)
058d0a90 729{
67914693 730 /* Pure functions cannot do any changes on the dynamic type;
058d0a90
JH
731 that require writting to memory. */
732 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
733 return false;
734 /* We need to check if we are within inlined consturctor
735 or destructor (ideally we would have way to check that the
736 inline cdtor is actually working on ARG, but we don't have
737 easy tie on this, so punt on all non-pure cdtors.
738 We may also record the types of cdtors and once we know type
739 of the instance match them.
740
741 Also code unification optimizations may merge calls from
742 different blocks making return values unreliable. So
743 do nothing during late optimization. */
744 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
745 return true;
746 if (TREE_CODE (arg) == SSA_NAME
747 && SSA_NAME_IS_DEFAULT_DEF (arg)
748 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
749 {
750 /* Normal (non-THIS) argument. */
751 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
752 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
026c3cfd 753 /* THIS pointer of an method - here we want to watch constructors
058d0a90
JH
754 and destructors as those definitely may change the dynamic
755 type. */
756 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
757 && !DECL_CXX_CONSTRUCTOR_P (function)
758 && !DECL_CXX_DESTRUCTOR_P (function)
759 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
760 {
761 /* Walk the inline stack and watch out for ctors/dtors. */
762 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
763 block = BLOCK_SUPERCONTEXT (block))
00a0ea64
JJ
764 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
765 return true;
058d0a90
JH
766 return false;
767 }
768 }
769 return true;
770}
290ebcb7 771
06d65050
JH
772/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
773 callsite CALL) by looking for assignments to its virtual table pointer. If
c199f329 774 it is, return true. ARG is the object itself (not a pointer
06d65050 775 to it, unless dereferenced). BASE is the base of the memory access as
058d0a90
JH
776 returned by get_ref_base_and_extent, as is the offset.
777
778 This is helper function for detect_type_change and detect_type_change_ssa
779 that does the heavy work which is usually unnecesary. */
f65cf2b7
MJ
780
781static bool
c628d1c3
MJ
782detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
783 tree base, tree comp_type, gcall *call,
058d0a90 784 HOST_WIDE_INT offset)
f65cf2b7 785{
11478306 786 struct prop_type_change_info tci;
f65cf2b7
MJ
787 ao_ref ao;
788
789 gcc_checking_assert (DECL_P (arg)
790 || TREE_CODE (arg) == MEM_REF
791 || handled_component_p (arg));
f65cf2b7 792
b49407f8
JH
793 comp_type = TYPE_MAIN_VARIANT (comp_type);
794
d570d364
JH
795 /* Const calls cannot call virtual methods through VMT and so type changes do
796 not matter. */
797 if (!flag_devirtualize || !gimple_vuse (call)
798 /* Be sure expected_type is polymorphic. */
799 || !comp_type
800 || TREE_CODE (comp_type) != RECORD_TYPE
801 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
802 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
803 return true;
4bf2a588 804
dd887943 805 ao_ref_init (&ao, arg);
f65cf2b7
MJ
806 ao.base = base;
807 ao.offset = offset;
808 ao.size = POINTER_SIZE;
809 ao.max_size = ao.size;
f65cf2b7 810
290ebcb7
MJ
811 tci.offset = offset;
812 tci.object = get_base_address (arg);
290ebcb7 813 tci.type_maybe_changed = false;
290ebcb7 814
c628d1c3
MJ
815 int walked
816 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
817 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
818
819 if (walked >= 0 && !tci.type_maybe_changed)
f65cf2b7
MJ
820 return false;
821
f65cf2b7
MJ
822 return true;
823}
824
058d0a90 825/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
c199f329 826 If it is, return true. ARG is the object itself (not a pointer
058d0a90
JH
827 to it, unless dereferenced). BASE is the base of the memory access as
828 returned by get_ref_base_and_extent, as is the offset. */
829
830static bool
c628d1c3 831detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
c199f329 832 tree comp_type, gcall *call,
c628d1c3 833 HOST_WIDE_INT offset)
058d0a90
JH
834{
835 if (!flag_devirtualize)
836 return false;
837
838 if (TREE_CODE (base) == MEM_REF
839 && !param_type_may_change_p (current_function_decl,
840 TREE_OPERAND (base, 0),
841 call))
842 return false;
c628d1c3 843 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
c199f329 844 call, offset);
058d0a90
JH
845}
846
f65cf2b7
MJ
847/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
848 SSA name (its dereference will become the base and the offset is assumed to
849 be zero). */
850
851static bool
c628d1c3 852detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
c199f329 853 gcall *call)
f65cf2b7
MJ
854{
855 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 856 if (!flag_devirtualize
06d65050 857 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
858 return false;
859
058d0a90
JH
860 if (!param_type_may_change_p (current_function_decl, arg, call))
861 return false;
862
f65cf2b7 863 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 864 build_int_cst (ptr_type_node, 0));
f65cf2b7 865
c628d1c3 866 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
c199f329 867 call, 0);
f65cf2b7
MJ
868}
869
fdb0e1b4
MJ
870/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
871 boolean variable pointed to by DATA. */
872
873static bool
874mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
875 void *data)
876{
877 bool *b = (bool *) data;
878 *b = true;
879 return true;
880}
881
8aab5218
MJ
882/* Find the nearest valid aa status for parameter specified by INDEX that
883 dominates BB. */
884
56b40062
MJ
885static struct ipa_param_aa_status *
886find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
887 int index)
888{
889 while (true)
890 {
891 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
892 if (!bb)
893 return NULL;
894 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
895 if (!bi->param_aa_statuses.is_empty ()
896 && bi->param_aa_statuses[index].valid)
897 return &bi->param_aa_statuses[index];
898 }
899}
900
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, one element per parameter,
     all zero-initialized (and therefore marked invalid).  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A not-yet-valid entry must still be in its cleared state.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      /* Seed the status from the nearest dominating BB that already has
	 one; otherwise start from the all-clear state.  */
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
929
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified in the body at all.  */
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Use the cached per-BB result when a modification was already found.  */
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* Scan the virtual-definition chain for stores that may alias the
     parameter, within the remaining alias-analysis walk budget.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: conservatively treat the parameter as modified
	 and stop paying for further walks in this function.  */
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  /* NOTE(review): paa is always non-NULL here (asserted above), so the
     "paa &&" guard is redundant but harmless.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
970
a2b4c188
KV
971/* If STMT is an assignment that loads a value from an parameter declaration,
972 return the index of the parameter in ipa_node_params which has not been
973 modified. Otherwise return -1. */
974
975static int
976load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 977 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
978 gimple *stmt)
979{
bda2bc48
MJ
980 int index;
981 tree op1;
982
a2b4c188
KV
983 if (!gimple_assign_single_p (stmt))
984 return -1;
985
bda2bc48
MJ
986 op1 = gimple_assign_rhs1 (stmt);
987 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
988 return -1;
989
bda2bc48
MJ
990 index = ipa_get_param_decl_index_1 (descriptors, op1);
991 if (index < 0
992 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
993 return -1;
994
bda2bc48 995 return index;
a2b4c188
KV
996}
997
8aab5218
MJ
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Use the cached per-BB result when a modification was already found.  */
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* Scan aliasing virtual definitions before STMT, bounded by the remaining
     alias-analysis walk budget.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: conservatively assume modification and disable
	 further walks.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a negative result for other statements in the same BB.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1031
8aab5218
MJ
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  /* Use the cached per-BB result when a modification was already found.  */
  if (paa->pt_modified)
    return false;

  /* Track the whole pointed-to object (unknown size).  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: conservatively assume modification and disable
	 further walks.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1071
91bb9f80
MJ
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it return
   false if it cannot prove that the value has not been modified before the
   load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true even
   if it cannot prove the value has not been modified, in that case it will
   store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  /* Case 1: the aggregate is passed by value; the base is the PARM_DECL
     itself.  */
  if (DECL_P (base))
    {
      /* NOTE(review): this inner INDEX shadows the outer variable; both hold
	 a parameter index, the shadowing is harmless here.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: the aggregate is passed by reference; the base must be a
     zero-offset MEM_REF whose pointer is (derived from) a parameter.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      /* The pointer is the unmodified parameter itself.  */
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* In strict mode (no GUARANTEED_UNMODIFIED out-param), an unproven
	 load fails outright.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1166
eb270950
FX
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within
   the aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration; offset -1 marks a non-aggregate
     load for the caller.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  /* The reference must cover exactly its type's size; anything else was
     filtered out above.  */
  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      /* By-value loads only make sense from aggregate-typed parameters.  */
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1227
c7b6a758
JH
1228/* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1229 to find original pointer. Initialize RET to the pointer which results from
1230 the walk.
1231 If offset is known return true and initialize OFFSET_RET. */
1232
1233bool
1234unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1235{
1236 poly_int64 offset = 0;
1237 bool offset_known = true;
1238 int i;
1239
1240 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1241 {
1242 if (TREE_CODE (op) == ADDR_EXPR)
1243 {
1244 poly_int64 extra_offset = 0;
1245 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1246 &offset);
1247 if (!base)
1248 {
1249 base = get_base_address (TREE_OPERAND (op, 0));
1250 if (TREE_CODE (base) != MEM_REF)
1251 break;
1252 offset_known = false;
1253 }
1254 else
1255 {
1256 if (TREE_CODE (base) != MEM_REF)
1257 break;
1258 offset += extra_offset;
1259 }
1260 op = TREE_OPERAND (base, 0);
1261 if (mem_ref_offset (base).to_shwi (&extra_offset))
1262 offset += extra_offset;
1263 else
1264 offset_known = false;
1265 }
1266 else if (TREE_CODE (op) == SSA_NAME
1267 && !SSA_NAME_IS_DEFAULT_DEF (op))
1268 {
1269 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1270
1271 if (gimple_assign_single_p (pstmt))
1272 op = gimple_assign_rhs1 (pstmt);
1273 else if (is_gimple_assign (pstmt)
1274 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1275 {
1276 poly_int64 extra_offset = 0;
1277 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1278 &extra_offset))
1279 offset += extra_offset;
1280 else
1281 offset_known = false;
1282 op = gimple_assign_rhs1 (pstmt);
1283 }
1284 else
1285 break;
1286 }
1287 else
1288 break;
1289 }
1290 *ret = op;
1291 *offset_ret = offset;
1292 return offset_known;
1293}
1294
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Find the parameter index the RHS value ultimately comes from, either
     directly (default-def SSA name) or through an unmodified load.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1 and 2: (arithmetic) pass-through of a parameter.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	  /* Fall through into the empty default.  */
	default:;
	}
      return;
    }

  /* Case 3: taking the address of a field of an object pointed to by a
     parameter -- an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1435
40591473
MJ
1436/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1437 it looks like:
1438
1439 iftmp.1_3 = &obj_2(D)->D.1762;
1440
1441 The base of the MEM_REF must be a default definition SSA NAME of a
1442 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1443 whole MEM_REF expression is returned and the offset calculated from any
1444 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1445 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1446
1447static tree
355fe088 1448get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1449{
588db50c 1450 HOST_WIDE_INT size;
40591473 1451 tree expr, parm, obj;
ee45a32d 1452 bool reverse;
40591473
MJ
1453
1454 if (!gimple_assign_single_p (assign))
1455 return NULL_TREE;
1456 expr = gimple_assign_rhs1 (assign);
1457
1458 if (TREE_CODE (expr) != ADDR_EXPR)
1459 return NULL_TREE;
1460 expr = TREE_OPERAND (expr, 0);
1461 obj = expr;
588db50c 1462 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1463
aca52e6f
RS
1464 offset_int mem_offset;
1465 if (!expr
1466 || TREE_CODE (expr) != MEM_REF
1467 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1468 return NULL_TREE;
1469 parm = TREE_OPERAND (expr, 0);
1470 if (TREE_CODE (parm) != SSA_NAME
1471 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1472 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1473 return NULL_TREE;
1474
aca52e6f 1475 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1476 *obj_p = obj;
1477 return expr;
1478}
1479
685b0d13 1480
b258210c
MJ
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the literal null pointer; TMP is the other.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must come from the "&obj->field" assignment in a block with a single
     predecessor (the null-check block).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor must end with "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* The PHI block must be fed only by the condition and assignment blocks,
     so no other value can flow in.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1561
be95e2b9
MJ
1562/* Inspect the given TYPE and return true iff it has the same structure (the
1563 same number of fields of the same types) as a C++ member pointer. If
1564 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1565 corresponding fields there. */
1566
3e293154
MJ
1567static bool
1568type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1569{
1570 tree fld;
1571
1572 if (TREE_CODE (type) != RECORD_TYPE)
1573 return false;
1574
1575 fld = TYPE_FIELDS (type);
1576 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1577 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1578 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1579 return false;
1580
1581 if (method_ptr)
1582 *method_ptr = fld;
1583
910ad8de 1584 fld = DECL_CHAIN (fld);
8b7773a4 1585 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1586 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1587 return false;
1588 if (delta)
1589 *delta = fld;
1590
910ad8de 1591 if (DECL_CHAIN (fld))
3e293154
MJ
1592 return false;
1593
1594 return true;
1595}
1596
61502ca8 1597/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
eb270950
FX
1598 return the rhs of its defining statement, and this statement is stored in
1599 *RHS_STMT. Otherwise return RHS as it is. */
7ec49257
MJ
1600
1601static inline tree
eb270950 1602get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
7ec49257
MJ
1603{
1604 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1605 {
355fe088 1606 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1607
1608 if (gimple_assign_single_p (def_stmt))
1609 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1610 else
1611 break;
eb270950 1612 *rhs_stmt = def_stmt;
7ec49257
MJ
1613 }
1614 return rhs;
1615}
1616
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate, in bits.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1633
eb270950
FX
1634/* Add an aggregate content item into a linked list of
1635 ipa_known_agg_contents_list structure, in which all elements
1636 are sorted ascendingly by offset. */
0d48ee34 1637
46771da5
FX
1638static inline void
1639add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1640 struct ipa_known_agg_contents_list *item)
0d48ee34 1641{
46771da5
FX
1642 struct ipa_known_agg_contents_list *list = *plist;
1643
1644 for (; list; list = list->next)
0d48ee34 1645 {
46771da5
FX
1646 if (list->offset >= item->offset)
1647 break;
1648
1649 plist = &list->next;
0d48ee34
MJ
1650 }
1651
46771da5
FX
1652 item->next = list;
1653 *plist = item;
1654}
1655
eb270950 1656/* Check whether a given aggregate content is clobbered by certain element in
46771da5
FX
1657 a linked list of ipa_known_agg_contents_list. */
1658
1659static inline bool
1660clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1661 struct ipa_known_agg_contents_list *item)
1662{
1663 for (; list; list = list->next)
0d48ee34 1664 {
46771da5
FX
1665 if (list->offset >= item->offset)
1666 return list->offset < item->offset + item->size;
1667
1668 if (list->offset + list->size > item->offset)
1669 return true;
0d48ee34 1670 }
46771da5
FX
1671
1672 return false;
0d48ee34
MJ
1673}
1674
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  An offset
	     of -1 distinguishes a plain pass-through from a load from the
	     parameter's aggregate.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	/* Neither a parameter-derived value nor a constant: nothing usable
	   is known about this part, skip it.  */
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Record the offset relative to the start of the passed argument.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1721
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	return;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
	{
	  /* Constant content: FORMAL_ID stays -1.  */
	  agg_value->pass_through.operand = rhs1;
	  return;
	}
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	 tcc_binary, this subtleness is somewhat misleading.

	 Since tcc_unary is widely used in IPA-CP code to check an operation
	 with one operand, here we only allow tc_unary operation to avoid
	 possible problem.  Then we can use (opclass == tc_unary) or not to
	 distinguish unary and binary.  */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
	gimple *rhs1_stmt = stmt;
	gimple *rhs2_stmt = stmt;
	tree rhs2 = gimple_assign_rhs2 (stmt);

	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	if (is_gimple_ip_invariant (rhs2))
	  {
	    agg_value->pass_through.operand = rhs2;
	    stmt = rhs1_stmt;
	  }
	else if (is_gimple_ip_invariant (rhs1))
	  {
	    /* Normalize "const OP x" into "x OP' const" so the parameter
	       side is always RHS1; comparisons are swapped, other codes
	       must be commutative.  */
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      code = swap_tree_comparison (code);
	    else if (!commutative_tree_code (code))
	      return;

	    agg_value->pass_through.operand = rhs1;
	    stmt = rhs2_stmt;
	    rhs1 = rhs2;
	  }
	else
	  return;

	if (TREE_CODE_CLASS (code) != tcc_comparison
	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
	  return;
      }
      break;

    default:
      return;
    }

  /* Determine where the non-constant operand comes from: either a load from
     a parameter (or its aggregate), or a parameter's default-def SSA name.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
1864
46771da5
FX
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  /* The store must target the tracked object: either *(BASE + 0) when the
     aggregate is accessed through a pointer, or BASE itself otherwise.  */
  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  /* Describe the stored value (constant / pass-through / load-agg).  */
  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
1906
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip through PHIs whose arguments do not clobber R; the budget
	     limits how much alias-analysis work may be spent.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot analyze ends the walk
	     conservatively.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
2053
46771da5 2054
5d5f1e95
KV
2055/* Return the Ith param type of callee associated with call graph
2056 edge E. */
2057
2058tree
06d65050
JH
2059ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2060{
2061 int n;
2062 tree type = (e->callee
67348ccc 2063 ? TREE_TYPE (e->callee->decl)
06d65050
JH
2064 : gimple_call_fntype (e->call_stmt));
2065 tree t = TYPE_ARG_TYPES (type);
2066
2067 for (n = 0; n < i; n++)
2068 {
2069 if (!t)
2070 break;
2071 t = TREE_CHAIN (t);
2072 }
2073 if (t)
2074 return TREE_VALUE (t);
2075 if (!e->callee)
2076 return NULL;
67348ccc 2077 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
2078 for (n = 0; n < i; n++)
2079 {
2080 if (!t)
2081 return NULL;
2082 t = TREE_CHAIN (t);
2083 }
2084 if (t)
2085 return TREE_TYPE (t);
2086 return NULL;
2087}
2088
86cd0334
MJ
2089/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2090 allocated structure or a previously existing one shared with other jump
2091 functions and/or transformation summaries. */
2092
2093ipa_bits *
2094ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2095{
2096 ipa_bits tmp;
2097 tmp.value = value;
2098 tmp.mask = mask;
2099
2100 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2101 if (*slot)
2102 return *slot;
2103
2104 ipa_bits *res = ggc_alloc<ipa_bits> ();
2105 res->value = value;
2106 res->mask = mask;
2107 *slot = res;
2108
2109 return res;
2110}
2111
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
2121
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  /* Placement-new into GC storage so the shared copy survives as long as
     the summaries referencing it.  */
  value_range *vr = new (ggc_alloc<value_range> ()) value_range;
  *vr = *tmp;
  *slot = vr;

  return vr;
}
2138
/* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}
2149
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
2160
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new on in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
2169
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* Record a polymorphic call context for pointer arguments when
	 devirtualization is enabled.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Value-range part of the jump function: nonnull for pointers,
	 converted range info for integral SSA names.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      /* ~[0, 0] expresses that the pointer is known non-NULL.  */
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      /* Convert the range of the argument to the type of the
		 callee's parameter.  */
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Known-bits part of the jump function: nonzero-bits for integers,
	 alignment for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  /* Alignment translates into known-zero low bits of the pointer
	     value.  */
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Main classification of the argument into a jump function kind.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2347
749aa96d 2348/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2349 from BB. */
749aa96d 2350
062c604f 2351static void
56b40062 2352ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2353{
8aab5218
MJ
2354 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2355 int i;
749aa96d
MJ
2356 struct cgraph_edge *cs;
2357
8aab5218 2358 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2359 {
8aab5218 2360 struct cgraph_node *callee = cs->callee;
749aa96d 2361
8aab5218
MJ
2362 if (callee)
2363 {
d7caa129 2364 callee = callee->ultimate_alias_target ();
8aab5218
MJ
2365 /* We do not need to bother analyzing calls to unknown functions
2366 unless they may become known during lto/whopr. */
2367 if (!callee->definition && !flag_lto)
2368 continue;
2369 }
2370 ipa_compute_jump_functions_for_edge (fbi, cs);
2371 }
749aa96d
MJ
2372}
2373
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The load may be either a COMPONENT_REF of the field or a MEM_REF with
     an explicit byte offset.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: MEM_REF offset must be zero and the referenced
	 field must be the one we look for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Plain MEM_REF form: the byte offset must match the field's
       position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2425
2426/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2427
3e293154
MJ
2428static bool
2429ipa_is_ssa_with_stmt_def (tree t)
2430{
2431 if (TREE_CODE (t) == SSA_NAME
2432 && !SSA_NAME_IS_DEFAULT_DEF (t))
2433 return true;
2434 else
2435 return false;
2436}
2437
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true record is as a destination of polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the flags; callers fill in agg_contents etc. as appropriate.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
				       param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call
	 (IPA_NODE_REF (node), param_index, true);
  return cs;
}
2461
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The easy case: calling through a parameter directly.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* Calling a pointer loaded from an aggregate that is a parameter or is
     pointed to by a parameter.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one of the PHI arguments must be the pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be (pfn & 1).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Optionally look through one conversion.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     as the called value (either the pfn or, depending on the target's vbit
     location, the delta field).  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2658
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the incoming value of a parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Bail out if the dynamic type of the object may have changed
	 between function entry and the call.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      /* The object may be an ancestor (base at a known offset) of a
	 parameter.  */
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2715
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  /* NOTE(review): CS is tested for NULL here but dereferenced
     unconditionally just below — presumably get_edge never returns NULL
     for a call statement of this function; confirm before relying on the
     NULL check.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      /* Remember whether the vtable pointer may change before the call;
	 if get_dynamic_type succeeds, it cannot.  */
      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2761
2762
e33c6cd6 2763/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2764 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2765 formal parameters are called. */
be95e2b9 2766
3e293154 2767static void
355fe088 2768ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2769{
726a989a 2770 if (is_gimple_call (stmt))
538dd0b7 2771 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2772}
2773
2774/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2775 If OP is a parameter declaration, mark it as used in the info structure
2776 passed in DATA. */
2777
2778static bool
355fe088 2779visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2780{
99b1c316 2781 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2782
2783 op = get_base_address (op);
2784 if (op
2785 && TREE_CODE (op) == PARM_DECL)
2786 {
2787 int index = ipa_get_param_decl_index (info, op);
2788 gcc_assert (index >= 0);
310bc633 2789 ipa_set_param_used (info, index, true);
062c604f
MJ
2790 }
2791
2792 return false;
3e293154
MJ
2793}
2794
8aab5218
MJ
2795/* Scan the statements in BB and inspect the uses of formal parameters. Store
2796 the findings in various structures of the associated ipa_node_params
2797 structure, such as parameter flags, notes etc. FBI holds various data about
2798 the function being analyzed. */
be95e2b9 2799
062c604f 2800static void
56b40062 2801ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2802{
726a989a 2803 gimple_stmt_iterator gsi;
8aab5218
MJ
2804 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2805 {
355fe088 2806 gimple *stmt = gsi_stmt (gsi);
3e293154 2807
8aab5218
MJ
2808 if (is_gimple_debug (stmt))
2809 continue;
3e293154 2810
8aab5218
MJ
2811 ipa_analyze_stmt_uses (fbi, stmt);
2812 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2813 visit_ref_for_mod_analysis,
2814 visit_ref_for_mod_analysis,
2815 visit_ref_for_mod_analysis);
5fe8e757 2816 }
8aab5218
MJ
2817 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2818 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2819 visit_ref_for_mod_analysis,
2820 visit_ref_for_mod_analysis,
2821 visit_ref_for_mod_analysis);
2822}
2823
2824/* Calculate controlled uses of parameters of NODE. */
2825
2826static void
2827ipa_analyze_controlled_uses (struct cgraph_node *node)
2828{
99b1c316 2829 class ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2830
8aab5218 2831 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2832 {
2833 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2834 int controlled_uses = 0;
2835
062c604f
MJ
2836 /* For SSA regs see if parameter is used. For non-SSA we compute
2837 the flag during modification analysis. */
4502fe8d
MJ
2838 if (is_gimple_reg (parm))
2839 {
67348ccc 2840 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2841 parm);
2842 if (ddef && !has_zero_uses (ddef))
2843 {
2844 imm_use_iterator imm_iter;
2845 use_operand_p use_p;
2846
2847 ipa_set_param_used (info, i, true);
2848 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2849 if (!is_gimple_call (USE_STMT (use_p)))
2850 {
c6de6665
JJ
2851 if (!is_gimple_debug (USE_STMT (use_p)))
2852 {
2853 controlled_uses = IPA_UNDESCRIBED_USE;
2854 break;
2855 }
4502fe8d
MJ
2856 }
2857 else
2858 controlled_uses++;
2859 }
2860 else
2861 controlled_uses = 0;
2862 }
2863 else
2864 controlled_uses = IPA_UNDESCRIBED_USE;
2865 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2866 }
8aab5218 2867}
062c604f 2868
8aab5218 2869/* Free stuff in BI. */
062c604f 2870
8aab5218
MJ
2871static void
2872free_ipa_bb_info (struct ipa_bb_info *bi)
2873{
2874 bi->cg_edges.release ();
2875 bi->param_aa_statuses.release ();
3e293154
MJ
2876}
2877
8aab5218 2878/* Dominator walker driving the analysis. */
2c9561b5 2879
8aab5218 2880class analysis_dom_walker : public dom_walker
2c9561b5 2881{
8aab5218 2882public:
56b40062 2883 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2884 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2885
3daacdcd 2886 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2887
2888private:
56b40062 2889 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2890};
2891
3daacdcd 2892edge
8aab5218
MJ
2893analysis_dom_walker::before_dom_children (basic_block bb)
2894{
2895 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2896 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2897 return NULL;
2c9561b5
MJ
2898}
2899
c3431191
ML
2900/* Release body info FBI. */
2901
2902void
2903ipa_release_body_info (struct ipa_func_body_info *fbi)
2904{
2905 int i;
2906 struct ipa_bb_info *bi;
2907
2908 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2909 free_ipa_bb_info (bi);
2910 fbi->bb_infos.release ();
2911}
2912
026c3cfd 2913/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2914 of NODE, analyze their uses and compute jump functions associated
2915 with actual arguments of calls from within NODE. */
062c604f
MJ
2916
2917void
2918ipa_analyze_node (struct cgraph_node *node)
2919{
56b40062 2920 struct ipa_func_body_info fbi;
99b1c316 2921 class ipa_node_params *info;
062c604f 2922
57dbdc5a
MJ
2923 ipa_check_create_node_params ();
2924 ipa_check_create_edge_args ();
6cf67b62 2925 info = IPA_NODE_REF_GET_CREATE (node);
8aab5218
MJ
2926
2927 if (info->analysis_done)
2928 return;
2929 info->analysis_done = 1;
2930
2931 if (ipa_func_spec_opts_forbid_analysis_p (node))
2932 {
2933 for (int i = 0; i < ipa_get_param_count (info); i++)
2934 {
2935 ipa_set_param_used (info, i, true);
2936 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2937 }
2938 return;
2939 }
2940
2941 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2942 push_cfun (func);
2943 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2944 ipa_initialize_node_params (node);
8aab5218 2945 ipa_analyze_controlled_uses (node);
062c604f 2946
8aab5218
MJ
2947 fbi.node = node;
2948 fbi.info = IPA_NODE_REF (node);
2949 fbi.bb_infos = vNULL;
cb3874dc 2950 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
8aab5218 2951 fbi.param_count = ipa_get_param_count (info);
fdfd7f53 2952 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
062c604f 2953
8aab5218
MJ
2954 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2955 {
2956 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2957 bi->cg_edges.safe_push (cs);
2958 }
062c604f 2959
8aab5218
MJ
2960 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2961 {
2962 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2963 bi->cg_edges.safe_push (cs);
2964 }
2965
2966 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2967
c3431191 2968 ipa_release_body_info (&fbi);
8aab5218 2969 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2970 pop_cfun ();
062c604f 2971}
062c604f 2972
be95e2b9 2973/* Update the jump functions associated with call graph edge E when the call
3e293154 2974 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2975 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2976
3e293154
MJ
2977static void
2978update_jump_functions_after_inlining (struct cgraph_edge *cs,
2979 struct cgraph_edge *e)
2980{
99b1c316
MS
2981 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2982 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
2983 if (!args)
2984 return;
3e293154
MJ
2985 int count = ipa_get_cs_argument_count (args);
2986 int i;
2987
2988 for (i = 0; i < count; i++)
2989 {
b258210c 2990 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 2991 class ipa_polymorphic_call_context *dst_ctx
5ce97055 2992 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2993
eb270950
FX
2994 if (dst->agg.items)
2995 {
2996 struct ipa_agg_jf_item *item;
2997 int j;
2998
2999 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3000 {
3001 int dst_fid;
3002 struct ipa_jump_func *src;
3003
3004 if (item->jftype != IPA_JF_PASS_THROUGH
3005 && item->jftype != IPA_JF_LOAD_AGG)
3006 continue;
3007
3008 dst_fid = item->value.pass_through.formal_id;
1c3c3f45 3009 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
eb270950
FX
3010 {
3011 item->jftype = IPA_JF_UNKNOWN;
3012 continue;
3013 }
3014
3015 item->value.pass_through.formal_id = -1;
3016 src = ipa_get_ith_jump_func (top, dst_fid);
3017 if (src->type == IPA_JF_CONST)
3018 {
3019 if (item->jftype == IPA_JF_PASS_THROUGH
3020 && item->value.pass_through.operation == NOP_EXPR)
3021 {
3022 item->jftype = IPA_JF_CONST;
3023 item->value.constant = src->value.constant.value;
3024 continue;
3025 }
3026 }
3027 else if (src->type == IPA_JF_PASS_THROUGH
3028 && src->value.pass_through.operation == NOP_EXPR)
3029 {
3030 if (item->jftype == IPA_JF_PASS_THROUGH
3031 || !item->value.load_agg.by_ref
3032 || src->value.pass_through.agg_preserved)
3033 item->value.pass_through.formal_id
3034 = src->value.pass_through.formal_id;
3035 }
3036 else if (src->type == IPA_JF_ANCESTOR)
3037 {
3038 if (item->jftype == IPA_JF_PASS_THROUGH)
3039 {
3040 if (!src->value.ancestor.offset)
3041 item->value.pass_through.formal_id
3042 = src->value.ancestor.formal_id;
3043 }
3044 else if (src->value.ancestor.agg_preserved)
3045 {
3046 gcc_checking_assert (item->value.load_agg.by_ref);
3047
3048 item->value.pass_through.formal_id
3049 = src->value.ancestor.formal_id;
3050 item->value.load_agg.offset
3051 += src->value.ancestor.offset;
3052 }
3053 }
3054
3055 if (item->value.pass_through.formal_id < 0)
3056 item->jftype = IPA_JF_UNKNOWN;
3057 }
3058 }
3059
1c3c3f45
FX
3060 if (!top)
3061 {
3062 ipa_set_jf_unknown (dst);
3063 continue;
3064 }
3065
685b0d13
MJ
3066 if (dst->type == IPA_JF_ANCESTOR)
3067 {
b258210c 3068 struct ipa_jump_func *src;
8b7773a4 3069 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 3070 class ipa_polymorphic_call_context *src_ctx
5ce97055 3071 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 3072
b258210c
MJ
3073 /* Variable number of arguments can cause havoc if we try to access
3074 one that does not exist in the inlined edge. So make sure we
3075 don't. */
8b7773a4 3076 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 3077 {
04be694e 3078 ipa_set_jf_unknown (dst);
b258210c
MJ
3079 continue;
3080 }
3081
8b7773a4
MJ
3082 src = ipa_get_ith_jump_func (top, dst_fid);
3083
5ce97055
JH
3084 if (src_ctx && !src_ctx->useless_p ())
3085 {
99b1c316 3086 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3087
3088 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3089 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 3090 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3091 ctx.offset_by (dst->value.ancestor.offset);
3092 if (!ctx.useless_p ())
3093 {
a7d1f3fe
ML
3094 if (!dst_ctx)
3095 {
3096 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3097 count, true);
a7d1f3fe
ML
3098 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3099 }
3100
3101 dst_ctx->combine_with (ctx);
5ce97055
JH
3102 }
3103 }
3104
eb270950
FX
3105 /* Parameter and argument in ancestor jump function must be pointer
3106 type, which means access to aggregate must be by-reference. */
3107 gcc_assert (!src->agg.items || src->agg.by_ref);
3108
3109 if (src->agg.items && dst->value.ancestor.agg_preserved)
8b7773a4
MJ
3110 {
3111 struct ipa_agg_jf_item *item;
3112 int j;
3113
3114 /* Currently we do not produce clobber aggregate jump functions,
3115 replace with merging when we do. */
3116 gcc_assert (!dst->agg.items);
3117
9771b263 3118 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3119 dst->agg.by_ref = src->agg.by_ref;
9771b263 3120 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
3121 item->offset -= dst->value.ancestor.offset;
3122 }
3123
3b97a5c7
MJ
3124 if (src->type == IPA_JF_PASS_THROUGH
3125 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
3126 {
3127 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3128 dst->value.ancestor.agg_preserved &=
3129 src->value.pass_through.agg_preserved;
3130 }
b258210c
MJ
3131 else if (src->type == IPA_JF_ANCESTOR)
3132 {
3133 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3134 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
3135 dst->value.ancestor.agg_preserved &=
3136 src->value.ancestor.agg_preserved;
b258210c
MJ
3137 }
3138 else
04be694e 3139 ipa_set_jf_unknown (dst);
b258210c
MJ
3140 }
3141 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 3142 {
b258210c
MJ
3143 struct ipa_jump_func *src;
3144 /* We must check range due to calls with variable number of arguments
3145 and we cannot combine jump functions with operations. */
3146 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 3147 && (top && dst->value.pass_through.formal_id
b258210c
MJ
3148 < ipa_get_cs_argument_count (top)))
3149 {
8b7773a4
MJ
3150 int dst_fid = dst->value.pass_through.formal_id;
3151 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 3152 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 3153 class ipa_polymorphic_call_context *src_ctx
5ce97055 3154 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 3155
5ce97055
JH
3156 if (src_ctx && !src_ctx->useless_p ())
3157 {
99b1c316 3158 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3159
3160 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3161 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 3162 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3163 if (!ctx.useless_p ())
3164 {
3165 if (!dst_ctx)
3166 {
3167 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3168 count, true);
5ce97055
JH
3169 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3170 }
3171 dst_ctx->combine_with (ctx);
3172 }
3173 }
b8f6e610
MJ
3174 switch (src->type)
3175 {
3176 case IPA_JF_UNKNOWN:
04be694e 3177 ipa_set_jf_unknown (dst);
b8f6e610 3178 break;
b8f6e610
MJ
3179 case IPA_JF_CONST:
3180 ipa_set_jf_cst_copy (dst, src);
3181 break;
3182
3183 case IPA_JF_PASS_THROUGH:
3184 {
3185 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3186 enum tree_code operation;
3187 operation = ipa_get_jf_pass_through_operation (src);
3188
3189 if (operation == NOP_EXPR)
3190 {
3b97a5c7 3191 bool agg_p;
b8f6e610
MJ
3192 agg_p = dst_agg_p
3193 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 3194 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 3195 }
a2b4c188
KV
3196 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3197 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
3198 else
3199 {
3200 tree operand = ipa_get_jf_pass_through_operand (src);
3201 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3202 operation);
3203 }
3204 break;
3205 }
3206 case IPA_JF_ANCESTOR:
3207 {
3b97a5c7 3208 bool agg_p;
b8f6e610
MJ
3209 agg_p = dst_agg_p
3210 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
3211 ipa_set_ancestor_jf (dst,
3212 ipa_get_jf_ancestor_offset (src),
b8f6e610 3213 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 3214 agg_p);
b8f6e610
MJ
3215 break;
3216 }
3217 default:
3218 gcc_unreachable ();
3219 }
8b7773a4
MJ
3220
3221 if (src->agg.items
b8f6e610 3222 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
3223 {
3224 /* Currently we do not produce clobber aggregate jump
3225 functions, replace with merging when we do. */
3226 gcc_assert (!dst->agg.items);
3227
3228 dst->agg.by_ref = src->agg.by_ref;
9771b263 3229 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3230 }
b258210c
MJ
3231 }
3232 else
04be694e 3233 ipa_set_jf_unknown (dst);
3e293154 3234 }
b258210c
MJ
3235 }
3236}
3237
5ce97055
JH
3238/* If TARGET is an addr_expr of a function declaration, make it the
3239 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3240 Otherwise, return NULL. */
b258210c 3241
3949c4a7 3242struct cgraph_edge *
5ce97055
JH
3243ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3244 bool speculative)
b258210c
MJ
3245{
3246 struct cgraph_node *callee;
48b1474e 3247 bool unreachable = false;
b258210c 3248
ceeffab0
MJ
3249 if (TREE_CODE (target) == ADDR_EXPR)
3250 target = TREE_OPERAND (target, 0);
b258210c 3251 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
3252 {
3253 target = canonicalize_constructor_val (target, NULL);
3254 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3255 {
db66bf68
JH
3256 /* Member pointer call that goes through a VMT lookup. */
3257 if (ie->indirect_info->member_ptr
3258 /* Or if target is not an invariant expression and we do not
3259 know if it will evaulate to function at runtime.
3260 This can happen when folding through &VAR, where &VAR
3261 is IP invariant, but VAR itself is not.
3262
3263 TODO: Revisit this when GCC 5 is branched. It seems that
3264 member_ptr check is not needed and that we may try to fold
3265 the expression and see if VAR is readonly. */
3266 || !is_gimple_ip_invariant (target))
3267 {
3268 if (dump_enabled_p ())
3269 {
4f5b9c80 3270 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
3271 "discovered direct call non-invariant %s\n",
3272 ie->caller->dump_name ());
db66bf68
JH
3273 }
3274 return NULL;
3275 }
3276
c13bc3d9 3277
2b5f0895
XDL
3278 if (dump_enabled_p ())
3279 {
4f5b9c80 3280 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 3281 "discovered direct call to non-function in %s, "
807b7d62 3282 "making it __builtin_unreachable\n",
464d0118 3283 ie->caller->dump_name ());
2b5f0895 3284 }
3c9e6fca 3285
48b1474e 3286 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3287 callee = cgraph_node::get_create (target);
48b1474e 3288 unreachable = true;
a0a7b611 3289 }
48b1474e 3290 else
d52f5295 3291 callee = cgraph_node::get (target);
a0a7b611 3292 }
48b1474e 3293 else
d52f5295 3294 callee = cgraph_node::get (target);
a0a7b611
JH
3295
3296 /* Because may-edges are not explicitely represented and vtable may be external,
3297 we may create the first reference to the object in the unit. */
a62bfab5 3298 if (!callee || callee->inlined_to)
a0a7b611 3299 {
a0a7b611
JH
3300
3301 /* We are better to ensure we can refer to it.
3302 In the case of static functions we are out of luck, since we already
3303 removed its body. In the case of public functions we may or may
3304 not introduce the reference. */
3305 if (!canonicalize_constructor_val (target, NULL)
3306 || !TREE_PUBLIC (target))
3307 {
3308 if (dump_file)
3309 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
845bb366 3310 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
3311 ie->caller->dump_name (),
3312 ie->callee->dump_name ());
a0a7b611
JH
3313 return NULL;
3314 }
d52f5295 3315 callee = cgraph_node::get_create (target);
a0a7b611 3316 }
2b5f0895 3317
0127c169
JH
3318 /* If the edge is already speculated. */
3319 if (speculative && ie->speculative)
3320 {
845bb366 3321 if (dump_file)
0127c169 3322 {
845bb366
JH
3323 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3324 if (!e2)
3325 {
3326 if (dump_file)
3327 fprintf (dump_file, "ipa-prop: Discovered call to a "
3328 "speculative target (%s -> %s) but the call is "
3329 "already speculated to different target. "
3330 "Giving up.\n",
3331 ie->caller->dump_name (), callee->dump_name ());
3332 }
3333 else
3334 {
3335 if (dump_file)
3336 fprintf (dump_file,
3337 "ipa-prop: Discovered call to a speculative target "
3338 "(%s -> %s) this agree with previous speculation.\n",
3339 ie->caller->dump_name (), callee->dump_name ());
3340 }
0127c169
JH
3341 }
3342 return NULL;
3343 }
3344
2b5f0895
XDL
3345 if (!dbg_cnt (devirt))
3346 return NULL;
3347
1dbee8c9 3348 ipa_check_create_node_params ();
ceeffab0 3349
67914693 3350 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 3351 the cgraph node too early. */
a62bfab5 3352 gcc_assert (!callee->inlined_to);
17afc0fe 3353
48b1474e 3354 if (dump_file && !unreachable)
b258210c 3355 {
5ce97055 3356 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 3357 "(%s -> %s), for stmt ",
b258210c 3358 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 3359 speculative ? "speculative" : "known",
464d0118
ML
3360 ie->caller->dump_name (),
3361 callee->dump_name ());
b258210c
MJ
3362 if (ie->call_stmt)
3363 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3364 else
3365 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 3366 }
2b5f0895
XDL
3367 if (dump_enabled_p ())
3368 {
4f5b9c80 3369 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62 3370 "converting indirect call in %s to direct call to %s\n",
3629ff8a 3371 ie->caller->dump_name (), callee->dump_name ());
2b5f0895 3372 }
5ce97055 3373 if (!speculative)
d8d5aef1
JH
3374 {
3375 struct cgraph_edge *orig = ie;
27c5a177 3376 ie = cgraph_edge::make_direct (ie, callee);
d8d5aef1
JH
3377 /* If we resolved speculative edge the cost is already up to date
3378 for direct call (adjusted by inline_edge_duplication_hook). */
3379 if (ie == orig)
3380 {
56f62793 3381 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
3382 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3383 - eni_size_weights.call_cost);
3384 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3385 - eni_time_weights.call_cost);
3386 }
3387 }
5ce97055
JH
3388 else
3389 {
3390 if (!callee->can_be_discarded_p ())
3391 {
3392 cgraph_node *alias;
3393 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3394 if (alias)
3395 callee = alias;
3396 }
d8d5aef1 3397 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3398 ie = ie->make_speculative
1bad9c18 3399 (callee, ie->count.apply_scale (8, 10));
5ce97055 3400 }
749aa96d 3401
b258210c 3402 return ie;
3e293154
MJ
3403}
3404
91bb9f80
MJ
3405/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3406 CONSTRUCTOR and return it. Return NULL if the search fails for some
3407 reason. */
3408
3409static tree
3410find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3411{
3412 tree type = TREE_TYPE (constructor);
3413 if (TREE_CODE (type) != ARRAY_TYPE
3414 && TREE_CODE (type) != RECORD_TYPE)
3415 return NULL;
3416
3417 unsigned ix;
3418 tree index, val;
3419 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3420 {
3421 HOST_WIDE_INT elt_offset;
3422 if (TREE_CODE (type) == ARRAY_TYPE)
3423 {
3424 offset_int off;
3425 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3426 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3427
3428 if (index)
3429 {
db9bbdec
RB
3430 if (TREE_CODE (index) == RANGE_EXPR)
3431 off = wi::to_offset (TREE_OPERAND (index, 0));
3432 else
3433 off = wi::to_offset (index);
91bb9f80
MJ
3434 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3435 {
3436 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3437 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3438 off = wi::sext (off - wi::to_offset (low_bound),
3439 TYPE_PRECISION (TREE_TYPE (index)));
3440 }
3441 off *= wi::to_offset (unit_size);
db9bbdec
RB
3442 /* ??? Handle more than just the first index of a
3443 RANGE_EXPR. */
91bb9f80
MJ
3444 }
3445 else
3446 off = wi::to_offset (unit_size) * ix;
3447
3448 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3449 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3450 continue;
3451 elt_offset = off.to_shwi ();
3452 }
3453 else if (TREE_CODE (type) == RECORD_TYPE)
3454 {
3455 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3456 if (DECL_BIT_FIELD (index))
3457 continue;
3458 elt_offset = int_bit_position (index);
3459 }
3460 else
3461 gcc_unreachable ();
3462
3463 if (elt_offset > req_offset)
3464 return NULL;
3465
3466 if (TREE_CODE (val) == CONSTRUCTOR)
3467 return find_constructor_constant_at_offset (val,
3468 req_offset - elt_offset);
3469
3470 if (elt_offset == req_offset
3471 && is_gimple_reg_type (TREE_TYPE (val))
3472 && is_gimple_ip_invariant (val))
3473 return val;
3474 }
3475 return NULL;
3476}
3477
3478/* Check whether SCALAR could be used to look up an aggregate interprocedural
3479 invariant from a static constructor and if so, return it. Otherwise return
3480 NULL. */
3481
3482static tree
3483ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3484{
3485 if (by_ref)
3486 {
3487 if (TREE_CODE (scalar) != ADDR_EXPR)
3488 return NULL;
3489 scalar = TREE_OPERAND (scalar, 0);
3490 }
3491
8813a647 3492 if (!VAR_P (scalar)
91bb9f80
MJ
3493 || !is_global_var (scalar)
3494 || !TREE_READONLY (scalar)
3495 || !DECL_INITIAL (scalar)
3496 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3497 return NULL;
3498
3499 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3500}
3501
eb270950
FX
3502/* Retrieve value from AGG, a set of known offset/value for an aggregate or
3503 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3504 return NULL if there is none. BY_REF specifies whether the value has to be
3505 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3506 the boolean it points to is set to true if the value comes from an
3507 initializer of a constant. */
8b7773a4
MJ
3508
3509tree
eb270950 3510ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
91bb9f80
MJ
3511 HOST_WIDE_INT offset, bool by_ref,
3512 bool *from_global_constant)
8b7773a4 3513{
eb270950 3514 struct ipa_agg_value *item;
8b7773a4
MJ
3515 int i;
3516
91bb9f80
MJ
3517 if (scalar)
3518 {
3519 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3520 if (res)
3521 {
3522 if (from_global_constant)
3523 *from_global_constant = true;
3524 return res;
3525 }
3526 }
3527
3528 if (!agg
3529 || by_ref != agg->by_ref)
8b7773a4
MJ
3530 return NULL;
3531
eb270950 3532 FOR_EACH_VEC_ELT (agg->items, i, item)
2c9561b5
MJ
3533 if (item->offset == offset)
3534 {
3535 /* Currently we do not have clobber values, return NULL for them once
3536 we do. */
3537 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3538 if (from_global_constant)
3539 *from_global_constant = false;
2c9561b5
MJ
3540 return item->value;
3541 }
8b7773a4
MJ
3542 return NULL;
3543}
3544
4502fe8d 3545/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3546 reference description RDESC. Return true if the reference has been
3547 successfully found and removed. */
4502fe8d 3548
568cda29 3549static bool
5e20cdc9 3550remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3551{
3552 struct ipa_ref *to_del;
3553 struct cgraph_edge *origin;
3554
3555 origin = rdesc->cs;
a854f856
MJ
3556 if (!origin)
3557 return false;
d122681a
ML
3558 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3559 origin->lto_stmt_uid);
568cda29
MJ
3560 if (!to_del)
3561 return false;
3562
d122681a 3563 to_del->remove_reference ();
4502fe8d 3564 if (dump_file)
464d0118 3565 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3629ff8a 3566 origin->caller->dump_name (), symbol->dump_name ());
568cda29 3567 return true;
4502fe8d
MJ
3568}
3569
3570/* If JFUNC has a reference description with refcount different from
3571 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3572 NULL. JFUNC must be a constant jump function. */
3573
3574static struct ipa_cst_ref_desc *
3575jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3576{
3577 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3578 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3579 return rdesc;
3580 else
3581 return NULL;
3582}
3583
568cda29
MJ
3584/* If the value of constant jump function JFUNC is an address of a function
3585 declaration, return the associated call graph node. Otherwise return
3586 NULL. */
3587
3588static cgraph_node *
3589cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3590{
3591 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3592 tree cst = ipa_get_jf_constant (jfunc);
3593 if (TREE_CODE (cst) != ADDR_EXPR
3594 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3595 return NULL;
3596
d52f5295 3597 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3598}
3599
3600
3601/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3602 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3603 the edge specified in the rdesc. Return false if either the symbol or the
3604 reference could not be found, otherwise return true. */
3605
3606static bool
3607try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3608{
3609 struct ipa_cst_ref_desc *rdesc;
3610 if (jfunc->type == IPA_JF_CONST
3611 && (rdesc = jfunc_rdesc_usable (jfunc))
3612 && --rdesc->refcount == 0)
3613 {
5e20cdc9 3614 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3615 if (!symbol)
3616 return false;
3617
3618 return remove_described_reference (symbol, rdesc);
3619 }
3620 return true;
3621}
3622
b258210c
MJ
3623/* Try to find a destination for indirect edge IE that corresponds to a simple
3624 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3625 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3626 the type of the parameter to which the result of JFUNC is passed. If it can
3627 be determined, return the newly direct edge, otherwise return NULL.
eb270950
FX
3628 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3629 relative to. */
be95e2b9 3630
b258210c
MJ
3631static struct cgraph_edge *
3632try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3633 struct ipa_jump_func *jfunc, tree target_type,
eb270950 3634 struct cgraph_node *new_root,
99b1c316 3635 class ipa_node_params *new_root_info)
b258210c 3636{
4502fe8d 3637 struct cgraph_edge *cs;
b258210c 3638 tree target;
042ae7d2 3639 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3640 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3641 if (agg_contents)
3642 {
3643 bool from_global_constant;
eb270950
FX
3644 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3645 new_root,
3646 &jfunc->agg);
3647 target = ipa_find_agg_cst_for_param (&agg, scalar,
91bb9f80
MJ
3648 ie->indirect_info->offset,
3649 ie->indirect_info->by_ref,
3650 &from_global_constant);
eb270950 3651 agg.release ();
91bb9f80
MJ
3652 if (target
3653 && !from_global_constant
3654 && !ie->indirect_info->guaranteed_unmodified)
3655 return NULL;
3656 }
b258210c 3657 else
91bb9f80 3658 target = scalar;
d250540a
MJ
3659 if (!target)
3660 return NULL;
4502fe8d
MJ
3661 cs = ipa_make_edge_direct_to_target (ie, target);
3662
a12cd2db 3663 if (cs && !agg_contents)
568cda29
MJ
3664 {
3665 bool ok;
3666 gcc_checking_assert (cs->callee
ae6d0907
MJ
3667 && (cs != ie
3668 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3669 || !cgraph_node_for_jfunc (jfunc)
3670 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3671 ok = try_decrement_rdesc_refcount (jfunc);
3672 gcc_checking_assert (ok);
3673 }
4502fe8d
MJ
3674
3675 return cs;
b258210c
MJ
3676}
3677
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  /* Use __builtin_unreachable as the call target so that the call site is
     known to never be executed with a consistent type.  */
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  /* Make sure a cgraph node for the builtin exists.  */
  cgraph_node::get_create (new_target);
  return new_target;
}
3700
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the vtable-based target is
		     only a speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: use the polymorphic context to enumerate possible
     targets; a unique target (or none at all) allows devirtualization.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* The context proves the target; no speculation necessary.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* Last resort for hot calls: speculative devirtualization.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3815
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = IPA_NODE_REF (new_root);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Fetch the successor first; IE may be turned into a direct edge.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember current speculative targets so we can recognize when
	 devirtualization merely confirms an existing speculation.  */
      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Re-fetch TOP: creating edges may have reallocated summaries.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stayed indirect; retarget its parameter description
	 through the jump function into the new root's parameter space.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3958
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      /* Inlined callee: recurse into its subtree.  */
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      /* Edge leaving the inlined subtree: its jump functions must be
	 composed with those of CS.  */
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
3987
4502fe8d
MJ
3988/* Combine two controlled uses counts as done during inlining. */
3989
3990static int
3991combine_controlled_uses_counters (int c, int d)
3992{
3993 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3994 return IPA_UNDESCRIBED_USE;
3995 else
3996 return c + d - 1;
3997}
3998
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  /* If all uses are now accounted for in an IPA-CP clone, a
	     cloning-created reference to a known constant function can be
	     removed.  */
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of IPA-CP clones and drop the
		     duplicated references they carry as well.  */
		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameter count have no matching
     descriptor; mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
4118
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  /* Keep the edge summary alive if any constant jump function still
	     carries a reference descriptor that other code may consult.  */
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4164
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
4181
771578a0 4182/* Free all ipa_edge structures. */
be95e2b9 4183
518dc859 4184void
771578a0 4185ipa_free_all_edge_args (void)
518dc859 4186{
6fe906a3 4187 if (!ipa_edge_args_sum)
9771b263
DN
4188 return;
4189
ddf628e4 4190 ggc_delete (ipa_edge_args_sum);
6fe906a3 4191 ipa_edge_args_sum = NULL;
518dc859
RL
4192}
4193
771578a0 4194/* Free all ipa_node_params structures. */
be95e2b9 4195
518dc859 4196void
771578a0 4197ipa_free_all_node_params (void)
518dc859 4198{
bc2fcccd
JH
4199 if (ipa_node_params_sum)
4200 ggc_delete (ipa_node_params_sum);
dd912cb8 4201 ipa_node_params_sum = NULL;
771578a0
MJ
4202}
4203
/* Initialize IPA CP transformation summary and also allocate any necessary
   hash tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      /* Insertion hook is only enabled when the data is actively used;
	 disable it until then.  */
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
4220
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  /* The summary was allocated in GGC memory without a registered destructor;
     run the destructor explicitly before freeing the storage.  */
  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
4233
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
4244
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* Clear back-pointers from reference descriptors owned by the
	     edge being removed so they do not dangle.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
4266
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow copy above shared the aggregate items vector; make the
	 destination's copy independent.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. statement
		 splitting): clone the IPA reference and give the new edge
		 its own descriptor.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* The descriptor belongs to the duplicated edge itself: create
	         a duplicate descriptor and link it into the chain.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A duplicated pass-through within the same caller is one more
	     use of the corresponding parameter; bump the controlled-uses
	     counter unless it is already undescribed.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
4360
dd912cb8 4361/* Analyze newly added function into callgraph. */
be95e2b9 4362
771578a0 4363static void
dd912cb8 4364ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 4365{
dd912cb8
ML
4366 if (node->has_gimple_body_p ())
4367 ipa_analyze_node (node);
4368}
771578a0 4369
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are propagation-time scratch data; do not copy them.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the aggregate replacement chain.  Note that prepending
	 reverses the list order relative to the source.  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
04be694e 4406
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  /* Deep-copy the aggregate replacement chain preserving its order.  */
  ipa_agg_replacement_value *agg = src_trans->agg_values,
    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
4429
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  /* Creating the summaries registers their removal/duplication hooks.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
518dc859 4441
771578a0 4442/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 4443
771578a0
MJ
4444static void
4445ipa_unregister_cgraph_hooks (void)
4446{
bc2fcccd
JH
4447 if (function_insertion_hook_holder)
4448 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 4449 function_insertion_hook_holder = NULL;
771578a0
MJ
4450}
4451
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  /* When optimizing, indirect inlining still needs the data; only release
     everything in the non-optimizing, non-LTO case.  */
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
4470
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4486
dcd416e3 4487/* Print ipa_tree_map data structures of all functions in the
518dc859 4488 callgraph to F. */
be95e2b9 4489
518dc859 4490void
2c9561b5 4491ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4492{
4493 int i, count;
99b1c316 4494 class ipa_node_params *info;
518dc859 4495
67348ccc 4496 if (!node->definition)
3e293154
MJ
4497 return;
4498 info = IPA_NODE_REF (node);
464d0118 4499 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
0302955a
JH
4500 if (!info)
4501 {
4502 fprintf (f, " no params return\n");
4503 return;
4504 }
3e293154
MJ
4505 count = ipa_get_param_count (info);
4506 for (i = 0; i < count; i++)
518dc859 4507 {
4502fe8d
MJ
4508 int c;
4509
a4e33812 4510 fprintf (f, " ");
e067bd43 4511 ipa_dump_param (f, info, i);
339f49ec
JH
4512 if (ipa_is_param_used (info, i))
4513 fprintf (f, " used");
40a777e8
JH
4514 if (ipa_is_param_used_by_ipa_predicates (info, i))
4515 fprintf (f, " used_by_ipa_predicates");
4516 if (ipa_is_param_used_by_indirect_call (info, i))
4517 fprintf (f, " used_by_indirect_call");
4518 if (ipa_is_param_used_by_polymorphic_call (info, i))
4519 fprintf (f, " used_by_polymorphic_call");
4502fe8d
MJ
4520 c = ipa_get_controlled_uses (info, i);
4521 if (c == IPA_UNDESCRIBED_USE)
4522 fprintf (f, " undescribed_use");
4523 else
4524 fprintf (f, " controlled_uses=%i", c);
3e293154 4525 fprintf (f, "\n");
518dc859
RL
4526 }
4527}
dcd416e3 4528
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
3f84bf08 4541
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
4558
fb3f88cc
JH
4559/* Stream out jump function JUMP_FUNC to OB. */
4560
4561static void
4562ipa_write_jump_function (struct output_block *ob,
4563 struct ipa_jump_func *jump_func)
4564{
8b7773a4
MJ
4565 struct ipa_agg_jf_item *item;
4566 struct bitpack_d bp;
4567 int i, count;
f714ecf5 4568 int flag = 0;
fb3f88cc 4569
f714ecf5
JH
4570 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4571 as well as WPA memory by handling them specially. */
4572 if (jump_func->type == IPA_JF_CONST
4573 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4574 flag = 1;
4575
4576 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
fb3f88cc
JH
4577 switch (jump_func->type)
4578 {
4579 case IPA_JF_UNKNOWN:
4580 break;
4581 case IPA_JF_CONST:
5368224f 4582 gcc_assert (
4502fe8d 4583 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
f714ecf5
JH
4584 stream_write_tree (ob,
4585 flag
4586 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4587 : jump_func->value.constant.value, true);
fb3f88cc
JH
4588 break;
4589 case IPA_JF_PASS_THROUGH:
412288f1 4590 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4591 if (jump_func->value.pass_through.operation == NOP_EXPR)
4592 {
4593 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4594 bp = bitpack_create (ob->main_stream);
4595 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4596 streamer_write_bitpack (&bp);
4597 }
a2b4c188
KV
4598 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4599 == tcc_unary)
4600 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4601 else
4602 {
4603 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4604 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4605 }
fb3f88cc
JH
4606 break;
4607 case IPA_JF_ANCESTOR:
412288f1 4608 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4609 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4610 bp = bitpack_create (ob->main_stream);
4611 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4612 streamer_write_bitpack (&bp);
fb3f88cc 4613 break;
eb270950
FX
4614 default:
4615 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4616 }
4617
9771b263 4618 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4619 streamer_write_uhwi (ob, count);
4620 if (count)
4621 {
4622 bp = bitpack_create (ob->main_stream);
4623 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4624 streamer_write_bitpack (&bp);
4625 }
4626
9771b263 4627 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4 4628 {
eb270950 4629 stream_write_tree (ob, item->type, true);
8b7773a4 4630 streamer_write_uhwi (ob, item->offset);
eb270950
FX
4631 streamer_write_uhwi (ob, item->jftype);
4632 switch (item->jftype)
4633 {
4634 case IPA_JF_UNKNOWN:
4635 break;
4636 case IPA_JF_CONST:
4637 stream_write_tree (ob, item->value.constant, true);
4638 break;
4639 case IPA_JF_PASS_THROUGH:
4640 case IPA_JF_LOAD_AGG:
4641 streamer_write_uhwi (ob, item->value.pass_through.operation);
4642 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4643 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4644 != tcc_unary)
4645 stream_write_tree (ob, item->value.pass_through.operand, true);
4646 if (item->jftype == IPA_JF_LOAD_AGG)
4647 {
4648 stream_write_tree (ob, item->value.load_agg.type, true);
4649 streamer_write_uhwi (ob, item->value.load_agg.offset);
4650 bp = bitpack_create (ob->main_stream);
4651 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4652 streamer_write_bitpack (&bp);
4653 }
4654 break;
4655 default:
4656 fatal_error (UNKNOWN_LOCATION,
4657 "invalid jump function in LTO stream");
4658 }
fb3f88cc 4659 }
04be694e 4660
209ca542 4661 bp = bitpack_create (ob->main_stream);
86cd0334 4662 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4663 streamer_write_bitpack (&bp);
86cd0334 4664 if (jump_func->bits)
209ca542 4665 {
86cd0334
MJ
4666 streamer_write_widest_int (ob, jump_func->bits->value);
4667 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4668 }
86cd0334 4669 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4670 streamer_write_bitpack (&bp);
86cd0334 4671 if (jump_func->m_vr)
8bc5448f
KV
4672 {
4673 streamer_write_enum (ob->main_stream, value_rang_type,
54994253
AH
4674 VR_LAST, jump_func->m_vr->kind ());
4675 stream_write_tree (ob, jump_func->m_vr->min (), true);
4676 stream_write_tree (ob, jump_func->m_vr->max (), true);
8bc5448f 4677 }
fb3f88cc
JH
4678}
4679
4680/* Read in jump function JUMP_FUNC from IB. */
4681
4682static void
99b1c316 4683ipa_read_jump_function (class lto_input_block *ib,
fb3f88cc 4684 struct ipa_jump_func *jump_func,
4502fe8d 4685 struct cgraph_edge *cs,
99b1c316 4686 class data_in *data_in,
f714ecf5 4687 bool prevails)
fb3f88cc 4688{
4a53743e
MJ
4689 enum jump_func_type jftype;
4690 enum tree_code operation;
8b7773a4 4691 int i, count;
f714ecf5
JH
4692 int val = streamer_read_uhwi (ib);
4693 bool flag = val & 1;
fb3f88cc 4694
f714ecf5 4695 jftype = (enum jump_func_type) (val / 2);
4a53743e 4696 switch (jftype)
fb3f88cc
JH
4697 {
4698 case IPA_JF_UNKNOWN:
04be694e 4699 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4700 break;
4701 case IPA_JF_CONST:
f714ecf5
JH
4702 {
4703 tree t = stream_read_tree (ib, data_in);
4704 if (flag && prevails)
7a2090b0 4705 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
f714ecf5
JH
4706 ipa_set_jf_constant (jump_func, t, cs);
4707 }
fb3f88cc
JH
4708 break;
4709 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4710 operation = (enum tree_code) streamer_read_uhwi (ib);
4711 if (operation == NOP_EXPR)
4712 {
4713 int formal_id = streamer_read_uhwi (ib);
4714 struct bitpack_d bp = streamer_read_bitpack (ib);
4715 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4716 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4717 }
a2b4c188
KV
4718 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4719 {
4720 int formal_id = streamer_read_uhwi (ib);
4721 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4722 }
4a53743e
MJ
4723 else
4724 {
4725 tree operand = stream_read_tree (ib, data_in);
4726 int formal_id = streamer_read_uhwi (ib);
4727 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4728 operation);
4729 }
fb3f88cc
JH
4730 break;
4731 case IPA_JF_ANCESTOR:
4a53743e
MJ
4732 {
4733 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4734 int formal_id = streamer_read_uhwi (ib);
4735 struct bitpack_d bp = streamer_read_bitpack (ib);
4736 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4737 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4738 break;
4739 }
f714ecf5
JH
4740 default:
4741 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4742 }
4743
4744 count = streamer_read_uhwi (ib);
f714ecf5
JH
4745 if (prevails)
4746 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4747 if (count)
4748 {
4a53743e 4749 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4750 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4751 }
4752 for (i = 0; i < count; i++)
4753 {
f32682ca 4754 struct ipa_agg_jf_item item;
eb270950 4755 item.type = stream_read_tree (ib, data_in);
f32682ca 4756 item.offset = streamer_read_uhwi (ib);
eb270950
FX
4757 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4758
4759 switch (item.jftype)
4760 {
4761 case IPA_JF_UNKNOWN:
4762 break;
4763 case IPA_JF_CONST:
4764 item.value.constant = stream_read_tree (ib, data_in);
4765 break;
4766 case IPA_JF_PASS_THROUGH:
4767 case IPA_JF_LOAD_AGG:
4768 operation = (enum tree_code) streamer_read_uhwi (ib);
4769 item.value.pass_through.operation = operation;
4770 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4771 if (TREE_CODE_CLASS (operation) == tcc_unary)
4772 item.value.pass_through.operand = NULL_TREE;
4773 else
4774 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4775 if (item.jftype == IPA_JF_LOAD_AGG)
4776 {
4777 struct bitpack_d bp;
4778 item.value.load_agg.type = stream_read_tree (ib, data_in);
4779 item.value.load_agg.offset = streamer_read_uhwi (ib);
4780 bp = streamer_read_bitpack (ib);
4781 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4782 }
4783 break;
4784 default:
4785 fatal_error (UNKNOWN_LOCATION,
4786 "invalid jump function in LTO stream");
4787 }
f714ecf5
JH
4788 if (prevails)
4789 jump_func->agg.items->quick_push (item);
fb3f88cc 4790 }
04be694e
MJ
4791
4792 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4793 bool bits_known = bp_unpack_value (&bp, 1);
4794 if (bits_known)
4795 {
86cd0334
MJ
4796 widest_int value = streamer_read_widest_int (ib);
4797 widest_int mask = streamer_read_widest_int (ib);
f714ecf5
JH
4798 if (prevails)
4799 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4800 }
4801 else
86cd0334 4802 jump_func->bits = NULL;
8bc5448f
KV
4803
4804 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4805 bool vr_known = bp_unpack_value (&vr_bp, 1);
4806 if (vr_known)
4807 {
54994253 4808 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
86cd0334
MJ
4809 VR_LAST);
4810 tree min = stream_read_tree (ib, data_in);
4811 tree max = stream_read_tree (ib, data_in);
f714ecf5
JH
4812 if (prevails)
4813 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4814 }
4815 else
86cd0334 4816 jump_func->m_vr = NULL;
fb3f88cc
JH
4817}
4818
e33c6cd6
MJ
4819/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4820 relevant to indirect inlining to OB. */
661e7330
MJ
4821
4822static void
e33c6cd6
MJ
4823ipa_write_indirect_edge_info (struct output_block *ob,
4824 struct cgraph_edge *cs)
661e7330 4825{
99b1c316 4826 class cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4827 struct bitpack_d bp;
e33c6cd6 4828
412288f1 4829 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4830 bp = bitpack_create (ob->main_stream);
4831 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4832 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4833 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4834 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4835 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4836 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 4837 streamer_write_bitpack (&bp);
ba392339
JH
4838 if (ii->agg_contents || ii->polymorphic)
4839 streamer_write_hwi (ob, ii->offset);
4840 else
4841 gcc_assert (ii->offset == 0);
b258210c
MJ
4842
4843 if (ii->polymorphic)
4844 {
412288f1 4845 streamer_write_hwi (ob, ii->otr_token);
b9393656 4846 stream_write_tree (ob, ii->otr_type, true);
ba392339 4847 ii->context.stream_out (ob);
b258210c 4848 }
661e7330
MJ
4849}
4850
e33c6cd6
MJ
4851/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4852 relevant to indirect inlining from IB. */
661e7330
MJ
4853
4854static void
99b1c316
MS
4855ipa_read_indirect_edge_info (class lto_input_block *ib,
4856 class data_in *data_in,
40a777e8
JH
4857 struct cgraph_edge *cs,
4858 class ipa_node_params *info)
661e7330 4859{
99b1c316 4860 class cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4861 struct bitpack_d bp;
661e7330 4862
412288f1 4863 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 4864 bp = streamer_read_bitpack (ib);
2465dcc2 4865 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4866 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4867 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4868 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 4869 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 4870 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
4871 if (ii->agg_contents || ii->polymorphic)
4872 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4873 else
4874 ii->offset = 0;
b258210c
MJ
4875 if (ii->polymorphic)
4876 {
412288f1 4877 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4878 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 4879 ii->context.stream_in (ib, data_in);
b258210c 4880 }
40a777e8
JH
4881 if (info && ii->param_index >= 0)
4882 {
4883 if (ii->polymorphic)
4884 ipa_set_param_used_by_polymorphic_call (info,
4885 ii->param_index , true);
4886 ipa_set_param_used_by_indirect_call (info,
4887 ii->param_index, true);
4888 }
661e7330
MJ
4889}
4890
fb3f88cc
JH
4891/* Stream out NODE info to OB. */
4892
4893static void
4894ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4895{
4896 int node_ref;
7380e6ef 4897 lto_symtab_encoder_t encoder;
99b1c316 4898 class ipa_node_params *info = IPA_NODE_REF (node);
fb3f88cc
JH
4899 int j;
4900 struct cgraph_edge *e;
2465dcc2 4901 struct bitpack_d bp;
fb3f88cc 4902
7380e6ef 4903 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4904 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4905 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4906
0e8853ee
JH
4907 streamer_write_uhwi (ob, ipa_get_param_count (info));
4908 for (j = 0; j < ipa_get_param_count (info); j++)
4909 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4910 bp = bitpack_create (ob->main_stream);
8aab5218 4911 gcc_assert (info->analysis_done
661e7330 4912 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4913 gcc_assert (!info->node_enqueued);
4914 gcc_assert (!info->ipcp_orig_node);
4915 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4916 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4917 streamer_write_bitpack (&bp);
4502fe8d 4918 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
4919 {
4920 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4921 stream_write_tree (ob, ipa_get_type (info, j), true);
4922 }
fb3f88cc
JH
4923 for (e = node->callees; e; e = e->next_callee)
4924 {
99b1c316 4925 class ipa_edge_args *args = IPA_EDGE_REF (e);
fb3f88cc 4926
a33c028e
JH
4927 if (!args)
4928 {
4929 streamer_write_uhwi (ob, 0);
4930 continue;
4931 }
4932
5ce97055
JH
4933 streamer_write_uhwi (ob,
4934 ipa_get_cs_argument_count (args) * 2
4935 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 4936 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4937 {
4938 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4939 if (args->polymorphic_call_contexts != NULL)
4940 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4941 }
fb3f88cc 4942 }
e33c6cd6 4943 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 4944 {
99b1c316 4945 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
4946 if (!args)
4947 streamer_write_uhwi (ob, 0);
4948 else
5ce97055 4949 {
a33c028e
JH
4950 streamer_write_uhwi (ob,
4951 ipa_get_cs_argument_count (args) * 2
4952 + (args->polymorphic_call_contexts != NULL));
4953 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4954 {
4955 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4956 if (args->polymorphic_call_contexts != NULL)
4957 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4958 }
5ce97055 4959 }
c8246dbe
JH
4960 ipa_write_indirect_edge_info (ob, e);
4961 }
fb3f88cc
JH
4962}
4963
f714ecf5 4964/* Stream in edge E from IB. */
7b377b22 4965
f714ecf5 4966static void
99b1c316
MS
4967ipa_read_edge_info (class lto_input_block *ib,
4968 class data_in *data_in,
f714ecf5 4969 struct cgraph_edge *e, bool prevails)
7b377b22 4970{
f714ecf5
JH
4971 int count = streamer_read_uhwi (ib);
4972 bool contexts_computed = count & 1;
4973
4974 count /= 2;
4975 if (!count)
4976 return;
4977 if (prevails && e->possibly_call_in_translation_unit_p ())
4978 {
a33c028e 4979 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
cb3874dc 4980 vec_safe_grow_cleared (args->jump_functions, count, true);
f714ecf5 4981 if (contexts_computed)
cb3874dc 4982 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
f714ecf5
JH
4983 for (int k = 0; k < count; k++)
4984 {
4985 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4986 data_in, prevails);
4987 if (contexts_computed)
4988 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4989 (ib, data_in);
4990 }
4991 }
4992 else
4993 {
4994 for (int k = 0; k < count; k++)
4995 {
4996 struct ipa_jump_func dummy;
4997 ipa_read_jump_function (ib, &dummy, e,
4998 data_in, prevails);
4999 if (contexts_computed)
5000 {
99b1c316 5001 class ipa_polymorphic_call_context ctx;
f714ecf5
JH
5002 ctx.stream_in (ib, data_in);
5003 }
5004 }
5005 }
7b377b22
JH
5006}
5007
61502ca8 5008/* Stream in NODE info from IB. */
fb3f88cc
JH
5009
5010static void
99b1c316
MS
5011ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5012 class data_in *data_in)
fb3f88cc 5013{
fb3f88cc
JH
5014 int k;
5015 struct cgraph_edge *e;
2465dcc2 5016 struct bitpack_d bp;
f714ecf5 5017 bool prevails = node->prevailing_p ();
6cf67b62
JH
5018 class ipa_node_params *info = prevails
5019 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
fb3f88cc 5020
f714ecf5
JH
5021 int param_count = streamer_read_uhwi (ib);
5022 if (prevails)
5023 {
5024 ipa_alloc_node_params (node, param_count);
5025 for (k = 0; k < param_count; k++)
5026 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5027 if (ipa_get_param_count (info) != 0)
5028 info->analysis_done = true;
5029 info->node_enqueued = false;
5030 }
5031 else
5032 for (k = 0; k < param_count; k++)
5033 streamer_read_uhwi (ib);
a5e14a42 5034
412288f1 5035 bp = streamer_read_bitpack (ib);
f714ecf5 5036 for (k = 0; k < param_count; k++)
a5e14a42 5037 {
f714ecf5
JH
5038 bool used = bp_unpack_value (&bp, 1);
5039
5040 if (prevails)
5041 ipa_set_param_used (info, k, used);
a5e14a42 5042 }
f714ecf5 5043 for (k = 0; k < param_count; k++)
fb3f88cc 5044 {
f714ecf5
JH
5045 int nuses = streamer_read_hwi (ib);
5046 tree type = stream_read_tree (ib, data_in);
fb3f88cc 5047
f714ecf5 5048 if (prevails)
5ce97055 5049 {
f714ecf5
JH
5050 ipa_set_controlled_uses (info, k, nuses);
5051 (*info->descriptors)[k].decl_or_type = type;
5ce97055 5052 }
fb3f88cc 5053 }
f714ecf5
JH
5054 for (e = node->callees; e; e = e->next_callee)
5055 ipa_read_edge_info (ib, data_in, e, prevails);
e33c6cd6 5056 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 5057 {
f714ecf5 5058 ipa_read_edge_info (ib, data_in, e, prevails);
40a777e8 5059 ipa_read_indirect_edge_info (ib, data_in, e, info);
c8246dbe 5060 }
fb3f88cc
JH
5061}
5062
5063/* Write jump functions for nodes in SET. */
5064
5065void
f27c1867 5066ipa_prop_write_jump_functions (void)
fb3f88cc
JH
5067{
5068 struct cgraph_node *node;
93536c97 5069 struct output_block *ob;
fb3f88cc 5070 unsigned int count = 0;
f27c1867
JH
5071 lto_symtab_encoder_iterator lsei;
5072 lto_symtab_encoder_t encoder;
5073
6fe906a3 5074 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 5075 return;
fb3f88cc 5076
93536c97 5077 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5078 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5079 ob->symbol = NULL;
f27c1867
JH
5080 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5081 lsei_next_function_in_partition (&lsei))
fb3f88cc 5082 {
f27c1867 5083 node = lsei_cgraph_node (lsei);
d52f5295 5084 if (node->has_gimple_body_p ()
c47d0034 5085 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5086 count++;
5087 }
5088
412288f1 5089 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5090
5091 /* Process all of the functions. */
f27c1867
JH
5092 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5093 lsei_next_function_in_partition (&lsei))
fb3f88cc 5094 {
f27c1867 5095 node = lsei_cgraph_node (lsei);
d52f5295 5096 if (node->has_gimple_body_p ()
c47d0034 5097 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5098 ipa_write_node_info (ob, node);
5099 }
412288f1 5100 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5101 produce_asm (ob, NULL);
5102 destroy_output_block (ob);
5103}
5104
5105/* Read section in file FILE_DATA of length LEN with data DATA. */
5106
5107static void
5108ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5109 size_t len)
5110{
5111 const struct lto_function_header *header =
5112 (const struct lto_function_header *) data;
4ad9a9de
EB
5113 const int cfg_offset = sizeof (struct lto_function_header);
5114 const int main_offset = cfg_offset + header->cfg_size;
5115 const int string_offset = main_offset + header->main_size;
99b1c316 5116 class data_in *data_in;
fb3f88cc
JH
5117 unsigned int i;
5118 unsigned int count;
5119
207c68cd 5120 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5121 header->main_size, file_data->mode_table);
fb3f88cc
JH
5122
5123 data_in =
5124 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5125 header->string_size, vNULL);
412288f1 5126 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
5127
5128 for (i = 0; i < count; i++)
5129 {
5130 unsigned int index;
5131 struct cgraph_node *node;
7380e6ef 5132 lto_symtab_encoder_t encoder;
fb3f88cc 5133
412288f1 5134 index = streamer_read_uhwi (&ib_main);
7380e6ef 5135 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5136 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5137 index));
67348ccc 5138 gcc_assert (node->definition);
fb3f88cc
JH
5139 ipa_read_node_info (&ib_main, node, data_in);
5140 }
5141 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5142 len);
5143 lto_data_in_delete (data_in);
5144}
5145
5146/* Read ipcp jump functions. */
5147
5148void
5149ipa_prop_read_jump_functions (void)
5150{
5151 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5152 struct lto_file_decl_data *file_data;
5153 unsigned int j = 0;
5154
5155 ipa_check_create_node_params ();
5156 ipa_check_create_edge_args ();
5157 ipa_register_cgraph_hooks ();
5158
5159 while ((file_data = file_data_vec[j++]))
5160 {
5161 size_t len;
3c56d8d8
ML
5162 const char *data
5163 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5164 &len);
fb3f88cc
JH
5165 if (data)
5166 ipa_prop_read_section (file_data, data, len);
5167 }
5168}
5169
2c9561b5 5170void
04be694e 5171write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
5172{
5173 int node_ref;
5174 unsigned int count = 0;
5175 lto_symtab_encoder_t encoder;
5176 struct ipa_agg_replacement_value *aggvals, *av;
5177
5178 aggvals = ipa_get_agg_replacements_for_node (node);
5179 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5180 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
5181 streamer_write_uhwi (ob, node_ref);
5182
5183 for (av = aggvals; av; av = av->next)
5184 count++;
5185 streamer_write_uhwi (ob, count);
5186
5187 for (av = aggvals; av; av = av->next)
5188 {
7b920a9a
MJ
5189 struct bitpack_d bp;
5190
2c9561b5
MJ
5191 streamer_write_uhwi (ob, av->offset);
5192 streamer_write_uhwi (ob, av->index);
5193 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
5194
5195 bp = bitpack_create (ob->main_stream);
5196 bp_pack_value (&bp, av->by_ref, 1);
5197 streamer_write_bitpack (&bp);
2c9561b5 5198 }
04be694e 5199
9d3e0adc 5200 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5201 if (ts && vec_safe_length (ts->m_vr) > 0)
5202 {
5203 count = ts->m_vr->length ();
5204 streamer_write_uhwi (ob, count);
5205 for (unsigned i = 0; i < count; ++i)
5206 {
5207 struct bitpack_d bp;
5208 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5209 bp = bitpack_create (ob->main_stream);
5210 bp_pack_value (&bp, parm_vr->known, 1);
5211 streamer_write_bitpack (&bp);
5212 if (parm_vr->known)
5213 {
5214 streamer_write_enum (ob->main_stream, value_rang_type,
5215 VR_LAST, parm_vr->type);
5216 streamer_write_wide_int (ob, parm_vr->min);
5217 streamer_write_wide_int (ob, parm_vr->max);
5218 }
5219 }
5220 }
5221 else
5222 streamer_write_uhwi (ob, 0);
5223
209ca542
PK
5224 if (ts && vec_safe_length (ts->bits) > 0)
5225 {
5226 count = ts->bits->length ();
5227 streamer_write_uhwi (ob, count);
5228
5229 for (unsigned i = 0; i < count; ++i)
5230 {
86cd0334 5231 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 5232 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 5233 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 5234 streamer_write_bitpack (&bp);
86cd0334 5235 if (bits_jfunc)
209ca542 5236 {
86cd0334
MJ
5237 streamer_write_widest_int (ob, bits_jfunc->value);
5238 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
5239 }
5240 }
5241 }
5242 else
5243 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
5244}
5245
5246/* Stream in the aggregate value replacement chain for NODE from IB. */
5247
5248static void
04be694e
MJ
5249read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5250 data_in *data_in)
2c9561b5
MJ
5251{
5252 struct ipa_agg_replacement_value *aggvals = NULL;
5253 unsigned int count, i;
5254
5255 count = streamer_read_uhwi (ib);
5256 for (i = 0; i <count; i++)
5257 {
5258 struct ipa_agg_replacement_value *av;
7b920a9a 5259 struct bitpack_d bp;
2c9561b5 5260
766090c2 5261 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
5262 av->offset = streamer_read_uhwi (ib);
5263 av->index = streamer_read_uhwi (ib);
5264 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
5265 bp = streamer_read_bitpack (ib);
5266 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
5267 av->next = aggvals;
5268 aggvals = av;
5269 }
5270 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 5271
209ca542
PK
5272 count = streamer_read_uhwi (ib);
5273 if (count > 0)
5274 {
9d3e0adc
ML
5275 ipcp_transformation_initialize ();
5276 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
cb3874dc 5277 vec_safe_grow_cleared (ts->m_vr, count, true);
8bc5448f
KV
5278 for (i = 0; i < count; i++)
5279 {
5280 ipa_vr *parm_vr;
5281 parm_vr = &(*ts->m_vr)[i];
5282 struct bitpack_d bp;
5283 bp = streamer_read_bitpack (ib);
5284 parm_vr->known = bp_unpack_value (&bp, 1);
5285 if (parm_vr->known)
5286 {
54994253 5287 parm_vr->type = streamer_read_enum (ib, value_range_kind,
8bc5448f
KV
5288 VR_LAST);
5289 parm_vr->min = streamer_read_wide_int (ib);
5290 parm_vr->max = streamer_read_wide_int (ib);
5291 }
5292 }
5293 }
5294 count = streamer_read_uhwi (ib);
5295 if (count > 0)
5296 {
9d3e0adc
ML
5297 ipcp_transformation_initialize ();
5298 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
cb3874dc 5299 vec_safe_grow_cleared (ts->bits, count, true);
209ca542
PK
5300
5301 for (i = 0; i < count; i++)
5302 {
209ca542 5303 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
5304 bool known = bp_unpack_value (&bp, 1);
5305 if (known)
209ca542 5306 {
2fb1d6d6
JH
5307 const widest_int value = streamer_read_widest_int (ib);
5308 const widest_int mask = streamer_read_widest_int (ib);
86cd0334 5309 ipa_bits *bits
2fb1d6d6 5310 = ipa_get_ipa_bits_for_value (value, mask);
86cd0334 5311 (*ts->bits)[i] = bits;
209ca542
PK
5312 }
5313 }
5314 }
2c9561b5
MJ
5315}
5316
5317/* Write all aggregate replacement for nodes in set. */
5318
5319void
04be694e 5320ipcp_write_transformation_summaries (void)
2c9561b5
MJ
5321{
5322 struct cgraph_node *node;
5323 struct output_block *ob;
5324 unsigned int count = 0;
5325 lto_symtab_encoder_iterator lsei;
5326 lto_symtab_encoder_t encoder;
5327
2c9561b5
MJ
5328 ob = create_output_block (LTO_section_ipcp_transform);
5329 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5330 ob->symbol = NULL;
2c9561b5
MJ
5331 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5332 lsei_next_function_in_partition (&lsei))
5333 {
5334 node = lsei_cgraph_node (lsei);
04be694e 5335 if (node->has_gimple_body_p ())
2c9561b5
MJ
5336 count++;
5337 }
5338
5339 streamer_write_uhwi (ob, count);
5340
5341 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5342 lsei_next_function_in_partition (&lsei))
5343 {
5344 node = lsei_cgraph_node (lsei);
04be694e
MJ
5345 if (node->has_gimple_body_p ())
5346 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
5347 }
5348 streamer_write_char_stream (ob->main_stream, 0);
5349 produce_asm (ob, NULL);
5350 destroy_output_block (ob);
5351}
5352
5353/* Read replacements section in file FILE_DATA of length LEN with data
5354 DATA. */
5355
5356static void
5357read_replacements_section (struct lto_file_decl_data *file_data,
5358 const char *data,
5359 size_t len)
5360{
5361 const struct lto_function_header *header =
5362 (const struct lto_function_header *) data;
5363 const int cfg_offset = sizeof (struct lto_function_header);
5364 const int main_offset = cfg_offset + header->cfg_size;
5365 const int string_offset = main_offset + header->main_size;
99b1c316 5366 class data_in *data_in;
2c9561b5
MJ
5367 unsigned int i;
5368 unsigned int count;
5369
207c68cd 5370 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5371 header->main_size, file_data->mode_table);
2c9561b5
MJ
5372
5373 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5374 header->string_size, vNULL);
2c9561b5
MJ
5375 count = streamer_read_uhwi (&ib_main);
5376
5377 for (i = 0; i < count; i++)
5378 {
5379 unsigned int index;
5380 struct cgraph_node *node;
5381 lto_symtab_encoder_t encoder;
5382
5383 index = streamer_read_uhwi (&ib_main);
5384 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5385 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5386 index));
67348ccc 5387 gcc_assert (node->definition);
04be694e 5388 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5389 }
5390 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5391 len);
5392 lto_data_in_delete (data_in);
5393}
5394
5395/* Read IPA-CP aggregate replacements. */
5396
5397void
04be694e 5398ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5399{
5400 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5401 struct lto_file_decl_data *file_data;
5402 unsigned int j = 0;
5403
5404 while ((file_data = file_data_vec[j++]))
5405 {
5406 size_t len;
3c56d8d8
ML
5407 const char *data
5408 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5409 &len);
2c9561b5
MJ
5410 if (data)
5411 read_replacements_section (file_data, data, len);
5412 }
5413}
5414
5415/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5416 NODE. */
5417
5418static void
5419adjust_agg_replacement_values (struct cgraph_node *node,
5420 struct ipa_agg_replacement_value *aggval)
5421{
5422 struct ipa_agg_replacement_value *v;
ae7a23a3 5423 clone_info *cinfo = clone_info::get (node);
2c9561b5 5424
ae7a23a3 5425 if (!cinfo || !cinfo->param_adjustments)
2c9561b5
MJ
5426 return;
5427
ff6686d2 5428 auto_vec<int, 16> new_indices;
ae7a23a3 5429 cinfo->param_adjustments->get_updated_indices (&new_indices);
2c9561b5
MJ
5430 for (v = aggval; v; v = v->next)
5431 {
ff6686d2 5432 gcc_checking_assert (v->index >= 0);
2c9561b5 5433
ff6686d2
MJ
5434 if ((unsigned) v->index < new_indices.length ())
5435 v->index = new_indices[v->index];
5436 else
5437 /* This can happen if we know about a constant passed by reference by
5438 an argument which is never actually used for anything, let alone
5439 loading that constant. */
5440 v->index = -1;
5441 }
2c9561b5
MJ
5442}
5443
8aab5218
MJ
5444/* Dominator walker driving the ipcp modification phase. */
5445
5446class ipcp_modif_dom_walker : public dom_walker
5447{
5448public:
56b40062 5449 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
f65f1ae3 5450 vec<ipa_param_descriptor, va_gc> *descs,
8aab5218
MJ
5451 struct ipa_agg_replacement_value *av,
5452 bool *sc, bool *cc)
5453 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5454 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5455
3daacdcd 5456 virtual edge before_dom_children (basic_block);
8aab5218
MJ
5457
5458private:
56b40062 5459 struct ipa_func_body_info *m_fbi;
f65f1ae3 5460 vec<ipa_param_descriptor, va_gc> *m_descriptors;
8aab5218
MJ
5461 struct ipa_agg_replacement_value *m_aggval;
5462 bool *m_something_changed, *m_cfg_changed;
5463};
5464
/* Scan all statements of BB and replace loads from aggregate parameters
   with the constants recorded in m_aggval, when the load matches a known
   replacement by index, offset, size and by-reference-ness.  Sets
   *m_something_changed / *m_cfg_changed accordingly.  Always returns NULL
   (no edge redirection).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only plain register-typed loads are candidates.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Determine which parameter aggregate the load reads from and at
	 what offset/size; bail out if it is not such a load.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a recorded replacement at the same index and offset.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      /* The replacement must also agree in by-ref-ness and in the size of
	 the stored value, otherwise it does not describe this load.  */
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  /* Try a value-preserving conversion first, then a same-size
	     bit-reinterpretation; otherwise give up on this load.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      /* Replace the load with the known constant value.  */
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* The statement may no longer throw; purging dead EH edges can
	 change the CFG, which the caller must know about.  */
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5560
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.

   PARM must be a PARM_DECL of current_function_decl; its position in the
   DECL_ARGUMENTS chain is used (after remapping through any clone
   parameter adjustments) to index the known-bits vector of the node's
   IPA-CP transformation summary.  Returns false when no bits information
   is recorded for this parameter.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  /* Find the index of PARM among the formal arguments.  */
  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  /* In a clone, parameter positions may have been shuffled or dropped;
     translate back to the index used in the summary.  */
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
5597
5598
/* Update bits info of formal parameters as described in
   ipcp_transformation.

   For each parameter of NODE with recorded known bits: integral SSA
   default defs get their nonzero-bits range info updated, pointer ones
   get alignment/misalignment info derived from the trailing zero bits of
   the mask.  Indices are remapped through clone parameter adjustments
   when NODE is a clone.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* When NODE is a clone, summary indices refer to the original
     parameters; build the map to the clone's surviving parameters.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  /* Skip parameters that were removed in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      /* Only integral or pointer parameters that live in SSA form can
	 carry range/alignment annotations.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* A bit may be nonzero if it is unknown (set in mask) or known
	     to be one (set in value).  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the mask bounds the known
	     alignment; the known value bits below it give the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5708
523fe5b6
AH
5709bool
5710ipa_vr::nonzero_p (tree expr_type) const
5711{
5712 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5713 return true;
5714
5715 unsigned prec = TYPE_PRECISION (expr_type);
5716 return (type == VR_RANGE
19849d15 5717 && TYPE_UNSIGNED (expr_type)
523fe5b6
AH
5718 && wi::eq_p (min, wi::one (prec))
5719 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5720}
5721
/* Update value range of formal parameters as described in
   ipcp_transformation.

   For each parameter of NODE with a recorded value range: integral SSA
   default defs receive range info via set_range_info; pointer ones are
   marked non-null when the range excludes zero.  Indices are remapped
   through clone parameter adjustments when NODE is a clone.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* When NODE is a clone, summary indices refer to the original
     parameters; build the map to the clone's surviving parameters.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  /* Skip parameters that were removed in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      /* Range info can only attach to an SSA default definition.  */
      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      /* Convert the stored widest_int bounds to the precision and
		 sign of the parameter's type.  */
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      /* A pointer range excluding zero means the pointer is
		 known to be non-null.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5804
/* IPCP transformation phase doing propagation of aggregate values.

   Entry point of the IPA-CP transformation for NODE (must be the current
   function): applies recorded bits and value-range information to the
   parameters, then walks the body in dominator order replacing loads
   from aggregate parameters with known constants.  Returns a TODO flag
   set for the pass manager (0 when nothing changed).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  /* The rest of the work only concerns aggregate constant replacements;
     bail out early when there are none.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices from the original function to this clone.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  /* Bound the number of alias-analysis steps spent per walk.  */
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Perform the actual statement rewriting.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
86cd0334 5870
ac6f2e59 5871
eb270950 5872/* Return true if OTHER describes same agg value. */
ac6f2e59 5873bool
eb270950 5874ipa_agg_value::equal_to (const ipa_agg_value &other)
ac6f2e59
JH
5875{
5876 return offset == other.offset
5877 && operand_equal_p (value, other.value, 0);
5878}
9d5af1db
MJ
5879
/* Destructor also removing individual aggregate values.

   The second argument being false presumably means the vectors themselves
   are not freed here (the auto_vec members release their own storage) —
   TODO confirm against ipa_release_agg_values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}
5886
5887
5888
86cd0334 5889#include "gt-ipa-prop.h"