]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.cc
Implement ipa_vr hashing.
[thirdparty/gcc.git] / gcc / ipa-prop.cc
CommitLineData
518dc859 1/* Interprocedural analyses.
aeee4812 2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
ba206889 34#include "gimple-iterator.h"
2fb9a547
AM
35#include "gimple-fold.h"
36#include "tree-eh.h"
36566b39 37#include "calls.h"
d8a2d370
DN
38#include "stor-layout.h"
39#include "print-tree.h"
45b0be94 40#include "gimplify.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
c7b6a758 55#include "options.h"
ae7a23a3 56#include "symtab-clones.h"
6cef01c3 57#include "attr-fnspec.h"
45f4e2b0 58#include "gimple-range.h"
cfe17528 59#include "value-range-storage.h"
771578a0 60
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Function summary of IPA-CP transformation data, one entry per node.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 68
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    /* Only the low HOST_WIDE_INT of value and mask feed the hash;
       equal () still compares the full wide ints.  */
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  /* Empty slots are represented by NULL, i.e. all-zero storage.  */
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  /* Deleted slots are distinguished from empty ones by the sentinel
     pointer value 1.  */
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
111
/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
  {
    // This never gets called, except in the verification code, as
    // ipa_get_value_range () calculates the hash itself.  This
    // function is mostly here for completeness' sake.
    Value_Range vr;
    p->get_vrange (vr);
    inchash::hash hstate;
    add_vrange (vr, hstate);
    return hstate.end ();
  }
  static bool
  equal (const ipa_vr *a, const vrange *b)
  {
    return a->equal_p (*b);
  }
  /* Empty slots are represented by NULL, i.e. all-zero storage.  */
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_vr *p)
  {
    return p == NULL;
  }
  /* Deleted slots are distinguished from empty ones by the sentinel
     pointer value 1.  */
  static bool
  is_deleted (const ipa_vr *p)
  {
    return p == reinterpret_cast<const ipa_vr *> (1);
  }
  static void
  mark_deleted (ipa_vr *&p)
  {
    p = reinterpret_cast<ipa_vr *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
160
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 180
cfe17528
AH
/* Default constructor: an ipa_vr with no stored range (unknown).  */

ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}

/* Construct an ipa_vr holding a GC-allocated copy of range R.  */

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}
192
/* Return true if R matches the stored range in both type and value.
   R must not be undefined.  */

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}
199
/* Fill R with the range stored in this ipa_vr.  Assumes a range is
   stored (m_storage is non-NULL).  */

void
ipa_vr::get_vrange (Value_Range &r) const
{
  r.set_type (m_type);
  m_storage->get_vrange (r, m_type);
}
206
/* Free any stored range and mark this ipa_vr as unknown.  */

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}
215
/* Read an ipa_vr from IB using DATA_IN: a one-bit known flag followed,
   when set, by the streamed range.  Existing storage is reused when the
   incoming range fits into it.  Inverse of streamer_write.  */

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      /* Reallocate only if the current storage cannot hold VR.  */
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}
240
/* Stream this ipa_vr to OB: a one-bit known flag followed, when a range
   is stored, by the range itself.  Inverse of streamer_read.  */

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}
254
/* Dump the stored range to OUT, or "NO RANGE" when nothing is known.  */

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}
267
// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.  They simply forward to the
// pointer overloads.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}
282
283
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  /* Analysis is pointless when the function is not optimized at all or
     has IPA-CP explicitly disabled.  */
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
be95e2b9
MJ
297/* Return index of the formal whose tree is PTREE in function which corresponds
298 to INFO. */
299
d044dd17 300static int
f65f1ae3
MJ
301ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
302 tree ptree)
518dc859
RL
303{
304 int i, count;
305
f65f1ae3 306 count = vec_safe_length (descriptors);
518dc859 307 for (i = 0; i < count; i++)
f65f1ae3 308 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
309 return i;
310
311 return -1;
312}
313
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if it is not found.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
322
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  NODE must have a gimple body; DESCRIPTORS must already be sized to
   hold one entry per formal parameter.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
349
3f84bf08
MJ
350/* Return how many formal parameters FNDECL has. */
351
fd29c024 352int
310bc633 353count_formal_params (tree fndecl)
3f84bf08
MJ
354{
355 tree parm;
356 int count = 0;
0e8853ee 357 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 358
910ad8de 359 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
360 count++;
361
362 return count;
363}
364
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  /* The decl may be missing; print only the index in that case.  */
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
379
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  /* Only allocate once, and only when there is something to describe.  */
  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
396
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
410
749aa96d
MJ
411/* Print the jump functions associated with call graph edge CS to file F. */
412
413static void
414ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
415{
a4a3cdd0
MJ
416 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
417 int count = ipa_get_cs_argument_count (args);
749aa96d 418
a4a3cdd0 419 for (int i = 0; i < count; i++)
749aa96d
MJ
420 {
421 struct ipa_jump_func *jump_func;
422 enum jump_func_type type;
423
a4a3cdd0 424 jump_func = ipa_get_ith_jump_func (args, i);
749aa96d
MJ
425 type = jump_func->type;
426
427 fprintf (f, " param %d: ", i);
428 if (type == IPA_JF_UNKNOWN)
429 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
430 else if (type == IPA_JF_CONST)
431 {
4502fe8d 432 tree val = jump_func->value.constant.value;
749aa96d 433 fprintf (f, "CONST: ");
ef6cb4c7 434 print_generic_expr (f, val);
749aa96d
MJ
435 if (TREE_CODE (val) == ADDR_EXPR
436 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
437 {
438 fprintf (f, " -> ");
ef6cb4c7 439 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
440 }
441 fprintf (f, "\n");
442 }
749aa96d
MJ
443 else if (type == IPA_JF_PASS_THROUGH)
444 {
445 fprintf (f, "PASS THROUGH: ");
8b7773a4 446 fprintf (f, "%d, op %s",
749aa96d 447 jump_func->value.pass_through.formal_id,
5806f481 448 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 449 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
450 {
451 fprintf (f, " ");
ef6cb4c7 452 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
453 }
454 if (jump_func->value.pass_through.agg_preserved)
455 fprintf (f, ", agg_preserved");
8e08c788
MJ
456 if (jump_func->value.pass_through.refdesc_decremented)
457 fprintf (f, ", refdesc_decremented");
3ea6239f 458 fprintf (f, "\n");
749aa96d
MJ
459 }
460 else if (type == IPA_JF_ANCESTOR)
461 {
462 fprintf (f, "ANCESTOR: ");
16998094 463 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
464 jump_func->value.ancestor.formal_id,
465 jump_func->value.ancestor.offset);
8b7773a4
MJ
466 if (jump_func->value.ancestor.agg_preserved)
467 fprintf (f, ", agg_preserved");
7ea3a73c
MJ
468 if (jump_func->value.ancestor.keep_null)
469 fprintf (f, ", keep_null");
3ea6239f 470 fprintf (f, "\n");
749aa96d 471 }
8b7773a4
MJ
472
473 if (jump_func->agg.items)
474 {
475 struct ipa_agg_jf_item *item;
476 int j;
477
478 fprintf (f, " Aggregate passed by %s:\n",
479 jump_func->agg.by_ref ? "reference" : "value");
eb270950 480 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
8b7773a4
MJ
481 {
482 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
483 item->offset);
eb270950
FX
484 fprintf (f, "type: ");
485 print_generic_expr (f, item->type);
486 fprintf (f, ", ");
487 if (item->jftype == IPA_JF_PASS_THROUGH)
488 fprintf (f, "PASS THROUGH: %d,",
489 item->value.pass_through.formal_id);
490 else if (item->jftype == IPA_JF_LOAD_AGG)
491 {
492 fprintf (f, "LOAD AGG: %d",
493 item->value.pass_through.formal_id);
494 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
495 item->value.load_agg.offset,
496 item->value.load_agg.by_ref ? "reference"
497 : "value");
498 }
499
500 if (item->jftype == IPA_JF_PASS_THROUGH
501 || item->jftype == IPA_JF_LOAD_AGG)
502 {
503 fprintf (f, " op %s",
504 get_tree_code_name (item->value.pass_through.operation));
505 if (item->value.pass_through.operation != NOP_EXPR)
506 {
507 fprintf (f, " ");
508 print_generic_expr (f, item->value.pass_through.operand);
509 }
510 }
511 else if (item->jftype == IPA_JF_CONST)
8b7773a4 512 {
eb270950
FX
513 fprintf (f, "CONST: ");
514 print_generic_expr (f, item->value.constant);
8b7773a4 515 }
eb270950
FX
516 else if (item->jftype == IPA_JF_UNKNOWN)
517 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
518 tree_to_uhwi (TYPE_SIZE (item->type)));
8b7773a4
MJ
519 fprintf (f, "\n");
520 }
521 }
44210a96 522
99b1c316 523 class ipa_polymorphic_call_context *ctx
a4a3cdd0 524 = ipa_get_ith_polymorhic_call_context (args, i);
44210a96
MJ
525 if (ctx && !ctx->useless_p ())
526 {
527 fprintf (f, " Context: ");
528 ctx->dump (dump_file);
529 }
04be694e 530
86cd0334 531 if (jump_func->bits)
209ca542 532 {
86cd0334
MJ
533 fprintf (f, " value: ");
534 print_hex (jump_func->bits->value, f);
535 fprintf (f, ", mask: ");
536 print_hex (jump_func->bits->mask, f);
209ca542
PK
537 fprintf (f, "\n");
538 }
539 else
540 fprintf (f, " Unknown bits\n");
8bc5448f 541
86cd0334 542 if (jump_func->m_vr)
8bc5448f 543 {
1a8087c7
AH
544 jump_func->m_vr->dump (f);
545 fprintf (f, "\n");
8bc5448f
KV
546 }
547 else
548 fprintf (f, " Unknown VR\n");
749aa96d
MJ
549 }
550}
551
552
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Covers both direct callees and indirect call sites.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
606
607/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 608
3e293154
MJ
609void
610ipa_print_all_jump_functions (FILE *f)
611{
612 struct cgraph_node *node;
613
ca30a539 614 fprintf (f, "\nJump functions:\n");
65c70e6b 615 FOR_EACH_FUNCTION (node)
3e293154
MJ
616 {
617 ipa_print_node_jump_functions (f, node);
618 }
619}
620
/* Set JFUNC to be a jump function representing no knowledge whatsoever
   about the argument (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
628
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Shallow copy: the rdesc pointer inside value.constant is shared.  */
  dst->value.constant = src->value.constant;
}
641
/* Set JFUNC to be a constant jmp function.  CS is the edge taking the
   reference; a reference description is allocated for addresses of
   functions and static variables so that reference counts can be
   maintained when edges are cloned or removed.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  /* Only addresses of functions and static variables get a refdesc.  */
  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
667
/* Set JFUNC to be a simple pass-through jump function: the argument is the
   caller's formal FORMAL_ID passed through unchanged (NOP_EXPR).  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}
680
/* Set JFUNC to be a unary pass through jump function: OPERATION applied to
   the caller's formal FORMAL_ID, with no extra operand.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
/* Set JFUNC to be an arithmetic pass through jump function: OPERATION
   applied to the caller's formal FORMAL_ID and constant OPERAND.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
707
/* Set JFUNC to be an ancestor jump function: the argument is the caller's
   formal FORMAL_ID with OFFSET added to it.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}
720
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
730
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
745
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
   */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Non-aggregate stores can be filtered further; aggregate stores
	 are conservatively assumed to possibly overwrite a VMT pointer.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
810
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  Returning true stops the walk.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
830
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 884
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Out of alias-walking budget: give up without assuming a change.  */
  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  /* A negative result means the walk was aborted; charge the whole budget.  */
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
944
058d0a90 945/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
c199f329 946 If it is, return true. ARG is the object itself (not a pointer
058d0a90
JH
947 to it, unless dereferenced). BASE is the base of the memory access as
948 returned by get_ref_base_and_extent, as is the offset. */
949
950static bool
c628d1c3 951detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
c199f329 952 tree comp_type, gcall *call,
c628d1c3 953 HOST_WIDE_INT offset)
058d0a90
JH
954{
955 if (!flag_devirtualize)
956 return false;
957
958 if (TREE_CODE (base) == MEM_REF
959 && !param_type_may_change_p (current_function_decl,
960 TREE_OPERAND (base, 0),
961 call))
962 return false;
c628d1c3 963 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
c199f329 964 call, offset);
058d0a90
JH
965}
966
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  /* Only pointers can reference polymorphic objects; nothing to do when
     devirtualization is disabled.  */
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build a zero-offset dereference of ARG so the generic helper can treat
     it like any other memory access; the MEM_REF serves as both the
     reference and its base.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
989
fdb0e1b4
MJ
990/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
991 boolean variable pointed to by DATA. */
992
993static bool
994mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
995 void *data)
996{
997 bool *b = (bool *) data;
998 *b = true;
999 return true;
1000}
1001
8aab5218
MJ
1002/* Find the nearest valid aa status for parameter specified by INDEX that
1003 dominates BB. */
1004
56b40062
MJ
1005static struct ipa_param_aa_status *
1006find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
1007 int index)
1008{
1009 while (true)
1010 {
1011 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1012 if (!bb)
1013 return NULL;
1014 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1015 if (!bi->param_aa_statuses.is_empty ()
1016 && bi->param_aa_statuses[index].valid)
1017 return &bi->param_aa_statuses[index];
1018 }
1019}
1020
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, one slot per parameter.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared status must not carry stale modification bits.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating block that already has a status so
	 that alias walks need not be repeated.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
1049
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified.  */
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Give up if a modification was already recorded or the alias-analysis
     walk budget is exhausted.  */
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: conservatively assume a modification and disable
	 further walking.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* NOTE(review): PAA cannot be NULL here (it was dereferenced above), so
     the first operand of this condition is redundant.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
1089
a2b4c188
KV
1090/* If STMT is an assignment that loads a value from an parameter declaration,
1091 return the index of the parameter in ipa_node_params which has not been
1092 modified. Otherwise return -1. */
1093
1094static int
1095load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 1096 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
1097 gimple *stmt)
1098{
bda2bc48
MJ
1099 int index;
1100 tree op1;
1101
a2b4c188
KV
1102 if (!gimple_assign_single_p (stmt))
1103 return -1;
1104
bda2bc48
MJ
1105 op1 = gimple_assign_rhs1 (stmt);
1106 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
1107 return -1;
1108
bda2bc48
MJ
1109 index = ipa_get_param_decl_index_1 (descriptors, op1);
1110 if (index < 0
1111 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
1112 return -1;
1113
bda2bc48 1114 return index;
a2b4c188
KV
1115}
1116
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Bail out on a previously recorded modification or an exhausted
     alias-analysis walk budget.  */
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: assume the worst and disable further walks.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache the negative result so future queries are cheap.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1150
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  /* Bail out on a previously recorded modification or an exhausted
     alias-analysis walk budget.  */
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  /* Describe everything PARM may point to (size unknown, hence
     NULL_TREE) and walk the virtual definitions before CALL.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      /* Aborted walk: assume a modification and disable further walks.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1190
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  /* Case 1: the aggregate is passed by value and BASE is the parameter
     declaration itself.  */
  if (DECL_P (base))
    {
      /* NOTE(review): this INDEX deliberately shadows the outer variable;
	 the branch always returns before the outer one would be used.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: passed by reference - BASE must be a zero-offset dereference
     of an SSA name.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* Without the relaxed mode, an unprovable value is a failure.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1293
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      /* An offset of -1 marks a scalar parameter load as opposed to an
	 aggregate one.  */
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      /* A by-value aggregate load only makes sense when the parameter
	 itself has an aggregate type.  */
      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1354
c7b6a758
JH
1355/* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1356 to find original pointer. Initialize RET to the pointer which results from
1357 the walk.
1358 If offset is known return true and initialize OFFSET_RET. */
1359
1360bool
1361unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1362{
1363 poly_int64 offset = 0;
1364 bool offset_known = true;
1365 int i;
1366
1367 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1368 {
1369 if (TREE_CODE (op) == ADDR_EXPR)
1370 {
1371 poly_int64 extra_offset = 0;
1372 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1373 &offset);
1374 if (!base)
1375 {
1376 base = get_base_address (TREE_OPERAND (op, 0));
1377 if (TREE_CODE (base) != MEM_REF)
1378 break;
1379 offset_known = false;
1380 }
1381 else
1382 {
1383 if (TREE_CODE (base) != MEM_REF)
1384 break;
1385 offset += extra_offset;
1386 }
1387 op = TREE_OPERAND (base, 0);
1388 if (mem_ref_offset (base).to_shwi (&extra_offset))
1389 offset += extra_offset;
1390 else
1391 offset_known = false;
1392 }
1393 else if (TREE_CODE (op) == SSA_NAME
1394 && !SSA_NAME_IS_DEFAULT_DEF (op))
1395 {
1396 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1397
1398 if (gimple_assign_single_p (pstmt))
1399 op = gimple_assign_rhs1 (pstmt);
1400 else if (is_gimple_assign (pstmt)
1401 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1402 {
1403 poly_int64 extra_offset = 0;
1404 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1405 &extra_offset))
1406 offset += extra_offset;
1407 else
1408 offset_known = false;
1409 op = gimple_assign_rhs1 (pstmt);
1410 }
1411 else
1412 break;
1413 }
1414 else
1415 break;
1416 }
1417 *ret = op;
1418 *offset_ret = offset;
1419 return offset_known;
1420}
1421
b258210c 1422/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1423 of an assignment statement STMT, try to determine whether we are actually
1424 handling any of the following cases and construct an appropriate jump
1425 function into JFUNC if so:
1426
1427 1) The passed value is loaded from a formal parameter which is not a gimple
1428 register (most probably because it is addressable, the value has to be
1429 scalar) and we can guarantee the value has not changed. This case can
1430 therefore be described by a simple pass-through jump function. For example:
1431
1432 foo (int a)
1433 {
1434 int a.0;
1435
1436 a.0_2 = a;
1437 bar (a.0_2);
1438
1439 2) The passed value can be described by a simple arithmetic pass-through
1440 jump function. E.g.
1441
1442 foo (int a)
1443 {
1444 int D.2064;
1445
1446 D.2064_4 = a.1(D) + 4;
1447 bar (D.2064_4);
1448
1449 This case can also occur in combination of the previous one, e.g.:
1450
1451 foo (int a, int z)
1452 {
1453 int a.0;
1454 int D.2064;
1455
1456 a.0_3 = a;
1457 D.2064_4 = a.0_3 + 4;
1458 foo (D.2064_4);
1459
1460 3) The passed value is an address of an object within another one (which
1461 also passed by reference). Such situations are described by an ancestor
1462 jump function and describe situations such as:
1463
1464 B::foo() (struct B * const this)
1465 {
1466 struct A * D.1845;
1467
1468 D.1845_2 = &this_1(D)->D.1748;
1469 A::bar (D.1845_2);
1470
1471 INFO is the structure describing individual parameters access different
1472 stages of IPA optimizations. PARMS_AINFO contains the information that is
1473 only needed for intraprocedural analysis. */
685b0d13
MJ
1474
1475static void
56b40062 1476compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1477 class ipa_node_params *info,
b258210c 1478 struct ipa_jump_func *jfunc,
355fe088 1479 gcall *call, gimple *stmt, tree name,
06d65050 1480 tree param_type)
685b0d13 1481{
588db50c 1482 HOST_WIDE_INT offset, size;
fdb0e1b4 1483 tree op1, tc_ssa, base, ssa;
ee45a32d 1484 bool reverse;
685b0d13 1485 int index;
685b0d13 1486
685b0d13 1487 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1488
fdb0e1b4 1489 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1490 {
fdb0e1b4
MJ
1491 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1492 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1493 else
bda2bc48
MJ
1494 index = load_from_unmodified_param (fbi, info->descriptors,
1495 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1496 tc_ssa = op1;
1497 }
1498 else
1499 {
bda2bc48 1500 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1501 tc_ssa = gimple_assign_lhs (stmt);
1502 }
1503
1504 if (index >= 0)
1505 {
a77af182 1506 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1507 {
a77af182
RB
1508 case GIMPLE_BINARY_RHS:
1509 {
1510 tree op2 = gimple_assign_rhs2 (stmt);
1511 if (!is_gimple_ip_invariant (op2)
1512 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1513 != tcc_comparison)
1514 && !useless_type_conversion_p (TREE_TYPE (name),
1515 TREE_TYPE (op1))))
1516 return;
1517
1518 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1519 gimple_assign_rhs_code (stmt));
1520 break;
1521 }
1522 case GIMPLE_SINGLE_RHS:
1523 {
1524 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1525 tc_ssa);
1526 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1527 break;
1528 }
1529 case GIMPLE_UNARY_RHS:
b66113e9 1530 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1531 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1532 gimple_assign_rhs_code (stmt));
a77af182 1533 default:;
8b7773a4 1534 }
685b0d13
MJ
1535 return;
1536 }
1537
1538 if (TREE_CODE (op1) != ADDR_EXPR)
1539 return;
1540 op1 = TREE_OPERAND (op1, 0);
588db50c 1541 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1542 offset_int mem_offset;
1543 if (!base
1544 || TREE_CODE (base) != MEM_REF
1545 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1546 return;
aca52e6f 1547 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1548 ssa = TREE_OPERAND (base, 0);
1549 if (TREE_CODE (ssa) != SSA_NAME
1550 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1551 || offset < 0)
685b0d13
MJ
1552 return;
1553
b8f6e610 1554 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1555 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1556 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7 1557 ipa_set_ancestor_jf (jfunc, offset, index,
7ea3a73c
MJ
1558 parm_ref_data_pass_through_p (fbi, index, call, ssa),
1559 false);
685b0d13
MJ
1560}
1561
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  /* The MEM_REF's own offset must also be a representable constant.  */
  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF offset into the component offset gathered above.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1605
685b0d13 1606
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the null pointer, the other the offsetted
     address; pick the latter.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The predecessor block must end with "if (parm != 0)".  */
  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must belong to the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
1687
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* The first field must be a pointer to a method.  */
  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  /* The second field is the delta.  NOTE(review): INTEGRAL_TYPE_P is
     applied to the FIELD_DECL itself here, which can never satisfy it, so
     this sub-test is a no-op; it was presumably meant to check
     TREE_TYPE (fld) -- confirm before changing, as that would tighten the
     predicate.  */
  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  /* There must be exactly two fields.  */
  if (DECL_CHAIN (fld))
    return false;

  return true;
}
1722
61502ca8 1723/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
eb270950
FX
1724 return the rhs of its defining statement, and this statement is stored in
1725 *RHS_STMT. Otherwise return RHS as it is. */
7ec49257
MJ
1726
1727static inline tree
eb270950 1728get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
7ec49257
MJ
1729{
1730 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1731 {
355fe088 1732 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1733
1734 if (gimple_assign_single_p (def_stmt))
1735 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1736 else
1737 break;
eb270950 1738 *rhs_stmt = def_stmt;
7ec49257
MJ
1739 }
1740 return rhs;
1741}
1742
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  Lists are kept sorted by
     ascending OFFSET (see add_to_agg_contents_list).  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1759
eb270950
FX
1760/* Add an aggregate content item into a linked list of
1761 ipa_known_agg_contents_list structure, in which all elements
1762 are sorted ascendingly by offset. */
0d48ee34 1763
46771da5
FX
1764static inline void
1765add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1766 struct ipa_known_agg_contents_list *item)
0d48ee34 1767{
46771da5
FX
1768 struct ipa_known_agg_contents_list *list = *plist;
1769
1770 for (; list; list = list->next)
0d48ee34 1771 {
46771da5
FX
1772 if (list->offset >= item->offset)
1773 break;
1774
1775 plist = &list->next;
0d48ee34
MJ
1776 }
1777
46771da5
FX
1778 item->next = list;
1779 *plist = item;
1780}
1781
eb270950 1782/* Check whether a given aggregate content is clobbered by certain element in
46771da5
FX
1783 a linked list of ipa_known_agg_contents_list. */
1784
1785static inline bool
1786clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1787 struct ipa_known_agg_contents_list *item)
1788{
1789 for (; list; list = list->next)
0d48ee34 1790 {
46771da5
FX
1791 if (list->offset >= item->offset)
1792 return list->offset < item->offset + item->size;
1793
1794 if (list->offset + list->size > item->offset)
1795 return true;
0d48ee34 1796 }
46771da5
FX
1797
1798 return false;
0d48ee34
MJ
1799}
1800
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  A
	     non-negative offset means the value is loaded from an
	     aggregate passed in that parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	/* Neither a formal id nor a constant: nothing useful to record.  */
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Offsets in the jump function are relative to the argument.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1847
1848/* Given an assignment statement STMT, try to collect information into
1849 AGG_VALUE that will be used to construct jump function for RHS of the
1850 assignment, from which content value of an aggregate part comes.
1851
1852 Besides constant and simple pass-through jump functions, also try to
1853 identify whether it matches the following pattern that can be described by
1854 a load-value-from-aggregate jump function, which is a derivative of simple
1855 pass-through jump function.
1856
1857 foo (int *p)
1858 {
1859 ...
1860
1861 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1862 bar (q_5);
1863 }
1864
1865 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1866 constant, simple pass-through and load-vale-from-aggregate. If value
1867 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1868 set to -1. For simple pass-through and load-value-from-aggregate, field
1869 FORMAL_ID specifies the related formal parameter index, and field
1870 OFFSET can be used to distinguish them, -1 means simple pass-through,
1871 otherwise means load-value-from-aggregate. */
1872
1873static void
1874analyze_agg_content_value (struct ipa_func_body_info *fbi,
1875 struct ipa_load_agg_data *agg_value,
1876 gimple *stmt)
1877{
1878 tree lhs = gimple_assign_lhs (stmt);
1879 tree rhs1 = gimple_assign_rhs1 (stmt);
1880 enum tree_code code;
1881 int index = -1;
1882
1883 /* Initialize jump function data for the aggregate part. */
1884 memset (agg_value, 0, sizeof (*agg_value));
1885 agg_value->pass_through.operation = NOP_EXPR;
1886 agg_value->pass_through.formal_id = -1;
1887 agg_value->offset = -1;
1888
1889 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1890 || TREE_THIS_VOLATILE (lhs)
1891 || TREE_CODE (lhs) == BIT_FIELD_REF
1892 || contains_bitfld_component_ref_p (lhs))
1893 return;
1894
1895 /* Skip SSA copies. */
1896 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1897 {
1898 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1899 break;
1900
1901 stmt = SSA_NAME_DEF_STMT (rhs1);
1902 if (!is_gimple_assign (stmt))
f38a33a2 1903 break;
eb270950
FX
1904
1905 rhs1 = gimple_assign_rhs1 (stmt);
1906 }
1907
f38a33a2 1908 if (gphi *phi = dyn_cast<gphi *> (stmt))
0d48ee34 1909 {
f38a33a2
MJ
1910 /* Also special case like the following (a is a formal parameter):
1911
1912 _12 = *a_11(D).dim[0].stride;
1913 ...
1914 # iftmp.22_9 = PHI <_12(2), 1(3)>
1915 ...
1916 parm.6.dim[0].stride = iftmp.22_9;
1917 ...
1918 __x_MOD_foo (&parm.6, b_31(D));
1919
1920 The aggregate function describing parm.6.dim[0].stride is encoded as a
1921 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1922 (the constant from the PHI node). */
1923
1924 if (gimple_phi_num_args (phi) != 2)
1925 return;
1926 tree arg0 = gimple_phi_arg_def (phi, 0);
1927 tree arg1 = gimple_phi_arg_def (phi, 1);
1928 tree operand;
1929
1930 if (is_gimple_ip_invariant (arg1))
0d48ee34 1931 {
f38a33a2
MJ
1932 operand = arg1;
1933 rhs1 = arg0;
1934 }
1935 else if (is_gimple_ip_invariant (arg0))
1936 {
1937 operand = arg0;
1938 rhs1 = arg1;
0d48ee34 1939 }
f38a33a2 1940 else
eb270950
FX
1941 return;
1942
1943 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
f38a33a2
MJ
1944 if (!is_gimple_assign (stmt))
1945 return;
eb270950 1946
f38a33a2
MJ
1947 code = ASSERT_EXPR;
1948 agg_value->pass_through.operand = operand;
1949 }
1950 else if (is_gimple_assign (stmt))
1951 {
1952 code = gimple_assign_rhs_code (stmt);
1953 switch (gimple_assign_rhs_class (stmt))
1954 {
1955 case GIMPLE_SINGLE_RHS:
1956 if (is_gimple_ip_invariant (rhs1))
1957 {
1958 agg_value->pass_through.operand = rhs1;
1959 return;
1960 }
1961 code = NOP_EXPR;
1962 break;
1963
1964 case GIMPLE_UNARY_RHS:
1965 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1966 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1967 tcc_binary, this subtleness is somewhat misleading.
eb270950 1968
f38a33a2
MJ
1969 Since tcc_unary is widely used in IPA-CP code to check an operation
1970 with one operand, here we only allow tc_unary operation to avoid
1971 possible problem. Then we can use (opclass == tc_unary) or not to
1972 distinguish unary and binary. */
1973 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1974 return;
eb270950 1975
f38a33a2
MJ
1976 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1977 break;
1978
1979 case GIMPLE_BINARY_RHS:
eb270950 1980 {
f38a33a2
MJ
1981 gimple *rhs1_stmt = stmt;
1982 gimple *rhs2_stmt = stmt;
1983 tree rhs2 = gimple_assign_rhs2 (stmt);
1984
1985 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1986 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1987
1988 if (is_gimple_ip_invariant (rhs2))
1989 {
1990 agg_value->pass_through.operand = rhs2;
1991 stmt = rhs1_stmt;
1992 }
1993 else if (is_gimple_ip_invariant (rhs1))
1994 {
1995 if (TREE_CODE_CLASS (code) == tcc_comparison)
1996 code = swap_tree_comparison (code);
1997 else if (!commutative_tree_code (code))
1998 return;
1999
2000 agg_value->pass_through.operand = rhs1;
2001 stmt = rhs2_stmt;
2002 rhs1 = rhs2;
2003 }
2004 else
eb270950
FX
2005 return;
2006
f38a33a2
MJ
2007 if (TREE_CODE_CLASS (code) != tcc_comparison
2008 && !useless_type_conversion_p (TREE_TYPE (lhs),
2009 TREE_TYPE (rhs1)))
2010 return;
eb270950 2011 }
f38a33a2 2012 break;
eb270950 2013
f38a33a2 2014 default:
eb270950 2015 return;
f38a33a2
MJ
2016 }
2017 }
2018 else
2019 return;
eb270950
FX
2020
2021 if (TREE_CODE (rhs1) != SSA_NAME)
2022 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
2023 &agg_value->offset,
2024 &agg_value->by_ref);
2025 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
2026 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
2027
2028 if (index >= 0)
2029 {
2030 if (agg_value->offset >= 0)
2031 agg_value->type = TREE_TYPE (rhs1);
2032 agg_value->pass_through.formal_id = index;
2033 agg_value->pass_through.operation = code;
0d48ee34 2034 }
eb270950
FX
2035 else
2036 agg_value->pass_through.operand = NULL_TREE;
0d48ee34
MJ
2037}
2038
46771da5
FX
2039/* If STMT is a memory store to the object whose address is BASE, extract
2040 information (offset, size, and value) into CONTENT, and return true,
2041 otherwise we conservatively assume the whole object is modified with
2042 unknown content, and return false. CHECK_REF means that access to object
2043 is expected to be in form of MEM_REF expression. */
2044
2045static bool
eb270950
FX
2046extract_mem_content (struct ipa_func_body_info *fbi,
2047 gimple *stmt, tree base, bool check_ref,
46771da5
FX
2048 struct ipa_known_agg_contents_list *content)
2049{
2050 HOST_WIDE_INT lhs_offset, lhs_size;
46771da5
FX
2051 bool reverse;
2052
eb270950 2053 if (!is_gimple_assign (stmt))
46771da5
FX
2054 return false;
2055
eb270950
FX
2056 tree lhs = gimple_assign_lhs (stmt);
2057 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2058 &reverse);
46771da5
FX
2059 if (!lhs_base)
2060 return false;
2061
2062 if (check_ref)
2063 {
2064 if (TREE_CODE (lhs_base) != MEM_REF
2065 || TREE_OPERAND (lhs_base, 0) != base
2066 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2067 return false;
2068 }
2069 else if (lhs_base != base)
2070 return false;
2071
46771da5 2072 content->offset = lhs_offset;
eb270950
FX
2073 content->size = lhs_size;
2074 content->type = TREE_TYPE (lhs);
46771da5
FX
2075 content->next = NULL;
2076
eb270950 2077 analyze_agg_content_value (fbi, &content->value, stmt);
46771da5
FX
2078 return true;
2079}
2080
8b7773a4 2081/* Traverse statements from CALL backwards, scanning whether an aggregate given
eb270950
FX
2082 in ARG is filled in constants or values that are derived from caller's
2083 formal parameter in the way described by some kinds of jump functions. FBI
2084 is the context of the caller function for interprocedural analysis. ARG can
2085 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2086 the type of the aggregate, JFUNC is the jump function for the aggregate. */
be95e2b9 2087
3e293154 2088static void
eb270950
FX
2089determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
2090 gcall *call, tree arg,
46771da5 2091 tree arg_type,
eb270950 2092 struct ipa_jump_func *jfunc)
3e293154 2093{
46771da5
FX
2094 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
2095 bitmap visited = NULL;
eb270950 2096 int item_count = 0, value_count = 0;
8b7773a4 2097 HOST_WIDE_INT arg_offset, arg_size;
8b7773a4
MJ
2098 tree arg_base;
2099 bool check_ref, by_ref;
2100 ao_ref r;
de2e0835 2101 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
3e293154 2102
de2e0835 2103 if (max_agg_items == 0)
29799e9d
MJ
2104 return;
2105
8b7773a4
MJ
2106 /* The function operates in three stages. First, we prepare check_ref, r,
2107 arg_base and arg_offset based on what is actually passed as an actual
2108 argument. */
3e293154 2109
85942f45 2110 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
2111 {
2112 by_ref = true;
2113 if (TREE_CODE (arg) == SSA_NAME)
2114 {
2115 tree type_size;
63831879
MJ
2116 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2117 || !POINTER_TYPE_P (TREE_TYPE (arg)))
8b7773a4
MJ
2118 return;
2119 check_ref = true;
2120 arg_base = arg;
2121 arg_offset = 0;
85942f45 2122 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 2123 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
2124 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2125 }
2126 else if (TREE_CODE (arg) == ADDR_EXPR)
2127 {
ee45a32d 2128 bool reverse;
8b7773a4
MJ
2129
2130 arg = TREE_OPERAND (arg, 0);
588db50c
RS
2131 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2132 &arg_size, &reverse);
2133 if (!arg_base)
8b7773a4
MJ
2134 return;
2135 if (DECL_P (arg_base))
2136 {
8b7773a4 2137 check_ref = false;
0d48ee34 2138 ao_ref_init (&r, arg_base);
8b7773a4
MJ
2139 }
2140 else
2141 return;
2142 }
2143 else
2144 return;
2145 }
2146 else
2147 {
ee45a32d 2148 bool reverse;
8b7773a4
MJ
2149
2150 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2151
2152 by_ref = false;
2153 check_ref = false;
588db50c
RS
2154 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2155 &arg_size, &reverse);
2156 if (!arg_base)
8b7773a4
MJ
2157 return;
2158
2159 ao_ref_init (&r, arg);
2160 }
2161
46771da5
FX
2162 /* Second stage traverses virtual SSA web backwards starting from the call
2163 statement, only looks at individual dominating virtual operand (its
2164 definition dominates the call), as long as it is confident that content
2165 of the aggregate is affected by definition of the virtual operand, it
2166 builds a sorted linked list of ipa_agg_jf_list describing that. */
3e293154 2167
6cc886bf
RB
2168 for (tree dom_vuse = gimple_vuse (call);
2169 dom_vuse && fbi->aa_walk_budget > 0;)
46771da5
FX
2170 {
2171 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
3e293154 2172
46771da5 2173 if (gimple_code (stmt) == GIMPLE_PHI)
518dc859 2174 {
fb4697e3 2175 dom_vuse = get_continuation_for_phi (stmt, &r, true,
eb270950 2176 fbi->aa_walk_budget,
46771da5
FX
2177 &visited, false, NULL, NULL);
2178 continue;
3e293154 2179 }
46771da5 2180
6cc886bf 2181 fbi->aa_walk_budget--;
46771da5 2182 if (stmt_may_clobber_ref_p_1 (stmt, &r))
774b8a55 2183 {
46771da5
FX
2184 struct ipa_known_agg_contents_list *content
2185 = XALLOCA (struct ipa_known_agg_contents_list);
2186
eb270950 2187 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
774b8a55 2188 break;
3e293154 2189
46771da5
FX
2190 /* Now we get a dominating virtual operand, and need to check
2191 whether its value is clobbered any other dominating one. */
eb270950
FX
2192 if ((content->value.pass_through.formal_id >= 0
2193 || content->value.pass_through.operand)
da3fd017
MJ
2194 && !clobber_by_agg_contents_list_p (all_list, content)
2195 /* Since IPA-CP stores results with unsigned int offsets, we can
2196 discard those which would not fit now before we stream them to
2197 WPA. */
2198 && (content->offset + content->size - arg_offset
2199 <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
46771da5
FX
2200 {
2201 struct ipa_known_agg_contents_list *copy
2202 = XALLOCA (struct ipa_known_agg_contents_list);
3e293154 2203
46771da5
FX
2204 /* Add to the list consisting of only dominating virtual
2205 operands, whose definitions can finally reach the call. */
2206 add_to_agg_contents_list (&list, (*copy = *content, copy));
2207
de2e0835 2208 if (++value_count == max_agg_items)
46771da5
FX
2209 break;
2210 }
2211
2212 /* Add to the list consisting of all dominating virtual operands. */
2213 add_to_agg_contents_list (&all_list, content);
2214
de2e0835 2215 if (++item_count == 2 * max_agg_items)
46771da5 2216 break;
8b7773a4 2217 }
46771da5
FX
2218 dom_vuse = gimple_vuse (stmt);
2219 }
3e293154 2220
46771da5
FX
2221 if (visited)
2222 BITMAP_FREE (visited);
be95e2b9 2223
8b7773a4 2224 /* Third stage just goes over the list and creates an appropriate vector of
46771da5 2225 ipa_agg_jf_item structures out of it, of course only if there are
eb270950 2226 any meaningful items to begin with. */
3e293154 2227
eb270950 2228 if (value_count)
3e293154 2229 {
8b7773a4 2230 jfunc->agg.by_ref = by_ref;
eb270950 2231 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
3e293154
MJ
2232 }
2233}
2234
46771da5 2235
5d5f1e95
KV
2236/* Return the Ith param type of callee associated with call graph
2237 edge E. */
2238
2239tree
06d65050
JH
2240ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2241{
2242 int n;
2243 tree type = (e->callee
67348ccc 2244 ? TREE_TYPE (e->callee->decl)
06d65050
JH
2245 : gimple_call_fntype (e->call_stmt));
2246 tree t = TYPE_ARG_TYPES (type);
2247
2248 for (n = 0; n < i; n++)
2249 {
2250 if (!t)
2251 break;
2252 t = TREE_CHAIN (t);
2253 }
395a7559 2254 if (t && t != void_list_node)
06d65050
JH
2255 return TREE_VALUE (t);
2256 if (!e->callee)
2257 return NULL;
67348ccc 2258 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
2259 for (n = 0; n < i; n++)
2260 {
2261 if (!t)
2262 return NULL;
2263 t = TREE_CHAIN (t);
2264 }
2265 if (t)
2266 return TREE_TYPE (t);
2267 return NULL;
2268}
2269
86cd0334
MJ
2270/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2271 allocated structure or a previously existing one shared with other jump
2272 functions and/or transformation summaries. */
2273
2274ipa_bits *
2275ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2276{
2277 ipa_bits tmp;
2278 tmp.value = value;
2279 tmp.mask = mask;
2280
2281 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2282 if (*slot)
2283 return *slot;
2284
2285 ipa_bits *res = ggc_alloc<ipa_bits> ();
2286 res->value = value;
2287 res->mask = mask;
2288 *slot = res;
2289
2290 return res;
2291}
2292
2293/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2294 table in order to avoid creating multiple same ipa_bits structures. */
2295
2296static void
2297ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2298 const widest_int &mask)
2299{
2300 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2301}
2302
15819a7d
AH
2303/* Return a pointer to an ipa_vr just like TMP, but either find it in
2304 ipa_vr_hash_table or allocate it in GC memory. */
86cd0334 2305
065cc876
AH
2306static ipa_vr *
2307ipa_get_value_range (const vrange &tmp)
86cd0334 2308{
15819a7d
AH
2309 inchash::hash hstate;
2310 inchash::add_vrange (tmp, hstate);
2311 hashval_t hash = hstate.end ();
2312 ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
86cd0334
MJ
2313 if (*slot)
2314 return *slot;
2315
065cc876 2316 ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
15819a7d 2317 *slot = vr;
86cd0334
MJ
2318 return vr;
2319}
2320
15819a7d 2321/* Assign to JF a pointer to a range just like TMP but either fetch a
065cc876 2322 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
86cd0334
MJ
2323
2324static void
065cc876 2325ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
86cd0334 2326{
065cc876 2327 jf->m_vr = ipa_get_value_range (tmp);
86cd0334
MJ
2328}
2329
86cd0334 2330static void
065cc876 2331ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
86cd0334 2332{
065cc876
AH
2333 Value_Range tmp;
2334 vr.get_vrange (tmp);
2335 ipa_set_jfunc_vr (jf, tmp);
86cd0334
MJ
2336}
2337
3e293154
MJ
2338/* Compute jump function for all arguments of callsite CS and insert the
2339 information in the jump_functions array in the ipa_edge_args corresponding
2340 to this callsite. */
be95e2b9 2341
749aa96d 2342static void
56b40062 2343ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
062c604f 2344 struct cgraph_edge *cs)
3e293154 2345{
a4a3cdd0
MJ
2346 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2347 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
538dd0b7 2348 gcall *call = cs->call_stmt;
8b7773a4 2349 int n, arg_num = gimple_call_num_args (call);
5ce97055 2350 bool useful_context = false;
45f4e2b0 2351 value_range vr;
3e293154 2352
606d9a09 2353 if (arg_num == 0 || args->jump_functions)
3e293154 2354 return;
cb3874dc 2355 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
5ce97055 2356 if (flag_devirtualize)
cb3874dc 2357 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
3e293154 2358
96e24d49
JJ
2359 if (gimple_call_internal_p (call))
2360 return;
5fe8e757
MJ
2361 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2362 return;
2363
8b7773a4
MJ
2364 for (n = 0; n < arg_num; n++)
2365 {
2366 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2367 tree arg = gimple_call_arg (call, n);
06d65050 2368 tree param_type = ipa_get_callee_param_type (cs, n);
5ce97055
JH
2369 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2370 {
049e6d36 2371 tree instance;
99b1c316 2372 class ipa_polymorphic_call_context context (cs->caller->decl,
5ce97055 2373 arg, cs->call_stmt,
049e6d36 2374 &instance);
c628d1c3
MJ
2375 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2376 &fbi->aa_walk_budget);
5ce97055
JH
2377 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2378 if (!context.useless_p ())
2379 useful_context = true;
2380 }
3e293154 2381
718625ad
KV
2382 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2383 {
f7503699
KV
2384 bool addr_nonzero = false;
2385 bool strict_overflow = false;
2386
718625ad
KV
2387 if (TREE_CODE (arg) == SSA_NAME
2388 && param_type
45f4e2b0
AH
2389 && get_range_query (cfun)->range_of_expr (vr, arg)
2390 && vr.nonzero_p ())
f7503699
KV
2391 addr_nonzero = true;
2392 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2393 addr_nonzero = true;
2394
2395 if (addr_nonzero)
718625ad 2396 {
065cc876
AH
2397 vr.set_nonzero (TREE_TYPE (arg));
2398 ipa_set_jfunc_vr (jfunc, vr);
718625ad
KV
2399 }
2400 else
86cd0334 2401 gcc_assert (!jfunc->m_vr);
718625ad
KV
2402 }
2403 else
8bc5448f 2404 {
8bc5448f
KV
2405 if (TREE_CODE (arg) == SSA_NAME
2406 && param_type
e9f5b4fa
AH
2407 /* Limit the ranger query to integral types as the rest
2408 of this file uses value_range's, which only hold
2409 integers and pointers. */
2410 && irange::supports_p (TREE_TYPE (arg))
5612aa4d 2411 && irange::supports_p (param_type)
45f4e2b0
AH
2412 && get_range_query (cfun)->range_of_expr (vr, arg)
2413 && !vr.undefined_p ())
8bc5448f 2414 {
3c9372df
AH
2415 value_range resvr = vr;
2416 range_cast (resvr, param_type);
54994253 2417 if (!resvr.undefined_p () && !resvr.varying_p ())
065cc876 2418 ipa_set_jfunc_vr (jfunc, resvr);
3a4228ba 2419 else
86cd0334 2420 gcc_assert (!jfunc->m_vr);
8bc5448f
KV
2421 }
2422 else
86cd0334 2423 gcc_assert (!jfunc->m_vr);
8bc5448f 2424 }
04be694e 2425
209ca542
PK
2426 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2427 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2428 {
209ca542 2429 if (TREE_CODE (arg) == SSA_NAME)
86cd0334
MJ
2430 ipa_set_jfunc_bits (jfunc, 0,
2431 widest_int::from (get_nonzero_bits (arg),
2432 TYPE_SIGN (TREE_TYPE (arg))));
209ca542 2433 else
86cd0334 2434 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
209ca542 2435 }
67b97478
PK
2436 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2437 {
2438 unsigned HOST_WIDE_INT bitpos;
2439 unsigned align;
2440
67b97478 2441 get_pointer_alignment_1 (arg, &align, &bitpos);
7b27cb4b
RS
2442 widest_int mask = wi::bit_and_not
2443 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2444 align / BITS_PER_UNIT - 1);
86cd0334
MJ
2445 widest_int value = bitpos / BITS_PER_UNIT;
2446 ipa_set_jfunc_bits (jfunc, value, mask);
67b97478 2447 }
209ca542 2448 else
86cd0334 2449 gcc_assert (!jfunc->bits);
209ca542 2450
04643334 2451 if (is_gimple_ip_invariant (arg)
8813a647 2452 || (VAR_P (arg)
04643334
MJ
2453 && is_global_var (arg)
2454 && TREE_READONLY (arg)))
4502fe8d 2455 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
2456 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2457 && TREE_CODE (arg) == PARM_DECL)
2458 {
2459 int index = ipa_get_param_decl_index (info, arg);
2460
2461 gcc_assert (index >=0);
2462 /* Aggregate passed by value, check for pass-through, otherwise we
2463 will attempt to fill in aggregate contents later in this
2464 for cycle. */
8aab5218 2465 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
8b7773a4 2466 {
3b97a5c7 2467 ipa_set_jf_simple_pass_through (jfunc, index, false);
8b7773a4
MJ
2468 continue;
2469 }
2470 }
2471 else if (TREE_CODE (arg) == SSA_NAME)
2472 {
2473 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2474 {
2475 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 2476 if (index >= 0)
8b7773a4 2477 {
3b97a5c7 2478 bool agg_p;
8aab5218 2479 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
3b97a5c7 2480 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
8b7773a4
MJ
2481 }
2482 }
2483 else
2484 {
355fe088 2485 gimple *stmt = SSA_NAME_DEF_STMT (arg);
8b7773a4 2486 if (is_gimple_assign (stmt))
8aab5218 2487 compute_complex_assign_jump_func (fbi, info, jfunc,
06d65050 2488 call, stmt, arg, param_type);
8b7773a4 2489 else if (gimple_code (stmt) == GIMPLE_PHI)
8aab5218 2490 compute_complex_ancestor_jump_func (fbi, info, jfunc,
538dd0b7
DM
2491 call,
2492 as_a <gphi *> (stmt));
8b7773a4
MJ
2493 }
2494 }
3e293154 2495
67914693 2496 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
85942f45
JH
2497 passed (because type conversions are ignored in gimple). Usually we can
2498 safely get type from function declaration, but in case of K&R prototypes or
2499 variadic functions we can try our luck with type of the pointer passed.
2500 TODO: Since we look for actual initialization of the memory object, we may better
2501 work out the type based on the memory stores we find. */
2502 if (!param_type)
2503 param_type = TREE_TYPE (arg);
2504
8b7773a4
MJ
2505 if ((jfunc->type != IPA_JF_PASS_THROUGH
2506 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2507 && (jfunc->type != IPA_JF_ANCESTOR
2508 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2509 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
85942f45 2510 || POINTER_TYPE_P (param_type)))
eb270950 2511 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
8b7773a4 2512 }
5ce97055
JH
2513 if (!useful_context)
2514 vec_free (args->polymorphic_call_contexts);
3e293154
MJ
2515}
2516
749aa96d 2517/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2518 from BB. */
749aa96d 2519
062c604f 2520static void
56b40062 2521ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2522{
8aab5218
MJ
2523 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2524 int i;
749aa96d
MJ
2525 struct cgraph_edge *cs;
2526
8aab5218 2527 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2528 {
8aab5218 2529 struct cgraph_node *callee = cs->callee;
749aa96d 2530
8aab5218
MJ
2531 if (callee)
2532 {
d7caa129 2533 callee = callee->ultimate_alias_target ();
8aab5218
MJ
2534 /* We do not need to bother analyzing calls to unknown functions
2535 unless they may become known during lto/whopr. */
6cef01c3
JH
2536 if (!callee->definition && !flag_lto
2537 && !gimple_call_fnspec (cs->call_stmt).known_p ())
8aab5218
MJ
2538 continue;
2539 }
2540 ipa_compute_jump_functions_for_edge (fbi, cs);
2541 }
749aa96d
MJ
2542}
2543
8b7773a4
MJ
2544/* If STMT looks like a statement loading a value from a member pointer formal
2545 parameter, return that parameter and store the offset of the field to
2546 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2547 might be clobbered). If USE_DELTA, then we look for a use of the delta
2548 field rather than the pfn. */
be95e2b9 2549
3e293154 2550static tree
355fe088 2551ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
8b7773a4 2552 HOST_WIDE_INT *offset_p)
3e293154 2553{
8b7773a4
MJ
2554 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2555
2556 if (!gimple_assign_single_p (stmt))
2557 return NULL_TREE;
3e293154 2558
8b7773a4 2559 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
2560 if (TREE_CODE (rhs) == COMPONENT_REF)
2561 {
2562 ref_field = TREE_OPERAND (rhs, 1);
2563 rhs = TREE_OPERAND (rhs, 0);
2564 }
2565 else
2566 ref_field = NULL_TREE;
d242d063 2567 if (TREE_CODE (rhs) != MEM_REF)
3e293154 2568 return NULL_TREE;
3e293154 2569 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
2570 if (TREE_CODE (rec) != ADDR_EXPR)
2571 return NULL_TREE;
2572 rec = TREE_OPERAND (rec, 0);
3e293154 2573 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 2574 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 2575 return NULL_TREE;
d242d063 2576 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 2577
8b7773a4
MJ
2578 if (use_delta)
2579 fld = delta_field;
2580 else
2581 fld = ptr_field;
2582 if (offset_p)
2583 *offset_p = int_bit_position (fld);
2584
ae788515
EB
2585 if (ref_field)
2586 {
2587 if (integer_nonzerop (ref_offset))
2588 return NULL_TREE;
ae788515
EB
2589 return ref_field == fld ? rec : NULL_TREE;
2590 }
3e293154 2591 else
8b7773a4
MJ
2592 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2593 : NULL_TREE;
3e293154
MJ
2594}
2595
2596/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2597
3e293154
MJ
2598static bool
2599ipa_is_ssa_with_stmt_def (tree t)
2600{
2601 if (TREE_CODE (t) == SSA_NAME
2602 && !SSA_NAME_IS_DEFAULT_DEF (t))
2603 return true;
2604 else
2605 return false;
2606}
2607
40591473
MJ
2608/* Find the indirect call graph edge corresponding to STMT and mark it as a
2609 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
40a777e8
JH
2610 indirect call graph edge.
2611 If POLYMORPHIC is true record is as a destination of polymorphic call. */
be95e2b9 2612
40591473 2613static struct cgraph_edge *
538dd0b7 2614ipa_note_param_call (struct cgraph_node *node, int param_index,
40a777e8 2615 gcall *stmt, bool polymorphic)
3e293154 2616{
e33c6cd6 2617 struct cgraph_edge *cs;
3e293154 2618
d52f5295 2619 cs = node->get_edge (stmt);
b258210c 2620 cs->indirect_info->param_index = param_index;
8b7773a4 2621 cs->indirect_info->agg_contents = 0;
c13bc3d9 2622 cs->indirect_info->member_ptr = 0;
91bb9f80 2623 cs->indirect_info->guaranteed_unmodified = 0;
a4a3cdd0
MJ
2624 ipa_node_params *info = ipa_node_params_sum->get (node);
2625 ipa_set_param_used_by_indirect_call (info, param_index, true);
40a777e8 2626 if (cs->indirect_info->polymorphic || polymorphic)
a4a3cdd0 2627 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
40591473 2628 return cs;
3e293154
MJ
2629}
2630
e33c6cd6 2631/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 2632 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
2633 intermediate information about each formal parameter. Currently it checks
2634 whether the call calls a pointer that is a formal parameter and if so, the
2635 parameter is marked with the called flag and an indirect call graph edge
2636 describing the call is created. This is very simple for ordinary pointers
2637 represented in SSA but not-so-nice when it comes to member pointers. The
2638 ugly part of this function does nothing more than trying to match the
2639 pattern of such a call. An example of such a pattern is the gimple dump
2640 below, the call is on the last line:
3e293154 2641
ae788515
EB
2642 <bb 2>:
2643 f$__delta_5 = f.__delta;
2644 f$__pfn_24 = f.__pfn;
2645
2646 or
3e293154 2647 <bb 2>:
d242d063
MJ
2648 f$__delta_5 = MEM[(struct *)&f];
2649 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 2650
ae788515 2651 and a few lines below:
8aa29647
MJ
2652
2653 <bb 5>
3e293154
MJ
2654 D.2496_3 = (int) f$__pfn_24;
2655 D.2497_4 = D.2496_3 & 1;
2656 if (D.2497_4 != 0)
2657 goto <bb 3>;
2658 else
2659 goto <bb 4>;
2660
8aa29647 2661 <bb 6>:
3e293154
MJ
2662 D.2500_7 = (unsigned int) f$__delta_5;
2663 D.2501_8 = &S + D.2500_7;
2664 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2665 D.2503_10 = *D.2502_9;
2666 D.2504_12 = f$__pfn_24 + -1;
2667 D.2505_13 = (unsigned int) D.2504_12;
2668 D.2506_14 = D.2503_10 + D.2505_13;
2669 D.2507_15 = *D.2506_14;
2670 iftmp.11_16 = (String:: *) D.2507_15;
2671
8aa29647 2672 <bb 7>:
3e293154
MJ
2673 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2674 D.2500_19 = (unsigned int) f$__delta_5;
2675 D.2508_20 = &S + D.2500_19;
2676 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2677
2678 Such patterns are results of simple calls to a member pointer:
2679
2680 int doprinting (int (MyString::* f)(int) const)
2681 {
2682 MyString S ("somestring");
2683
2684 return (S.*f)(4);
2685 }
8b7773a4
MJ
2686
2687 Moreover, the function also looks for called pointers loaded from aggregates
2688 passed by value or reference. */
3e293154
MJ
2689
2690static void
56b40062 2691ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
8aab5218 2692 tree target)
3e293154 2693{
99b1c316 2694 class ipa_node_params *info = fbi->info;
8b7773a4
MJ
2695 HOST_WIDE_INT offset;
2696 bool by_ref;
3e293154 2697
3e293154
MJ
2698 if (SSA_NAME_IS_DEFAULT_DEF (target))
2699 {
b258210c 2700 tree var = SSA_NAME_VAR (target);
8aab5218 2701 int index = ipa_get_param_decl_index (info, var);
3e293154 2702 if (index >= 0)
40a777e8 2703 ipa_note_param_call (fbi->node, index, call, false);
3e293154
MJ
2704 return;
2705 }
2706
8aab5218 2707 int index;
355fe088 2708 gimple *def = SSA_NAME_DEF_STMT (target);
91bb9f80 2709 bool guaranteed_unmodified;
8b7773a4 2710 if (gimple_assign_single_p (def)
ff302741
PB
2711 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2712 gimple_assign_rhs1 (def), &index, &offset,
91bb9f80 2713 NULL, &by_ref, &guaranteed_unmodified))
8b7773a4 2714 {
40a777e8
JH
2715 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2716 call, false);
8b7773a4
MJ
2717 cs->indirect_info->offset = offset;
2718 cs->indirect_info->agg_contents = 1;
2719 cs->indirect_info->by_ref = by_ref;
91bb9f80 2720 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
8b7773a4
MJ
2721 return;
2722 }
2723
3e293154
MJ
2724 /* Now we need to try to match the complex pattern of calling a member
2725 pointer. */
8b7773a4
MJ
2726 if (gimple_code (def) != GIMPLE_PHI
2727 || gimple_phi_num_args (def) != 2
2728 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
2729 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2730 return;
2731
3e293154
MJ
2732 /* First, we need to check whether one of these is a load from a member
2733 pointer that is a parameter to this function. */
8aab5218
MJ
2734 tree n1 = PHI_ARG_DEF (def, 0);
2735 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 2736 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 2737 return;
355fe088
TS
2738 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2739 gimple *d2 = SSA_NAME_DEF_STMT (n2);
3e293154 2740
8aab5218
MJ
2741 tree rec;
2742 basic_block bb, virt_bb;
2743 basic_block join = gimple_bb (def);
8b7773a4 2744 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 2745 {
8b7773a4 2746 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
2747 return;
2748
8aa29647 2749 bb = EDGE_PRED (join, 0)->src;
726a989a 2750 virt_bb = gimple_bb (d2);
3e293154 2751 }
8b7773a4 2752 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 2753 {
8aa29647 2754 bb = EDGE_PRED (join, 1)->src;
726a989a 2755 virt_bb = gimple_bb (d1);
3e293154
MJ
2756 }
2757 else
2758 return;
2759
2760 /* Second, we need to check that the basic blocks are laid out in the way
2761 corresponding to the pattern. */
2762
3e293154
MJ
2763 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2764 || single_pred (virt_bb) != bb
2765 || single_succ (virt_bb) != join)
2766 return;
2767
2768 /* Third, let's see that the branching is done depending on the least
2769 significant bit of the pfn. */
2770
60bf26a4
RB
2771 gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
2772 if (!branch)
3e293154
MJ
2773 return;
2774
12430896
RG
2775 if ((gimple_cond_code (branch) != NE_EXPR
2776 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 2777 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 2778 return;
3e293154 2779
8aab5218 2780 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
2781 if (!ipa_is_ssa_with_stmt_def (cond))
2782 return;
2783
726a989a 2784 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 2785 if (!is_gimple_assign (def)
726a989a
RB
2786 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2787 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 2788 return;
726a989a
RB
2789
2790 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2791 if (!ipa_is_ssa_with_stmt_def (cond))
2792 return;
2793
726a989a 2794 def = SSA_NAME_DEF_STMT (cond);
3e293154 2795
8b75fc9b
MJ
2796 if (is_gimple_assign (def)
2797 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2798 {
726a989a 2799 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2800 if (!ipa_is_ssa_with_stmt_def (cond))
2801 return;
726a989a 2802 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2803 }
2804
8aab5218 2805 tree rec2;
6f7b8b70
RE
2806 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2807 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2808 == ptrmemfunc_vbit_in_delta),
2809 NULL);
3e293154
MJ
2810 if (rec != rec2)
2811 return;
2812
2813 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2814 if (index >= 0
8aab5218 2815 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2816 {
40a777e8
JH
2817 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2818 call, false);
8b7773a4
MJ
2819 cs->indirect_info->offset = offset;
2820 cs->indirect_info->agg_contents = 1;
c13bc3d9 2821 cs->indirect_info->member_ptr = 1;
91bb9f80 2822 cs->indirect_info->guaranteed_unmodified = 1;
8b7773a4 2823 }
3e293154
MJ
2824
2825 return;
2826}
2827
b258210c
MJ
2828/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2829 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2830 FBI->node (described by FBI->info), create a call note for the
2831 statement. */
b258210c
MJ
2832
2833static void
56b40062 2834ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
538dd0b7 2835 gcall *call, tree target)
b258210c
MJ
2836{
2837 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2838 int index;
40591473 2839 HOST_WIDE_INT anc_offset;
b258210c 2840
05842ff5
MJ
2841 if (!flag_devirtualize)
2842 return;
2843
40591473 2844 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2845 return;
2846
99b1c316 2847 class ipa_node_params *info = fbi->info;
40591473
MJ
2848 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2849 {
2850 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2851 return;
b258210c 2852
40591473
MJ
2853 anc_offset = 0;
2854 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2855 gcc_assert (index >= 0);
c628d1c3 2856 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
c199f329 2857 call))
40591473
MJ
2858 return;
2859 }
2860 else
2861 {
355fe088 2862 gimple *stmt = SSA_NAME_DEF_STMT (obj);
40591473
MJ
2863 tree expr;
2864
2865 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2866 if (!expr)
2867 return;
2868 index = ipa_get_param_decl_index (info,
2869 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2870 gcc_assert (index >= 0);
c628d1c3 2871 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
c199f329 2872 call, anc_offset))
40591473
MJ
2873 return;
2874 }
2875
40a777e8
JH
2876 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2877 call, true);
99b1c316 2878 class cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2879 ii->offset = anc_offset;
ae7e9ddd 2880 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2881 ii->otr_type = obj_type_ref_class (target);
40591473 2882 ii->polymorphic = 1;
b258210c
MJ
2883}
2884
2885/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2886 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2887 containing intermediate information about each formal parameter. */
b258210c
MJ
2888
2889static void
56b40062 2890ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2891{
2892 tree target = gimple_call_fn (call);
b786d31f
JH
2893
2894 if (!target
2895 || (TREE_CODE (target) != SSA_NAME
2896 && !virtual_method_call_p (target)))
2897 return;
b258210c 2898
7d0aa05b 2899 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2900 /* If we previously turned the call into a direct call, there is
2901 no need to analyze. */
b786d31f 2902 if (cs && !cs->indirect_unknown_callee)
25583c4f 2903 return;
7d0aa05b 2904
a5b58b28 2905 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2906 {
7d0aa05b
JH
2907 tree instance;
2908 tree target = gimple_call_fn (call);
6f8091fc
JH
2909 ipa_polymorphic_call_context context (current_function_decl,
2910 target, call, &instance);
7d0aa05b 2911
ba392339
JH
2912 gcc_checking_assert (cs->indirect_info->otr_type
2913 == obj_type_ref_class (target));
2914 gcc_checking_assert (cs->indirect_info->otr_token
2915 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2916
29c43c83
JH
2917 cs->indirect_info->vptr_changed
2918 = !context.get_dynamic_type (instance,
2919 OBJ_TYPE_REF_OBJECT (target),
c628d1c3
MJ
2920 obj_type_ref_class (target), call,
2921 &fbi->aa_walk_budget);
0127c169 2922 cs->indirect_info->context = context;
7d0aa05b
JH
2923 }
2924
b258210c 2925 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2926 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2927 else if (virtual_method_call_p (target))
8aab5218 2928 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2929}
2930
2931
e33c6cd6 2932/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2933 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2934 formal parameters are called. */
be95e2b9 2935
3e293154 2936static void
355fe088 2937ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2938{
726a989a 2939 if (is_gimple_call (stmt))
538dd0b7 2940 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2941}
2942
2943/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2944 If OP is a parameter declaration, mark it as used in the info structure
2945 passed in DATA. */
2946
2947static bool
355fe088 2948visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2949{
99b1c316 2950 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2951
2952 op = get_base_address (op);
2953 if (op
2954 && TREE_CODE (op) == PARM_DECL)
2955 {
2956 int index = ipa_get_param_decl_index (info, op);
2957 gcc_assert (index >= 0);
310bc633 2958 ipa_set_param_used (info, index, true);
062c604f
MJ
2959 }
2960
2961 return false;
3e293154
MJ
2962}
2963
8aab5218
MJ
2964/* Scan the statements in BB and inspect the uses of formal parameters. Store
2965 the findings in various structures of the associated ipa_node_params
2966 structure, such as parameter flags, notes etc. FBI holds various data about
2967 the function being analyzed. */
be95e2b9 2968
062c604f 2969static void
56b40062 2970ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2971{
726a989a 2972 gimple_stmt_iterator gsi;
8aab5218
MJ
2973 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2974 {
355fe088 2975 gimple *stmt = gsi_stmt (gsi);
3e293154 2976
8aab5218
MJ
2977 if (is_gimple_debug (stmt))
2978 continue;
3e293154 2979
8aab5218
MJ
2980 ipa_analyze_stmt_uses (fbi, stmt);
2981 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2982 visit_ref_for_mod_analysis,
2983 visit_ref_for_mod_analysis,
2984 visit_ref_for_mod_analysis);
5fe8e757 2985 }
8aab5218
MJ
2986 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2987 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2988 visit_ref_for_mod_analysis,
2989 visit_ref_for_mod_analysis,
2990 visit_ref_for_mod_analysis);
2991}
2992
13586172
MJ
2993/* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2994
2995static bool
2996load_from_dereferenced_name (tree expr, tree name)
2997{
2998 tree base = get_base_address (expr);
2999 return (TREE_CODE (base) == MEM_REF
3000 && TREE_OPERAND (base, 0) == name);
3001}
3002
8aab5218
MJ
3003/* Calculate controlled uses of parameters of NODE. */
3004
3005static void
3006ipa_analyze_controlled_uses (struct cgraph_node *node)
3007{
a4a3cdd0 3008 ipa_node_params *info = ipa_node_params_sum->get (node);
5fe8e757 3009
8aab5218 3010 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
3011 {
3012 tree parm = ipa_get_param (info, i);
13586172
MJ
3013 int call_uses = 0;
3014 bool load_dereferenced = false;
4502fe8d 3015
062c604f
MJ
3016 /* For SSA regs see if parameter is used. For non-SSA we compute
3017 the flag during modification analysis. */
4502fe8d
MJ
3018 if (is_gimple_reg (parm))
3019 {
67348ccc 3020 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
3021 parm);
3022 if (ddef && !has_zero_uses (ddef))
3023 {
3024 imm_use_iterator imm_iter;
13586172 3025 gimple *stmt;
4502fe8d
MJ
3026
3027 ipa_set_param_used (info, i, true);
13586172
MJ
3028 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
3029 {
3030 if (is_gimple_debug (stmt))
3031 continue;
3032
3033 int all_stmt_uses = 0;
3034 use_operand_p use_p;
3035 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3036 all_stmt_uses++;
3037
3038 if (is_gimple_call (stmt))
3039 {
3040 if (gimple_call_internal_p (stmt))
3041 {
3042 call_uses = IPA_UNDESCRIBED_USE;
3043 break;
3044 }
3045 int recognized_stmt_uses;
3046 if (gimple_call_fn (stmt) == ddef)
3047 recognized_stmt_uses = 1;
3048 else
3049 recognized_stmt_uses = 0;
3050 unsigned arg_count = gimple_call_num_args (stmt);
3051 for (unsigned i = 0; i < arg_count; i++)
3052 {
3053 tree arg = gimple_call_arg (stmt, i);
3054 if (arg == ddef)
3055 recognized_stmt_uses++;
3056 else if (load_from_dereferenced_name (arg, ddef))
3057 {
3058 load_dereferenced = true;
3059 recognized_stmt_uses++;
3060 }
3061 }
3062
3063 if (recognized_stmt_uses != all_stmt_uses)
3064 {
3065 call_uses = IPA_UNDESCRIBED_USE;
3066 break;
3067 }
3068 if (call_uses >= 0)
3069 call_uses += all_stmt_uses;
3070 }
3071 else if (gimple_assign_single_p (stmt))
3072 {
3073 tree rhs = gimple_assign_rhs1 (stmt);
3074 if (all_stmt_uses != 1
3075 || !load_from_dereferenced_name (rhs, ddef))
3076 {
3077 call_uses = IPA_UNDESCRIBED_USE;
3078 break;
3079 }
3080 load_dereferenced = true;
3081 }
3082 else
3083 {
3084 call_uses = IPA_UNDESCRIBED_USE;
3085 break;
3086 }
3087 }
4502fe8d
MJ
3088 }
3089 else
13586172 3090 call_uses = 0;
4502fe8d
MJ
3091 }
3092 else
13586172
MJ
3093 call_uses = IPA_UNDESCRIBED_USE;
3094 ipa_set_controlled_uses (info, i, call_uses);
3095 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
062c604f 3096 }
8aab5218 3097}
062c604f 3098
8aab5218 3099/* Free stuff in BI. */
062c604f 3100
8aab5218
MJ
3101static void
3102free_ipa_bb_info (struct ipa_bb_info *bi)
3103{
3104 bi->cg_edges.release ();
3105 bi->param_aa_statuses.release ();
3e293154
MJ
3106}
3107
8aab5218 3108/* Dominator walker driving the analysis. */
2c9561b5 3109
8aab5218 3110class analysis_dom_walker : public dom_walker
2c9561b5 3111{
8aab5218 3112public:
56b40062 3113 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 3114 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 3115
894ddeac 3116 edge before_dom_children (basic_block) final override;
8aab5218
MJ
3117
3118private:
56b40062 3119 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
3120};
3121
3daacdcd 3122edge
8aab5218
MJ
3123analysis_dom_walker::before_dom_children (basic_block bb)
3124{
3125 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3126 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 3127 return NULL;
2c9561b5
MJ
3128}
3129
c3431191
ML
3130/* Release body info FBI. */
3131
3132void
3133ipa_release_body_info (struct ipa_func_body_info *fbi)
3134{
3135 int i;
3136 struct ipa_bb_info *bi;
3137
3138 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3139 free_ipa_bb_info (bi);
3140 fbi->bb_infos.release ();
3141}
3142
026c3cfd 3143/* Initialize the array describing properties of formal parameters
dd5a833e
MS
3144 of NODE, analyze their uses and compute jump functions associated
3145 with actual arguments of calls from within NODE. */
062c604f
MJ
3146
3147void
3148ipa_analyze_node (struct cgraph_node *node)
3149{
56b40062 3150 struct ipa_func_body_info fbi;
99b1c316 3151 class ipa_node_params *info;
062c604f 3152
57dbdc5a
MJ
3153 ipa_check_create_node_params ();
3154 ipa_check_create_edge_args ();
a4a3cdd0 3155 info = ipa_node_params_sum->get_create (node);
8aab5218
MJ
3156
3157 if (info->analysis_done)
3158 return;
3159 info->analysis_done = 1;
3160
e0403e95
MJ
3161 if (ipa_func_spec_opts_forbid_analysis_p (node)
3162 || (count_formal_params (node->decl)
3163 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
8aab5218 3164 {
e0403e95 3165 gcc_assert (!ipa_get_param_count (info));
8aab5218
MJ
3166 return;
3167 }
3168
3169 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3170 push_cfun (func);
3171 calculate_dominance_info (CDI_DOMINATORS);
062c604f 3172 ipa_initialize_node_params (node);
8aab5218 3173 ipa_analyze_controlled_uses (node);
062c604f 3174
8aab5218 3175 fbi.node = node;
a4a3cdd0 3176 fbi.info = info;
8aab5218 3177 fbi.bb_infos = vNULL;
cb3874dc 3178 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
8aab5218 3179 fbi.param_count = ipa_get_param_count (info);
fdfd7f53 3180 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
062c604f 3181
8aab5218
MJ
3182 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3183 {
3184 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3185 bi->cg_edges.safe_push (cs);
3186 }
062c604f 3187
8aab5218
MJ
3188 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3189 {
3190 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3191 bi->cg_edges.safe_push (cs);
3192 }
3193
3194 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3195
c3431191 3196 ipa_release_body_info (&fbi);
8aab5218 3197 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 3198 pop_cfun ();
062c604f 3199}
062c604f 3200
be95e2b9 3201/* Update the jump functions associated with call graph edge E when the call
3e293154 3202 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 3203 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 3204
3e293154
MJ
3205static void
3206update_jump_functions_after_inlining (struct cgraph_edge *cs,
3207 struct cgraph_edge *e)
3208{
a4a3cdd0
MJ
3209 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3210 ipa_edge_args *args = ipa_edge_args_sum->get (e);
a33c028e
JH
3211 if (!args)
3212 return;
3e293154
MJ
3213 int count = ipa_get_cs_argument_count (args);
3214 int i;
3215
3216 for (i = 0; i < count; i++)
3217 {
b258210c 3218 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 3219 class ipa_polymorphic_call_context *dst_ctx
5ce97055 3220 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 3221
eb270950
FX
3222 if (dst->agg.items)
3223 {
3224 struct ipa_agg_jf_item *item;
3225 int j;
3226
3227 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3228 {
3229 int dst_fid;
3230 struct ipa_jump_func *src;
3231
3232 if (item->jftype != IPA_JF_PASS_THROUGH
3233 && item->jftype != IPA_JF_LOAD_AGG)
3234 continue;
3235
3236 dst_fid = item->value.pass_through.formal_id;
1c3c3f45 3237 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
eb270950
FX
3238 {
3239 item->jftype = IPA_JF_UNKNOWN;
3240 continue;
3241 }
3242
3243 item->value.pass_through.formal_id = -1;
3244 src = ipa_get_ith_jump_func (top, dst_fid);
3245 if (src->type == IPA_JF_CONST)
3246 {
3247 if (item->jftype == IPA_JF_PASS_THROUGH
3248 && item->value.pass_through.operation == NOP_EXPR)
3249 {
3250 item->jftype = IPA_JF_CONST;
3251 item->value.constant = src->value.constant.value;
3252 continue;
3253 }
3254 }
3255 else if (src->type == IPA_JF_PASS_THROUGH
3256 && src->value.pass_through.operation == NOP_EXPR)
3257 {
3258 if (item->jftype == IPA_JF_PASS_THROUGH
3259 || !item->value.load_agg.by_ref
3260 || src->value.pass_through.agg_preserved)
3261 item->value.pass_through.formal_id
3262 = src->value.pass_through.formal_id;
3263 }
3264 else if (src->type == IPA_JF_ANCESTOR)
3265 {
3266 if (item->jftype == IPA_JF_PASS_THROUGH)
3267 {
3268 if (!src->value.ancestor.offset)
3269 item->value.pass_through.formal_id
3270 = src->value.ancestor.formal_id;
3271 }
3272 else if (src->value.ancestor.agg_preserved)
3273 {
3274 gcc_checking_assert (item->value.load_agg.by_ref);
3275
3276 item->value.pass_through.formal_id
3277 = src->value.ancestor.formal_id;
3278 item->value.load_agg.offset
3279 += src->value.ancestor.offset;
3280 }
3281 }
3282
3283 if (item->value.pass_through.formal_id < 0)
3284 item->jftype = IPA_JF_UNKNOWN;
3285 }
3286 }
3287
1c3c3f45
FX
3288 if (!top)
3289 {
3290 ipa_set_jf_unknown (dst);
3291 continue;
3292 }
3293
685b0d13
MJ
3294 if (dst->type == IPA_JF_ANCESTOR)
3295 {
b258210c 3296 struct ipa_jump_func *src;
8b7773a4 3297 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 3298 class ipa_polymorphic_call_context *src_ctx
5ce97055 3299 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 3300
b258210c
MJ
3301 /* Variable number of arguments can cause havoc if we try to access
3302 one that does not exist in the inlined edge. So make sure we
3303 don't. */
8b7773a4 3304 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 3305 {
04be694e 3306 ipa_set_jf_unknown (dst);
b258210c
MJ
3307 continue;
3308 }
3309
8b7773a4
MJ
3310 src = ipa_get_ith_jump_func (top, dst_fid);
3311
5ce97055
JH
3312 if (src_ctx && !src_ctx->useless_p ())
3313 {
99b1c316 3314 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3315
3316 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3317 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 3318 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3319 ctx.offset_by (dst->value.ancestor.offset);
3320 if (!ctx.useless_p ())
3321 {
a7d1f3fe
ML
3322 if (!dst_ctx)
3323 {
3324 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3325 count, true);
a7d1f3fe
ML
3326 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3327 }
3328
3329 dst_ctx->combine_with (ctx);
5ce97055
JH
3330 }
3331 }
3332
eb270950
FX
3333 /* Parameter and argument in ancestor jump function must be pointer
3334 type, which means access to aggregate must be by-reference. */
3335 gcc_assert (!src->agg.items || src->agg.by_ref);
3336
3337 if (src->agg.items && dst->value.ancestor.agg_preserved)
8b7773a4
MJ
3338 {
3339 struct ipa_agg_jf_item *item;
3340 int j;
3341
3342 /* Currently we do not produce clobber aggregate jump functions,
3343 replace with merging when we do. */
3344 gcc_assert (!dst->agg.items);
3345
9771b263 3346 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3347 dst->agg.by_ref = src->agg.by_ref;
9771b263 3348 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
3349 item->offset -= dst->value.ancestor.offset;
3350 }
3351
3b97a5c7
MJ
3352 if (src->type == IPA_JF_PASS_THROUGH
3353 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
3354 {
3355 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3356 dst->value.ancestor.agg_preserved &=
3357 src->value.pass_through.agg_preserved;
3358 }
b258210c
MJ
3359 else if (src->type == IPA_JF_ANCESTOR)
3360 {
3361 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3362 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
3363 dst->value.ancestor.agg_preserved &=
3364 src->value.ancestor.agg_preserved;
7ea3a73c 3365 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
b258210c
MJ
3366 }
3367 else
04be694e 3368 ipa_set_jf_unknown (dst);
b258210c
MJ
3369 }
3370 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 3371 {
b258210c
MJ
3372 struct ipa_jump_func *src;
3373 /* We must check range due to calls with variable number of arguments
3374 and we cannot combine jump functions with operations. */
3375 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 3376 && (top && dst->value.pass_through.formal_id
b258210c
MJ
3377 < ipa_get_cs_argument_count (top)))
3378 {
8b7773a4
MJ
3379 int dst_fid = dst->value.pass_through.formal_id;
3380 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 3381 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 3382 class ipa_polymorphic_call_context *src_ctx
5ce97055 3383 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 3384
5ce97055
JH
3385 if (src_ctx && !src_ctx->useless_p ())
3386 {
99b1c316 3387 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3388
3389 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3390 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 3391 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3392 if (!ctx.useless_p ())
3393 {
3394 if (!dst_ctx)
3395 {
3396 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3397 count, true);
5ce97055
JH
3398 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3399 }
3400 dst_ctx->combine_with (ctx);
3401 }
3402 }
b8f6e610
MJ
3403 switch (src->type)
3404 {
3405 case IPA_JF_UNKNOWN:
04be694e 3406 ipa_set_jf_unknown (dst);
b8f6e610 3407 break;
b8f6e610 3408 case IPA_JF_CONST:
8e08c788
MJ
3409 {
3410 bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
3411 ipa_set_jf_cst_copy (dst, src);
3412 if (rd)
3413 ipa_zap_jf_refdesc (dst);
3414 }
3415
b8f6e610
MJ
3416 break;
3417
3418 case IPA_JF_PASS_THROUGH:
3419 {
3420 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3421 enum tree_code operation;
3422 operation = ipa_get_jf_pass_through_operation (src);
3423
3424 if (operation == NOP_EXPR)
3425 {
3b97a5c7 3426 bool agg_p;
b8f6e610
MJ
3427 agg_p = dst_agg_p
3428 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 3429 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 3430 }
a2b4c188
KV
3431 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3432 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
3433 else
3434 {
3435 tree operand = ipa_get_jf_pass_through_operand (src);
3436 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3437 operation);
3438 }
3439 break;
3440 }
3441 case IPA_JF_ANCESTOR:
3442 {
3b97a5c7 3443 bool agg_p;
b8f6e610
MJ
3444 agg_p = dst_agg_p
3445 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
3446 ipa_set_ancestor_jf (dst,
3447 ipa_get_jf_ancestor_offset (src),
b8f6e610 3448 ipa_get_jf_ancestor_formal_id (src),
7ea3a73c
MJ
3449 agg_p,
3450 ipa_get_jf_ancestor_keep_null (src));
b8f6e610
MJ
3451 break;
3452 }
3453 default:
3454 gcc_unreachable ();
3455 }
8b7773a4
MJ
3456
3457 if (src->agg.items
b8f6e610 3458 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
3459 {
3460 /* Currently we do not produce clobber aggregate jump
3461 functions, replace with merging when we do. */
3462 gcc_assert (!dst->agg.items);
3463
3464 dst->agg.by_ref = src->agg.by_ref;
9771b263 3465 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3466 }
b258210c
MJ
3467 }
3468 else
04be694e 3469 ipa_set_jf_unknown (dst);
3e293154 3470 }
b258210c
MJ
3471 }
3472}
3473
5ce97055
JH
3474/* If TARGET is an addr_expr of a function declaration, make it the
3475 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3476 Otherwise, return NULL. */
b258210c 3477
3949c4a7 3478struct cgraph_edge *
5ce97055
JH
3479ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3480 bool speculative)
b258210c
MJ
3481{
3482 struct cgraph_node *callee;
48b1474e 3483 bool unreachable = false;
b258210c 3484
ceeffab0
MJ
3485 if (TREE_CODE (target) == ADDR_EXPR)
3486 target = TREE_OPERAND (target, 0);
b258210c 3487 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
3488 {
3489 target = canonicalize_constructor_val (target, NULL);
3490 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3491 {
db66bf68
JH
3492 /* Member pointer call that goes through a VMT lookup. */
3493 if (ie->indirect_info->member_ptr
3494 /* Or if target is not an invariant expression and we do not
3495 know if it will evaulate to function at runtime.
3496 This can happen when folding through &VAR, where &VAR
3497 is IP invariant, but VAR itself is not.
3498
3499 TODO: Revisit this when GCC 5 is branched. It seems that
3500 member_ptr check is not needed and that we may try to fold
3501 the expression and see if VAR is readonly. */
3502 || !is_gimple_ip_invariant (target))
3503 {
3504 if (dump_enabled_p ())
3505 {
4f5b9c80 3506 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
3507 "discovered direct call non-invariant %s\n",
3508 ie->caller->dump_name ());
db66bf68
JH
3509 }
3510 return NULL;
3511 }
3512
c13bc3d9 3513
2b5f0895
XDL
3514 if (dump_enabled_p ())
3515 {
4f5b9c80 3516 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 3517 "discovered direct call to non-function in %s, "
807b7d62 3518 "making it __builtin_unreachable\n",
464d0118 3519 ie->caller->dump_name ());
2b5f0895 3520 }
3c9e6fca 3521
d68d3664 3522 target = builtin_decl_unreachable ();
d52f5295 3523 callee = cgraph_node::get_create (target);
48b1474e 3524 unreachable = true;
a0a7b611 3525 }
48b1474e 3526 else
d52f5295 3527 callee = cgraph_node::get (target);
a0a7b611 3528 }
48b1474e 3529 else
d52f5295 3530 callee = cgraph_node::get (target);
a0a7b611
JH
3531
3532 /* Because may-edges are not explicitely represented and vtable may be external,
3533 we may create the first reference to the object in the unit. */
a62bfab5 3534 if (!callee || callee->inlined_to)
a0a7b611 3535 {
a0a7b611
JH
3536
3537 /* We are better to ensure we can refer to it.
3538 In the case of static functions we are out of luck, since we already
3539 removed its body. In the case of public functions we may or may
3540 not introduce the reference. */
3541 if (!canonicalize_constructor_val (target, NULL)
3542 || !TREE_PUBLIC (target))
3543 {
3544 if (dump_file)
3545 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
845bb366 3546 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
3547 ie->caller->dump_name (),
3548 ie->callee->dump_name ());
a0a7b611
JH
3549 return NULL;
3550 }
d52f5295 3551 callee = cgraph_node::get_create (target);
a0a7b611 3552 }
2b5f0895 3553
0127c169
JH
3554 /* If the edge is already speculated. */
3555 if (speculative && ie->speculative)
3556 {
845bb366 3557 if (dump_file)
0127c169 3558 {
845bb366
JH
3559 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3560 if (!e2)
3561 {
3562 if (dump_file)
3563 fprintf (dump_file, "ipa-prop: Discovered call to a "
3564 "speculative target (%s -> %s) but the call is "
3565 "already speculated to different target. "
3566 "Giving up.\n",
3567 ie->caller->dump_name (), callee->dump_name ());
3568 }
3569 else
3570 {
3571 if (dump_file)
3572 fprintf (dump_file,
3573 "ipa-prop: Discovered call to a speculative target "
3574 "(%s -> %s) this agree with previous speculation.\n",
3575 ie->caller->dump_name (), callee->dump_name ());
3576 }
0127c169
JH
3577 }
3578 return NULL;
3579 }
3580
2b5f0895
XDL
3581 if (!dbg_cnt (devirt))
3582 return NULL;
3583
1dbee8c9 3584 ipa_check_create_node_params ();
ceeffab0 3585
67914693 3586 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 3587 the cgraph node too early. */
a62bfab5 3588 gcc_assert (!callee->inlined_to);
17afc0fe 3589
48b1474e 3590 if (dump_file && !unreachable)
b258210c 3591 {
5ce97055 3592 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 3593 "(%s -> %s), for stmt ",
b258210c 3594 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 3595 speculative ? "speculative" : "known",
464d0118
ML
3596 ie->caller->dump_name (),
3597 callee->dump_name ());
b258210c
MJ
3598 if (ie->call_stmt)
3599 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3600 else
3601 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 3602 }
2b5f0895
XDL
3603 if (dump_enabled_p ())
3604 {
4f5b9c80 3605 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62 3606 "converting indirect call in %s to direct call to %s\n",
3629ff8a 3607 ie->caller->dump_name (), callee->dump_name ());
2b5f0895 3608 }
5ce97055 3609 if (!speculative)
d8d5aef1
JH
3610 {
3611 struct cgraph_edge *orig = ie;
27c5a177 3612 ie = cgraph_edge::make_direct (ie, callee);
d8d5aef1
JH
3613 /* If we resolved speculative edge the cost is already up to date
3614 for direct call (adjusted by inline_edge_duplication_hook). */
3615 if (ie == orig)
3616 {
56f62793 3617 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
3618 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3619 - eni_size_weights.call_cost);
3620 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3621 - eni_time_weights.call_cost);
3622 }
3623 }
5ce97055
JH
3624 else
3625 {
3626 if (!callee->can_be_discarded_p ())
3627 {
3628 cgraph_node *alias;
3629 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3630 if (alias)
3631 callee = alias;
3632 }
d8d5aef1 3633 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3634 ie = ie->make_speculative
1bad9c18 3635 (callee, ie->count.apply_scale (8, 10));
5ce97055 3636 }
749aa96d 3637
b258210c 3638 return ie;
3e293154
MJ
3639}
3640
91bb9f80
MJ
3641/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3642 CONSTRUCTOR and return it. Return NULL if the search fails for some
3643 reason. */
3644
3645static tree
3646find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3647{
3648 tree type = TREE_TYPE (constructor);
3649 if (TREE_CODE (type) != ARRAY_TYPE
3650 && TREE_CODE (type) != RECORD_TYPE)
3651 return NULL;
3652
3653 unsigned ix;
3654 tree index, val;
3655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3656 {
3657 HOST_WIDE_INT elt_offset;
3658 if (TREE_CODE (type) == ARRAY_TYPE)
3659 {
3660 offset_int off;
3661 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3662 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3663
3664 if (index)
3665 {
db9bbdec
RB
3666 if (TREE_CODE (index) == RANGE_EXPR)
3667 off = wi::to_offset (TREE_OPERAND (index, 0));
3668 else
3669 off = wi::to_offset (index);
91bb9f80
MJ
3670 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3671 {
3672 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3673 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3674 off = wi::sext (off - wi::to_offset (low_bound),
3675 TYPE_PRECISION (TREE_TYPE (index)));
3676 }
3677 off *= wi::to_offset (unit_size);
db9bbdec
RB
3678 /* ??? Handle more than just the first index of a
3679 RANGE_EXPR. */
91bb9f80
MJ
3680 }
3681 else
3682 off = wi::to_offset (unit_size) * ix;
3683
3684 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3685 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3686 continue;
3687 elt_offset = off.to_shwi ();
3688 }
3689 else if (TREE_CODE (type) == RECORD_TYPE)
3690 {
3691 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3692 if (DECL_BIT_FIELD (index))
3693 continue;
3694 elt_offset = int_bit_position (index);
3695 }
3696 else
3697 gcc_unreachable ();
3698
3699 if (elt_offset > req_offset)
3700 return NULL;
3701
3702 if (TREE_CODE (val) == CONSTRUCTOR)
3703 return find_constructor_constant_at_offset (val,
3704 req_offset - elt_offset);
3705
3706 if (elt_offset == req_offset
3707 && is_gimple_reg_type (TREE_TYPE (val))
3708 && is_gimple_ip_invariant (val))
3709 return val;
3710 }
3711 return NULL;
3712}
3713
3714/* Check whether SCALAR could be used to look up an aggregate interprocedural
3715 invariant from a static constructor and if so, return it. Otherwise return
3716 NULL. */
3717
656b2338 3718tree
91bb9f80
MJ
3719ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3720{
3721 if (by_ref)
3722 {
3723 if (TREE_CODE (scalar) != ADDR_EXPR)
3724 return NULL;
3725 scalar = TREE_OPERAND (scalar, 0);
3726 }
3727
8813a647 3728 if (!VAR_P (scalar)
91bb9f80
MJ
3729 || !is_global_var (scalar)
3730 || !TREE_READONLY (scalar)
3731 || !DECL_INITIAL (scalar)
3732 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3733 return NULL;
3734
3735 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3736}
3737
656b2338
MJ
3738/* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3739 is none. BY_REF specifies whether the value has to be passed by reference
3740 or by value. */
8b7773a4 3741
656b2338
MJ
3742static tree
3743ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
3744 ipa_node_params *src_info,
3745 cgraph_node *src_node,
3746 HOST_WIDE_INT offset, bool by_ref)
8b7773a4 3747{
656b2338
MJ
3748 if (by_ref != agg_jfunc->by_ref)
3749 return NULL_TREE;
91bb9f80 3750
656b2338
MJ
3751 for (const ipa_agg_jf_item &item : agg_jfunc->items)
3752 if (item.offset == offset)
3753 return ipa_agg_value_from_jfunc (src_info, src_node, &item);
8b7773a4 3754
656b2338 3755 return NULL_TREE;
8b7773a4
MJ
3756}
3757
4502fe8d 3758/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3759 reference description RDESC. Return true if the reference has been
3760 successfully found and removed. */
4502fe8d 3761
568cda29 3762static bool
5e20cdc9 3763remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3764{
3765 struct ipa_ref *to_del;
3766 struct cgraph_edge *origin;
3767
3768 origin = rdesc->cs;
a854f856
MJ
3769 if (!origin)
3770 return false;
d122681a 3771 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
8e08c788 3772 origin->lto_stmt_uid, IPA_REF_ADDR);
568cda29
MJ
3773 if (!to_del)
3774 return false;
3775
d122681a 3776 to_del->remove_reference ();
4502fe8d 3777 if (dump_file)
464d0118 3778 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3629ff8a 3779 origin->caller->dump_name (), symbol->dump_name ());
568cda29 3780 return true;
4502fe8d
MJ
3781}
3782
3783/* If JFUNC has a reference description with refcount different from
3784 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3785 NULL. JFUNC must be a constant jump function. */
3786
3787static struct ipa_cst_ref_desc *
3788jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3789{
3790 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3791 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3792 return rdesc;
3793 else
3794 return NULL;
3795}
3796
568cda29
MJ
3797/* If the value of constant jump function JFUNC is an address of a function
3798 declaration, return the associated call graph node. Otherwise return
3799 NULL. */
3800
13586172
MJ
3801static symtab_node *
3802symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
568cda29
MJ
3803{
3804 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3805 tree cst = ipa_get_jf_constant (jfunc);
3806 if (TREE_CODE (cst) != ADDR_EXPR
13586172
MJ
3807 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3808 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
568cda29
MJ
3809 return NULL;
3810
13586172 3811 return symtab_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3812}
3813
3814
3815/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3816 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3817 the edge specified in the rdesc. Return false if either the symbol or the
3818 reference could not be found, otherwise return true. */
3819
3820static bool
3821try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3822{
3823 struct ipa_cst_ref_desc *rdesc;
3824 if (jfunc->type == IPA_JF_CONST
3825 && (rdesc = jfunc_rdesc_usable (jfunc))
3826 && --rdesc->refcount == 0)
3827 {
13586172 3828 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
568cda29
MJ
3829 if (!symbol)
3830 return false;
3831
3832 return remove_described_reference (symbol, rdesc);
3833 }
3834 return true;
3835}
3836
b258210c
MJ
3837/* Try to find a destination for indirect edge IE that corresponds to a simple
3838 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3839 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3840 the type of the parameter to which the result of JFUNC is passed. If it can
3841 be determined, return the newly direct edge, otherwise return NULL.
eb270950
FX
3842 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3843 relative to. */
be95e2b9 3844
b258210c
MJ
3845static struct cgraph_edge *
3846try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3847 struct ipa_jump_func *jfunc, tree target_type,
eb270950 3848 struct cgraph_node *new_root,
99b1c316 3849 class ipa_node_params *new_root_info)
b258210c 3850{
4502fe8d 3851 struct cgraph_edge *cs;
656b2338 3852 tree target = NULL_TREE;
042ae7d2 3853 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3854 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3855 if (agg_contents)
3856 {
656b2338
MJ
3857 if (scalar)
3858 target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
3859 ie->indirect_info->by_ref);
3860 if (!target && ie->indirect_info->guaranteed_unmodified)
3861 target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3862 new_root,
3863 ie->indirect_info->offset,
3864 ie->indirect_info->by_ref);
91bb9f80 3865 }
b258210c 3866 else
91bb9f80 3867 target = scalar;
d250540a
MJ
3868 if (!target)
3869 return NULL;
4502fe8d
MJ
3870 cs = ipa_make_edge_direct_to_target (ie, target);
3871
a12cd2db 3872 if (cs && !agg_contents)
568cda29
MJ
3873 {
3874 bool ok;
3875 gcc_checking_assert (cs->callee
ae6d0907
MJ
3876 && (cs != ie
3877 || jfunc->type != IPA_JF_CONST
13586172
MJ
3878 || !symtab_node_for_jfunc (jfunc)
3879 || cs->callee == symtab_node_for_jfunc (jfunc)));
568cda29
MJ
3880 ok = try_decrement_rdesc_refcount (jfunc);
3881 gcc_checking_assert (ok);
3882 }
4502fe8d
MJ
3883
3884 return cs;
b258210c
MJ
3885}
3886
bec81025
MJ
3887/* Return the target to be used in cases of impossible devirtualization. IE
3888 and target (the latter can be NULL) are dumped when dumping is enabled. */
3889
72972c22
MJ
3890tree
3891ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3892{
3893 if (dump_file)
3894 {
3895 if (target)
3896 fprintf (dump_file,
464d0118
ML
3897 "Type inconsistent devirtualization: %s->%s\n",
3898 ie->caller->dump_name (),
bec81025
MJ
3899 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3900 else
3901 fprintf (dump_file,
464d0118
ML
3902 "No devirtualization target in %s\n",
3903 ie->caller->dump_name ());
bec81025 3904 }
d68d3664 3905 tree new_target = builtin_decl_unreachable ();
d52f5295 3906 cgraph_node::get_create (new_target);
bec81025
MJ
3907 return new_target;
3908}
3909
d250540a
MJ
3910/* Try to find a destination for indirect edge IE that corresponds to a virtual
3911 call based on a formal parameter which is described by jump function JFUNC
3912 and if it can be determined, make it direct and return the direct edge.
44210a96 3913 Otherwise, return NULL. CTX describes the polymorphic context that the
eb270950
FX
3914 parameter the call is based on brings along with it. NEW_ROOT and
3915 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3916 to. */
b258210c
MJ
3917
3918static struct cgraph_edge *
3919try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a 3920 struct ipa_jump_func *jfunc,
eb270950
FX
3921 class ipa_polymorphic_call_context ctx,
3922 struct cgraph_node *new_root,
3923 class ipa_node_params *new_root_info)
3e293154 3924{
44210a96 3925 tree target = NULL;
5ce97055 3926 bool speculative = false;
85942f45 3927
2bf86c84 3928 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
85942f45 3929 return NULL;
b258210c 3930
44210a96 3931 gcc_assert (!ie->indirect_info->by_ref);
5ce97055
JH
3932
3933 /* Try to do lookup via known virtual table pointer value. */
2bf86c84
JH
3934 if (!ie->indirect_info->vptr_changed
3935 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
85942f45 3936 {
9de2f554
JH
3937 tree vtable;
3938 unsigned HOST_WIDE_INT offset;
656b2338
MJ
3939 tree t = NULL_TREE;
3940 if (jfunc->type == IPA_JF_CONST)
3941 t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
3942 ie->indirect_info->offset, true);
3943 if (!t)
3944 t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3945 new_root,
3946 ie->indirect_info->offset, true);
9de2f554
JH
3947 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3948 {
2994ab20 3949 bool can_refer;
0127c169 3950 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2994ab20
JH
3951 vtable, offset, &can_refer);
3952 if (can_refer)
9de2f554 3953 {
2994ab20 3954 if (!t
1edcb2ea
JJ
3955 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
3956 BUILT_IN_UNREACHABLE_TRAP)
9de2f554 3957 || !possible_polymorphic_call_target_p
0127c169
JH
3958 (ie, cgraph_node::get (t)))
3959 {
33c3b6be 3960 /* Do not speculate builtin_unreachable, it is stupid! */
0127c169
JH
3961 if (!ie->indirect_info->vptr_changed)
3962 target = ipa_impossible_devirt_target (ie, target);
2994ab20
JH
3963 else
3964 target = NULL;
0127c169
JH
3965 }
3966 else
3967 {
3968 target = t;
3969 speculative = ie->indirect_info->vptr_changed;
3970 }
9de2f554
JH
3971 }
3972 }
85942f45
JH
3973 }
3974
44210a96
MJ
3975 ipa_polymorphic_call_context ie_context (ie);
3976 vec <cgraph_node *>targets;
3977 bool final;
d250540a 3978
44210a96
MJ
3979 ctx.offset_by (ie->indirect_info->offset);
3980 if (ie->indirect_info->vptr_changed)
3981 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3982 ie->indirect_info->otr_type);
3983 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3984 targets = possible_polymorphic_call_targets
3985 (ie->indirect_info->otr_type,
3986 ie->indirect_info->otr_token,
3987 ctx, &final);
3988 if (final && targets.length () <= 1)
5ce97055 3989 {
33c3b6be 3990 speculative = false;
44210a96
MJ
3991 if (targets.length () == 1)
3992 target = targets[0]->decl;
3993 else
3994 target = ipa_impossible_devirt_target (ie, NULL_TREE);
5ce97055 3995 }
2bf86c84 3996 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
44210a96 3997 && !ie->speculative && ie->maybe_hot_p ())
5bccb77a 3998 {
44210a96
MJ
3999 cgraph_node *n;
4000 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
4001 ie->indirect_info->otr_token,
4002 ie->indirect_info->context);
4003 if (n)
5ce97055 4004 {
44210a96
MJ
4005 target = n->decl;
4006 speculative = true;
5ce97055 4007 }
5bccb77a 4008 }
b258210c
MJ
4009
4010 if (target)
450ad0cd 4011 {
44210a96
MJ
4012 if (!possible_polymorphic_call_target_p
4013 (ie, cgraph_node::get_create (target)))
0127c169 4014 {
29c43c83 4015 if (speculative)
0127c169
JH
4016 return NULL;
4017 target = ipa_impossible_devirt_target (ie, target);
4018 }
5ce97055 4019 return ipa_make_edge_direct_to_target (ie, target, speculative);
450ad0cd 4020 }
b258210c
MJ
4021 else
4022 return NULL;
3e293154
MJ
4023}
4024
4025/* Update the param called notes associated with NODE when CS is being inlined,
4026 assuming NODE is (potentially indirectly) inlined into CS->callee.
4027 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 4028 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 4029 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 4030
f8e2a1ed 4031static bool
e33c6cd6
MJ
4032update_indirect_edges_after_inlining (struct cgraph_edge *cs,
4033 struct cgraph_node *node,
d52f5295 4034 vec<cgraph_edge *> *new_edges)
3e293154 4035{
99b1c316 4036 class ipa_edge_args *top;
b258210c 4037 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
eb270950 4038 struct cgraph_node *new_root;
99b1c316 4039 class ipa_node_params *new_root_info, *inlined_node_info;
f8e2a1ed 4040 bool res = false;
3e293154 4041
e33c6cd6 4042 ipa_check_create_edge_args ();
a4a3cdd0 4043 top = ipa_edge_args_sum->get (cs);
eb270950
FX
4044 new_root = cs->caller->inlined_to
4045 ? cs->caller->inlined_to : cs->caller;
a4a3cdd0
MJ
4046 new_root_info = ipa_node_params_sum->get (new_root);
4047 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
e33c6cd6
MJ
4048
4049 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 4050 {
99b1c316 4051 class cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 4052 struct ipa_jump_func *jfunc;
8b7773a4 4053 int param_index;
3e293154 4054
e33c6cd6 4055 next_ie = ie->next_callee;
3e293154 4056
5f902d76
JH
4057 if (ici->param_index == -1)
4058 continue;
e33c6cd6 4059
3e293154 4060 /* We must check range due to calls with variable number of arguments: */
5a0236f8 4061 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 4062 {
5ee53a06 4063 ici->param_index = -1;
3e293154
MJ
4064 continue;
4065 }
4066
8b7773a4
MJ
4067 param_index = ici->param_index;
4068 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06 4069
e089e433 4070 auto_vec<cgraph_node *, 4> spec_targets;
3ff29913 4071 if (ie->speculative)
e089e433
MJ
4072 for (cgraph_edge *direct = ie->first_speculative_call_target ();
4073 direct;
4074 direct = direct->next_speculative_call_target ())
4075 spec_targets.safe_push (direct->callee);
3ff29913 4076
2bf86c84 4077 if (!opt_for_fn (node->decl, flag_indirect_inlining))
36b72910
JH
4078 new_direct_edge = NULL;
4079 else if (ici->polymorphic)
5ce97055 4080 {
44210a96
MJ
4081 ipa_polymorphic_call_context ctx;
4082 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
eb270950
FX
4083 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
4084 new_root,
4085 new_root_info);
5ce97055 4086 }
b258210c 4087 else
e5cf5e11
PK
4088 {
4089 tree target_type = ipa_get_type (inlined_node_info, param_index);
4090 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4091 target_type,
eb270950 4092 new_root,
e5cf5e11
PK
4093 new_root_info);
4094 }
4095
042ae7d2 4096 /* If speculation was removed, then we need to do nothing. */
3ff29913 4097 if (new_direct_edge && new_direct_edge != ie
e089e433 4098 && spec_targets.contains (new_direct_edge->callee))
042ae7d2
JH
4099 {
4100 new_direct_edge->indirect_inlining_edge = 1;
042ae7d2 4101 res = true;
73d098df
JH
4102 if (!new_direct_edge->speculative)
4103 continue;
042ae7d2
JH
4104 }
4105 else if (new_direct_edge)
685b0d13 4106 {
b258210c
MJ
4107 new_direct_edge->indirect_inlining_edge = 1;
4108 if (new_edges)
4109 {
9771b263 4110 new_edges->safe_push (new_direct_edge);
b258210c
MJ
4111 res = true;
4112 }
3ff29913
JH
4113 /* If speculative edge was introduced we still need to update
4114 call info of the indirect edge. */
4115 if (!new_direct_edge->speculative)
4116 continue;
685b0d13 4117 }
3ff29913
JH
4118 if (jfunc->type == IPA_JF_PASS_THROUGH
4119 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
36b72910 4120 {
d0502276
JH
4121 if (ici->agg_contents
4122 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4123 && !ici->polymorphic)
36b72910
JH
4124 ici->param_index = -1;
4125 else
d0502276
JH
4126 {
4127 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4128 if (ici->polymorphic
4129 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4130 ici->vptr_changed = true;
40a777e8
JH
4131 ipa_set_param_used_by_indirect_call (new_root_info,
4132 ici->param_index, true);
4133 if (ici->polymorphic)
4134 ipa_set_param_used_by_polymorphic_call (new_root_info,
4135 ici->param_index, true);
d0502276 4136 }
36b72910
JH
4137 }
4138 else if (jfunc->type == IPA_JF_ANCESTOR)
4139 {
d0502276
JH
4140 if (ici->agg_contents
4141 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4142 && !ici->polymorphic)
36b72910
JH
4143 ici->param_index = -1;
4144 else
4145 {
4146 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4147 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
d0502276
JH
4148 if (ici->polymorphic
4149 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4150 ici->vptr_changed = true;
b914768c
JH
4151 ipa_set_param_used_by_indirect_call (new_root_info,
4152 ici->param_index, true);
4153 if (ici->polymorphic)
4154 ipa_set_param_used_by_polymorphic_call (new_root_info,
4155 ici->param_index, true);
36b72910
JH
4156 }
4157 }
4158 else
4159 /* Either we can find a destination for this edge now or never. */
4160 ici->param_index = -1;
3e293154 4161 }
e33c6cd6 4162
f8e2a1ed 4163 return res;
3e293154
MJ
4164}
4165
4166/* Recursively traverse subtree of NODE (including node) made of inlined
4167 cgraph_edges when CS has been inlined and invoke
e33c6cd6 4168 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
4169 update_jump_functions_after_inlining on all non-inlined edges that lead out
4170 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
4171 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4172 created. */
be95e2b9 4173
f8e2a1ed 4174static bool
3e293154
MJ
4175propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4176 struct cgraph_node *node,
d52f5295 4177 vec<cgraph_edge *> *new_edges)
3e293154
MJ
4178{
4179 struct cgraph_edge *e;
f8e2a1ed 4180 bool res;
3e293154 4181
e33c6cd6 4182 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
4183
4184 for (e = node->callees; e; e = e->next_callee)
4185 if (!e->inline_failed)
f8e2a1ed 4186 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
4187 else
4188 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
4189 for (e = node->indirect_calls; e; e = e->next_callee)
4190 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
4191
4192 return res;
3e293154
MJ
4193}
4194
4502fe8d
MJ
4195/* Combine two controlled uses counts as done during inlining. */
4196
4197static int
4198combine_controlled_uses_counters (int c, int d)
4199{
4200 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4201 return IPA_UNDESCRIBED_USE;
4202 else
4203 return c + d - 1;
4204}
4205
4206/* Propagate number of controlled users from CS->caleee to the new root of the
4207 tree of inlined nodes. */
4208
4209static void
4210propagate_controlled_uses (struct cgraph_edge *cs)
4211{
a4a3cdd0 4212 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
a33c028e
JH
4213 if (!args)
4214 return;
a62bfab5
ML
4215 struct cgraph_node *new_root = cs->caller->inlined_to
4216 ? cs->caller->inlined_to : cs->caller;
a4a3cdd0
MJ
4217 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4218 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4502fe8d
MJ
4219 int count, i;
4220
6cf67b62
JH
4221 if (!old_root_info)
4222 return;
4223
4502fe8d
MJ
4224 count = MIN (ipa_get_cs_argument_count (args),
4225 ipa_get_param_count (old_root_info));
4226 for (i = 0; i < count; i++)
4227 {
4228 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4229 struct ipa_cst_ref_desc *rdesc;
4230
8e08c788
MJ
4231 if (jf->type == IPA_JF_PASS_THROUGH
4232 && !ipa_get_jf_pass_through_refdesc_decremented (jf))
4502fe8d
MJ
4233 {
4234 int src_idx, c, d;
4235 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4236 c = ipa_get_controlled_uses (new_root_info, src_idx);
4237 d = ipa_get_controlled_uses (old_root_info, i);
4238
4239 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4240 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4241 c = combine_controlled_uses_counters (c, d);
4242 ipa_set_controlled_uses (new_root_info, src_idx, c);
13586172
MJ
4243 bool lderef = true;
4244 if (c != IPA_UNDESCRIBED_USE)
4245 {
4246 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4247 || ipa_get_param_load_dereferenced (old_root_info, i));
4248 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4249 }
4250
4251 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4502fe8d
MJ
4252 {
4253 struct cgraph_node *n;
4254 struct ipa_ref *ref;
44210a96 4255 tree t = new_root_info->known_csts[src_idx];
4502fe8d
MJ
4256
4257 if (t && TREE_CODE (t) == ADDR_EXPR
4258 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
d52f5295 4259 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
8e08c788
MJ
4260 && (ref = new_root->find_reference (n, NULL, 0,
4261 IPA_REF_ADDR)))
4502fe8d
MJ
4262 {
4263 if (dump_file)
4264 fprintf (dump_file, "ipa-prop: Removing cloning-created "
464d0118
ML
4265 "reference from %s to %s.\n",
4266 new_root->dump_name (),
4267 n->dump_name ());
d122681a 4268 ref->remove_reference ();
4502fe8d
MJ
4269 }
4270 }
4271 }
4272 else if (jf->type == IPA_JF_CONST
4273 && (rdesc = jfunc_rdesc_usable (jf)))
4274 {
4275 int d = ipa_get_controlled_uses (old_root_info, i);
4276 int c = rdesc->refcount;
f571596f 4277 tree cst = ipa_get_jf_constant (jf);
4502fe8d 4278 rdesc->refcount = combine_controlled_uses_counters (c, d);
f6d65e80 4279 if (rdesc->refcount != IPA_UNDESCRIBED_USE
f571596f
MJ
4280 && ipa_get_param_load_dereferenced (old_root_info, i)
4281 && TREE_CODE (cst) == ADDR_EXPR
ca2007a9 4282 && VAR_P (TREE_OPERAND (cst, 0)))
f6d65e80 4283 {
f6d65e80
MJ
4284 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4285 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4286 if (dump_file)
4287 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4288 "a load so adding LOAD reference from %s to %s.\n",
4289 new_root->dump_name (), n->dump_name ());
4290 }
4502fe8d
MJ
4291 if (rdesc->refcount == 0)
4292 {
4502fe8d 4293 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
13586172
MJ
4294 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4295 == FUNCTION_DECL)
ca2007a9 4296 || VAR_P (TREE_OPERAND (cst, 0))));
13586172
MJ
4297
4298 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4502fe8d
MJ
4299 if (n)
4300 {
f6d65e80
MJ
4301 remove_described_reference (n, rdesc);
4302 cgraph_node *clone = cs->caller;
a62bfab5 4303 while (clone->inlined_to
6cf67b62
JH
4304 && clone->ipcp_clone
4305 && clone != rdesc->cs->caller)
4502fe8d
MJ
4306 {
4307 struct ipa_ref *ref;
8e08c788 4308 ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
4502fe8d
MJ
4309 if (ref)
4310 {
4311 if (dump_file)
4312 fprintf (dump_file, "ipa-prop: Removing "
4313 "cloning-created reference "
464d0118
ML
4314 "from %s to %s.\n",
4315 clone->dump_name (),
4316 n->dump_name ());
d122681a 4317 ref->remove_reference ();
4502fe8d
MJ
4318 }
4319 clone = clone->callers->caller;
4320 }
4321 }
4322 }
4323 }
4324 }
4325
4326 for (i = ipa_get_param_count (old_root_info);
4327 i < ipa_get_cs_argument_count (args);
4328 i++)
4329 {
4330 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4331
4332 if (jf->type == IPA_JF_CONST)
4333 {
4334 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4335 if (rdesc)
4336 rdesc->refcount = IPA_UNDESCRIBED_USE;
4337 }
4338 else if (jf->type == IPA_JF_PASS_THROUGH)
4339 ipa_set_controlled_uses (new_root_info,
4340 jf->value.pass_through.formal_id,
4341 IPA_UNDESCRIBED_USE);
4342 }
4343}
4344
3e293154
MJ
4345/* Update jump functions and call note functions on inlining the call site CS.
4346 CS is expected to lead to a node already cloned by
4347 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
4348 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
4349 created. */
be95e2b9 4350
f8e2a1ed 4351bool
3e293154 4352ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 4353 vec<cgraph_edge *> *new_edges)
3e293154 4354{
5ee53a06 4355 bool changed;
f8e2a1ed
MJ
4356 /* Do nothing if the preparation phase has not been carried out yet
4357 (i.e. during early inlining). */
dd912cb8 4358 if (!ipa_node_params_sum)
f8e2a1ed 4359 return false;
6fe906a3 4360 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 4361
4502fe8d 4362 propagate_controlled_uses (cs);
5ee53a06 4363 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
6cf67b62 4364 ipa_node_params_sum->remove (cs->callee);
5ee53a06 4365
a4a3cdd0 4366 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
d40e2362
JH
4367 if (args)
4368 {
4369 bool ok = true;
4370 if (args->jump_functions)
4371 {
4372 struct ipa_jump_func *jf;
4373 int i;
4374 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4375 if (jf->type == IPA_JF_CONST
4376 && ipa_get_jf_constant_rdesc (jf))
4377 {
4378 ok = false;
4379 break;
4380 }
4381 }
4382 if (ok)
4383 ipa_edge_args_sum->remove (cs);
4384 }
98aad294
JH
4385 if (ipcp_transformation_sum)
4386 ipcp_transformation_sum->remove (cs->callee);
d40e2362 4387
5ee53a06 4388 return changed;
518dc859
RL
4389}
4390
86cd0334
MJ
4391/* Ensure that array of edge arguments infos is big enough to accommodate a
4392 structure for all edges and reallocates it if not. Also, allocate
4393 associated hash tables is they do not already exist. */
4394
4395void
4396ipa_check_create_edge_args (void)
4397{
6fe906a3
MJ
4398 if (!ipa_edge_args_sum)
4399 ipa_edge_args_sum
78cd68c0 4400 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
6fe906a3 4401 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
4402 if (!ipa_bits_hash_table)
4403 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4404 if (!ipa_vr_hash_table)
4405 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4406}
4407
771578a0 4408/* Free all ipa_edge structures. */
be95e2b9 4409
518dc859 4410void
771578a0 4411ipa_free_all_edge_args (void)
518dc859 4412{
6fe906a3 4413 if (!ipa_edge_args_sum)
9771b263
DN
4414 return;
4415
ddf628e4 4416 ggc_delete (ipa_edge_args_sum);
6fe906a3 4417 ipa_edge_args_sum = NULL;
518dc859
RL
4418}
4419
771578a0 4420/* Free all ipa_node_params structures. */
be95e2b9 4421
518dc859 4422void
771578a0 4423ipa_free_all_node_params (void)
518dc859 4424{
bc2fcccd
JH
4425 if (ipa_node_params_sum)
4426 ggc_delete (ipa_node_params_sum);
dd912cb8 4427 ipa_node_params_sum = NULL;
771578a0
MJ
4428}
4429
9d3e0adc 4430/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 4431 tables if they do not already exist. */
04be694e
MJ
4432
4433void
9d3e0adc 4434ipcp_transformation_initialize (void)
04be694e 4435{
86cd0334
MJ
4436 if (!ipa_bits_hash_table)
4437 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4438 if (!ipa_vr_hash_table)
4439 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
9d3e0adc 4440 if (ipcp_transformation_sum == NULL)
40e67ab8
JH
4441 {
4442 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4443 ipcp_transformation_sum->disable_insertion_hook ();
4444 }
04be694e
MJ
4445}
4446
12e088ba
AC
4447/* Release the IPA CP transformation summary. */
4448
4449void
4450ipcp_free_transformation_sum (void)
4451{
4452 if (!ipcp_transformation_sum)
4453 return;
4454
78cd68c0
ML
4455 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4456 ggc_free (ipcp_transformation_sum);
12e088ba
AC
4457 ipcp_transformation_sum = NULL;
4458}
4459
2c9561b5
MJ
4460/* Set the aggregate replacements of NODE to be AGGVALS. */
4461
4462void
4463ipa_set_node_agg_value_chain (struct cgraph_node *node,
e0403e95 4464 vec<ipa_argagg_value, va_gc> *aggs)
2c9561b5 4465{
9d3e0adc
ML
4466 ipcp_transformation_initialize ();
4467 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
e0403e95 4468 s->m_agg_values = aggs;
2c9561b5
MJ
4469}
4470
e53b6e56 4471/* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
6fe906a3 4472 count data structures accordingly. */
be95e2b9 4473
6fe906a3
MJ
4474void
4475ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 4476{
568cda29
MJ
4477 if (args->jump_functions)
4478 {
4479 struct ipa_jump_func *jf;
4480 int i;
4481 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
4482 {
4483 struct ipa_cst_ref_desc *rdesc;
4484 try_decrement_rdesc_refcount (jf);
4485 if (jf->type == IPA_JF_CONST
4486 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4487 && rdesc->cs == cs)
4488 rdesc->cs = NULL;
4489 }
568cda29 4490 }
518dc859
RL
4491}
4492
6fe906a3
MJ
4493/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4494 reference count data strucutres accordingly. */
be95e2b9 4495
6fe906a3
MJ
4496void
4497ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4498 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 4499{
8b7773a4 4500 unsigned int i;
771578a0 4501
9771b263 4502 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
4503 if (old_args->polymorphic_call_contexts)
4504 new_args->polymorphic_call_contexts
4505 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 4506
9771b263 4507 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
4508 {
4509 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4510 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4511
4512 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4513
4514 if (src_jf->type == IPA_JF_CONST)
4515 {
4516 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4517
4518 if (!src_rdesc)
4519 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
4520 else if (src->caller == dst->caller)
4521 {
1a7d452c
MJ
4522 /* Creation of a speculative edge. If the source edge is the one
4523 grabbing a reference, we must create a new (duplicate)
4524 reference description. Otherwise they refer to the same
4525 description corresponding to a reference taken in a function
4526 src->caller is inlined to. In that case we just must
4527 increment the refcount. */
4528 if (src_rdesc->cs == src)
4529 {
4530 symtab_node *n = symtab_node_for_jfunc (src_jf);
4531 gcc_checking_assert (n);
4532 ipa_ref *ref
4533 = src->caller->find_reference (n, src->call_stmt,
8e08c788
MJ
4534 src->lto_stmt_uid,
4535 IPA_REF_ADDR);
1a7d452c
MJ
4536 gcc_checking_assert (ref);
4537 dst->caller->clone_reference (ref, ref->stmt);
4538
4539 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4540 dst_rdesc->cs = dst;
4541 dst_rdesc->refcount = src_rdesc->refcount;
4542 dst_rdesc->next_duplicate = NULL;
4543 dst_jf->value.constant.rdesc = dst_rdesc;
4544 }
4545 else
4546 {
4547 src_rdesc->refcount++;
4548 dst_jf->value.constant.rdesc = src_rdesc;
4549 }
568cda29 4550 }
4502fe8d
MJ
4551 else if (src_rdesc->cs == src)
4552 {
601f3293 4553 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 4554 dst_rdesc->cs = dst;
4502fe8d 4555 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
4556 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4557 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
4558 dst_jf->value.constant.rdesc = dst_rdesc;
4559 }
4560 else
4561 {
4562 struct ipa_cst_ref_desc *dst_rdesc;
4563 /* This can happen during inlining, when a JFUNC can refer to a
4564 reference taken in a function up in the tree of inline clones.
4565 We need to find the duplicate that refers to our tree of
4566 inline clones. */
4567
a62bfab5 4568 gcc_assert (dst->caller->inlined_to);
4502fe8d
MJ
4569 for (dst_rdesc = src_rdesc->next_duplicate;
4570 dst_rdesc;
4571 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
4572 {
4573 struct cgraph_node *top;
a62bfab5
ML
4574 top = dst_rdesc->cs->caller->inlined_to
4575 ? dst_rdesc->cs->caller->inlined_to
2fd0985c 4576 : dst_rdesc->cs->caller;
a62bfab5 4577 if (dst->caller->inlined_to == top)
2fd0985c
MJ
4578 break;
4579 }
44a60244 4580 gcc_assert (dst_rdesc);
4502fe8d
MJ
4581 dst_jf->value.constant.rdesc = dst_rdesc;
4582 }
4583 }
6fe45955
MJ
4584 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4585 && src->caller == dst->caller)
4586 {
a62bfab5
ML
4587 struct cgraph_node *inline_root = dst->caller->inlined_to
4588 ? dst->caller->inlined_to : dst->caller;
a4a3cdd0 4589 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
6fe45955
MJ
4590 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4591
4592 int c = ipa_get_controlled_uses (root_info, idx);
4593 if (c != IPA_UNDESCRIBED_USE)
4594 {
4595 c++;
4596 ipa_set_controlled_uses (root_info, idx, c);
4597 }
4598 }
4502fe8d 4599 }
771578a0
MJ
4600}
4601
dd912cb8 4602/* Analyze newly added function into callgraph. */
be95e2b9 4603
771578a0 4604static void
dd912cb8 4605ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 4606{
dd912cb8
ML
4607 if (node->has_gimple_body_p ())
4608 ipa_analyze_node (node);
4609}
771578a0 4610
dd912cb8
ML
/* Hook that is called by summary when a node is duplicated.  Copies the
   IPA analysis results from OLD_INFO into NEW_INFO; the two unnamed
   cgraph_node parameters (source and destination node) are unused here.  */

void
ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  /* Parameter descriptors are deep-copied; lattices are scratch data used
     only during IPA-CP propagation and are not carried over.  */
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;
}
04be694e 4628
98aad294 4629/* Duplication of ipcp transformation summaries. */
86cd0334 4630
98aad294
JH
4631void
4632ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4633 ipcp_transformation *src_trans,
4634 ipcp_transformation *dst_trans)
4635{
4636 /* Avoid redundant work of duplicating vectors we will never use. */
4637 if (dst->inlined_to)
4638 return;
e0403e95 4639 dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
98aad294
JH
4640 dst_trans->bits = vec_safe_copy (src_trans->bits);
4641 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
771578a0
MJ
4642}
4643
/* Register our cgraph hooks if they are not already there.  Also makes sure
   the node-params and edge-args summaries exist.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  /* Newly inserted functions get analyzed via ipa_add_new_function.  */
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
518dc859 4655
/* Unregister our cgraph hooks if they are not already there.  Safe to call
   when the hook was never registered (holder is NULL).  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
4665
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  /* Only release when not optimizing and not in LTO mode — otherwise the
     data is kept (the sibling ipa_free_all_structures_after_iinln performs
     the same cleanup unconditionally later).  */
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
4684
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the after-ipa-cp variant,
   this always releases everything.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4700
/* Print the parameter descriptors of function NODE to file F: for each
   formal parameter, its usage flags and controlled-uses count.  Does
   nothing for declarations without a definition.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      /* No summary was computed for this node.  */
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i %s", c,
		 ipa_get_param_load_dereferenced (info, i)
		 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}
dcd416e3 4744
ca30a539 4745/* Print ipa_tree_map data structures of all functions in the
3e293154 4746 callgraph to F. */
be95e2b9 4747
3e293154 4748void
ca30a539 4749ipa_print_all_params (FILE * f)
3e293154
MJ
4750{
4751 struct cgraph_node *node;
4752
ca30a539 4753 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4754 FOR_EACH_FUNCTION (node)
ca30a539 4755 ipa_print_node_params (f, node);
3e293154 4756}
3f84bf08 4757
/* Stream out jump function JUMP_FUNC to OB.  The exact write order here
   must mirror the read order in ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* The type and the ADDR_EXPR flag are packed into a single uhwi.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      /* With FLAG set, only the ADDR_EXPR operand is streamed; the reader
	 rebuilds the ADDR_EXPR wrapper.  */
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  /* Binary operations additionally carry their constant operand.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate part: item count, by_ref flag (only when non-empty), then
     the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  /* Unary operations have no operand to stream.  */
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  /* Known-bits info: a presence bit followed by value/mask when present.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Value range: ipa_vr::streamer_write emits its own presence bit, so the
     else branch emits a matching "absent" bit.  */
  if (jump_func->m_vr)
    jump_func->m_vr->streamer_write (ob);
  else
    {
      bp_pack_value (&bp, false, 1);
      streamer_write_bitpack (&bp);
    }
}
4878
/* Read in jump function JUMP_FUNC from IB.  CS is the edge the function
   belongs to, DATA_IN provides tree streaming context.  When PREVAILS is
   false the data is read and discarded (the symbol did not prevail), but
   the stream must still be consumed in the exact order written by
   ipa_write_jump_function.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  /* Low bit flags a constant that was streamed as a bare ADDR_EXPR
     operand; the remaining bits encode the jump function type.  */
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  /* Re-wrap the streamed operand in the ADDR_EXPR the writer
	     stripped off.  */
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  /* Binary operation: constant operand precedes the formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool keep_null = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
			     keep_null);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate part.  Items are only stored when the symbol prevails.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  /* Known-bits info: presence bit then value/mask.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Value range: ipa_vr handles its own presence bit.  */
  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
	ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}
5018
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Write order must match
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* Six boolean properties packed into one bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* Offset is only meaningful (and streamed) for aggregate or polymorphic
     calls; otherwise it must be zero.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
5050
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  INFO, when non-NULL, is the
   caller's parameter summary in which usage flags are updated.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  /* Unpack the six flags in the same order the writer packed them.  */
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  /* Mirror the parameter-use flags into the caller's summary.  */
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index , true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
5090
/* Stream out NODE info to OB: parameter descriptors followed by jump
   functions of all callee and indirect edges.  Read back by
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  /* Two bits per parameter: load-dereferenced and used.  */
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case. */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
	       ? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  /* Direct call edges: argument count doubled with a low bit flagging the
     presence of polymorphic call contexts; zero means "no args summary".  */
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  /* Indirect edges: same encoding, plus the indirect call info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
5169
/* Stream in edge E from IB.  The count read first encodes the number of
   jump functions doubled, with the low bit flagging the presence of
   polymorphic call contexts (see ipa_write_node_info).  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      /* Keep the data: allocate the edge-args summary and fill it.  */
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      /* Discard: the stream must still be consumed in the same order.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
5217
/* Stream in NODE info from IB.  Counterpart of ipa_write_node_info; when
   the node does not prevail, all data is still read (to keep the stream
   position correct) but discarded.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Consume the move costs without storing them.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* Two bits per parameter, matching the writer's bitpack layout.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	{
	  ipa_set_param_load_dereferenced (info, k, load_dereferenced);
	  ipa_set_param_used (info, k, used);
	}
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
5276
/* Write jump functions for nodes in SET.  Emits the LTO jump-functions
   section: a count of summarized nodes followed by one record per node.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if the summaries were never created.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count nodes that will be written.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5317
/* Read section in file FILE_DATA of length LEN with data DATA.  Decodes
   the jump-functions section layout produced by
   ipa_prop_write_jump_functions.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5358
5359/* Read ipcp jump functions. */
5360
5361void
5362ipa_prop_read_jump_functions (void)
5363{
5364 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5365 struct lto_file_decl_data *file_data;
5366 unsigned int j = 0;
5367
5368 ipa_check_create_node_params ();
5369 ipa_check_create_edge_args ();
5370 ipa_register_cgraph_hooks ();
5371
5372 while ((file_data = file_data_vec[j++]))
5373 {
5374 size_t len;
3c56d8d8
ML
5375 const char *data
5376 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5377 &len);
fb3f88cc
JH
5378 if (data)
5379 ipa_prop_read_section (file_data, data, len);
5380 }
5381}
5382
7450b255
MJ
5383/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5384 useful info. */
5385static bool
5386useful_ipcp_transformation_info_p (ipcp_transformation *ts)
2c9561b5 5387{
7450b255
MJ
5388 if (!ts)
5389 return false;
5390 if (!vec_safe_is_empty (ts->m_agg_values)
5391 || !vec_safe_is_empty (ts->bits)
5392 || !vec_safe_is_empty (ts->m_vr))
5393 return true;
5394 return false;
5395}
2c9561b5 5396
7450b255
MJ
5397/* Write into OB IPA-CP transfromation summary TS describing NODE. */
5398
5399void
5400write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
5401 ipcp_transformation *ts)
5402{
5403 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
5404 int node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
5405 streamer_write_uhwi (ob, node_ref);
5406
7450b255
MJ
5407 streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
5408 for (const ipa_argagg_value &av : ts->m_agg_values)
2c9561b5 5409 {
7450b255 5410 struct bitpack_d bp;
7b920a9a 5411
7450b255
MJ
5412 stream_write_tree (ob, av.value, true);
5413 streamer_write_uhwi (ob, av.unit_offset);
5414 streamer_write_uhwi (ob, av.index);
7b920a9a 5415
7450b255
MJ
5416 bp = bitpack_create (ob->main_stream);
5417 bp_pack_value (&bp, av.by_ref, 1);
5418 streamer_write_bitpack (&bp);
2c9561b5 5419 }
04be694e 5420
7450b255
MJ
5421 streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
5422 for (const ipa_vr &parm_vr : ts->m_vr)
cfe17528 5423 parm_vr.streamer_write (ob);
8bc5448f 5424
7450b255
MJ
5425 streamer_write_uhwi (ob, vec_safe_length (ts->bits));
5426 for (const ipa_bits *bits_jfunc : ts->bits)
209ca542 5427 {
7450b255
MJ
5428 struct bitpack_d bp = bitpack_create (ob->main_stream);
5429 bp_pack_value (&bp, !!bits_jfunc, 1);
5430 streamer_write_bitpack (&bp);
5431 if (bits_jfunc)
209ca542 5432 {
7450b255
MJ
5433 streamer_write_widest_int (ob, bits_jfunc->value);
5434 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
5435 }
5436 }
2c9561b5
MJ
5437}
5438
5439/* Stream in the aggregate value replacement chain for NODE from IB. */
5440
5441static void
04be694e
MJ
5442read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5443 data_in *data_in)
2c9561b5 5444{
2c9561b5 5445 unsigned int count, i;
7450b255
MJ
5446 ipcp_transformation_initialize ();
5447 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
2c9561b5
MJ
5448
5449 count = streamer_read_uhwi (ib);
e0403e95
MJ
5450 if (count > 0)
5451 {
e0403e95
MJ
5452 vec_safe_grow_cleared (ts->m_agg_values, count, true);
5453 for (i = 0; i <count; i++)
5454 {
5455 ipa_argagg_value *av = &(*ts->m_agg_values)[i];;
5456
5457 av->value = stream_read_tree (ib, data_in);
5458 av->unit_offset = streamer_read_uhwi (ib);
5459 av->index = streamer_read_uhwi (ib);
5460
5461 bitpack_d bp = streamer_read_bitpack (ib);
5462 av->by_ref = bp_unpack_value (&bp, 1);
5463 }
5464 }
5465
209ca542
PK
5466 count = streamer_read_uhwi (ib);
5467 if (count > 0)
5468 {
cb3874dc 5469 vec_safe_grow_cleared (ts->m_vr, count, true);
8bc5448f
KV
5470 for (i = 0; i < count; i++)
5471 {
5472 ipa_vr *parm_vr;
5473 parm_vr = &(*ts->m_vr)[i];
cfe17528 5474 parm_vr->streamer_read (ib, data_in);
8bc5448f
KV
5475 }
5476 }
5477 count = streamer_read_uhwi (ib);
5478 if (count > 0)
5479 {
cb3874dc 5480 vec_safe_grow_cleared (ts->bits, count, true);
209ca542
PK
5481 for (i = 0; i < count; i++)
5482 {
209ca542 5483 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
5484 bool known = bp_unpack_value (&bp, 1);
5485 if (known)
209ca542 5486 {
2fb1d6d6
JH
5487 const widest_int value = streamer_read_widest_int (ib);
5488 const widest_int mask = streamer_read_widest_int (ib);
86cd0334 5489 ipa_bits *bits
2fb1d6d6 5490 = ipa_get_ipa_bits_for_value (value, mask);
86cd0334 5491 (*ts->bits)[i] = bits;
209ca542
PK
5492 }
5493 }
5494 }
2c9561b5
MJ
5495}
5496
5497/* Write all aggregate replacement for nodes in set. */
5498
5499void
04be694e 5500ipcp_write_transformation_summaries (void)
2c9561b5 5501{
2c9561b5
MJ
5502 struct output_block *ob;
5503 unsigned int count = 0;
2c9561b5
MJ
5504 lto_symtab_encoder_t encoder;
5505
2c9561b5
MJ
5506 ob = create_output_block (LTO_section_ipcp_transform);
5507 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5508 ob->symbol = NULL;
7450b255
MJ
5509
5510 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
2c9561b5 5511 {
7450b255
MJ
5512 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5513 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5514 if (!cnode)
5515 continue;
5516 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5517 if (useful_ipcp_transformation_info_p (ts)
5518 && lto_symtab_encoder_encode_body_p (encoder, cnode))
2c9561b5
MJ
5519 count++;
5520 }
5521
5522 streamer_write_uhwi (ob, count);
5523
7450b255 5524 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
2c9561b5 5525 {
7450b255
MJ
5526 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5527 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5528 if (!cnode)
5529 continue;
5530 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5531 if (useful_ipcp_transformation_info_p (ts)
5532 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5533 write_ipcp_transformation_info (ob, cnode, ts);
2c9561b5
MJ
5534 }
5535 streamer_write_char_stream (ob->main_stream, 0);
5536 produce_asm (ob, NULL);
5537 destroy_output_block (ob);
5538}
5539
5540/* Read replacements section in file FILE_DATA of length LEN with data
5541 DATA. */
5542
5543static void
5544read_replacements_section (struct lto_file_decl_data *file_data,
5545 const char *data,
5546 size_t len)
5547{
5548 const struct lto_function_header *header =
5549 (const struct lto_function_header *) data;
5550 const int cfg_offset = sizeof (struct lto_function_header);
5551 const int main_offset = cfg_offset + header->cfg_size;
5552 const int string_offset = main_offset + header->main_size;
99b1c316 5553 class data_in *data_in;
2c9561b5
MJ
5554 unsigned int i;
5555 unsigned int count;
5556
207c68cd 5557 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5558 header->main_size, file_data->mode_table);
2c9561b5
MJ
5559
5560 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5561 header->string_size, vNULL);
2c9561b5
MJ
5562 count = streamer_read_uhwi (&ib_main);
5563
5564 for (i = 0; i < count; i++)
5565 {
5566 unsigned int index;
5567 struct cgraph_node *node;
5568 lto_symtab_encoder_t encoder;
5569
5570 index = streamer_read_uhwi (&ib_main);
5571 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5572 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5573 index));
04be694e 5574 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5575 }
5576 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5577 len);
5578 lto_data_in_delete (data_in);
5579}
5580
5581/* Read IPA-CP aggregate replacements. */
5582
5583void
04be694e 5584ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5585{
5586 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5587 struct lto_file_decl_data *file_data;
5588 unsigned int j = 0;
5589
5590 while ((file_data = file_data_vec[j++]))
5591 {
5592 size_t len;
3c56d8d8
ML
5593 const char *data
5594 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5595 &len);
2c9561b5
MJ
5596 if (data)
5597 read_replacements_section (file_data, data, len);
5598 }
5599}
5600
4834e936
MJ
5601/* Adjust the aggregate replacements in TS to reflect any parameter removals
5602 which might have already taken place. If after adjustments there are no
5603 aggregate replacements left, the m_agg_values will be set to NULL. In other
5604 cases, it may be shrunk. */
2c9561b5 5605
4834e936
MJ
5606static void
5607adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
2c9561b5 5608{
ae7a23a3 5609 clone_info *cinfo = clone_info::get (node);
ae7a23a3 5610 if (!cinfo || !cinfo->param_adjustments)
4834e936 5611 return;
2c9561b5 5612
4834e936
MJ
5613 auto_vec<int, 16> new_indices;
5614 cinfo->param_adjustments->get_updated_indices (&new_indices);
e0403e95 5615 bool removed_item = false;
e0403e95
MJ
5616 unsigned dst_index = 0;
5617 unsigned count = ts->m_agg_values->length ();
5618 for (unsigned i = 0; i < count; i++)
2c9561b5 5619 {
e0403e95 5620 ipa_argagg_value *v = &(*ts->m_agg_values)[i];
ff6686d2 5621 gcc_checking_assert (v->index >= 0);
2c9561b5 5622
4834e936
MJ
5623 int new_idx = -1;
5624 if ((unsigned) v->index < new_indices.length ())
5625 new_idx = new_indices[v->index];
5626
5bc4cb04 5627 if (new_idx >= 0)
5bc4cb04 5628 {
e0403e95
MJ
5629 v->index = new_idx;
5630 if (removed_item)
5631 (*ts->m_agg_values)[dst_index] = *v;
5632 dst_index++;
5633 }
5634 else
4834e936 5635 removed_item = true;
ff6686d2 5636 }
e0403e95
MJ
5637
5638 if (dst_index == 0)
5639 {
5640 ggc_free (ts->m_agg_values);
5641 ts->m_agg_values = NULL;
5642 }
5643 else if (removed_item)
5644 ts->m_agg_values->truncate (dst_index);
5645
4834e936 5646 return;
2c9561b5
MJ
5647}
5648
/* Dominator walker driving the ipcp modification phase.  It visits each
   basic block in dominator order and replaces aggregate loads from
   parameters with constants that IPA-CP has proven they must contain.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being walked, DESCS are its parameter
     descriptors, TS holds the known aggregate constants and *SC is set to
     true whenever a statement gets modified.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  /* Purge EH edges that became dead because of the replacements recorded
     in m_need_eh_cleanup; return true if the CFG changed.  */
  bool cleanup_eh ()
  { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  ipcp_transformation *m_ts;
  /* Points to a flag owned by the caller, set when any stmt changes.  */
  bool *m_something_changed;
  /* Indices of basic blocks whose EH edges may need purging.  */
  auto_bitmap m_need_eh_cleanup;
};
5671
/* Scan all statements of BB and replace loads from parameter aggregates
   with constants known through IPA-CP, converting types when necessary.
   Always returns NULL (no edge to redirect the walk to).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only plain register-typed loads are candidates.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Identify which parameter (INDEX) and which offset within its
	 aggregate this load accesses.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &bit_offset, &size, &by_ref))
	continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      /* The known constant must cover exactly the loaded size.  */
      if (!v
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
	{
	  /* Prefer a value-preserving conversion; fall back to a
	     VIEW_CONVERT_EXPR when only the bit size matches.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a load with a constant may make the statement unable to
	 throw; remember the block so EH edges can be purged later.  */
      if (maybe_clean_eh_stmt (stmt))
	bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
5763
c7ac9a0c
JH
5764/* Return true if we have recorded VALUE and MASK about PARM.
5765 Set VALUE and MASk accordingly. */
5766
5767bool
5768ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5769{
5770 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5771 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5772 if (!ts || vec_safe_length (ts->bits) == 0)
5773 return false;
5774
7f986e2e
MJ
5775 int i = ts->get_param_index (current_function_decl, parm);
5776 if (i < 0)
5777 return false;
ae7a23a3
JH
5778 clone_info *cinfo = clone_info::get (cnode);
5779 if (cinfo && cinfo->param_adjustments)
c7ac9a0c 5780 {
ae7a23a3 5781 i = cinfo->param_adjustments->get_original_index (i);
c7ac9a0c
JH
5782 if (i < 0)
5783 return false;
5784 }
5785
5786 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5787 if (!bits[i])
5788 return false;
5789 *mask = bits[i]->mask;
5790 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5791 return true;
5792}
5793
/* Update bits info of formal parameters of NODE as described in TS.  For
   integral parameters this sets the SSA default def's nonzero-bits mask;
   for pointer parameters it derives and records alignment information.  */

static void
ipcp_update_bits (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->bits))
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* If NODE is a clone with removed/reordered parameters, new_indices
     maps old parameter indices to the clone's indices (or negative).  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;	/* Parameter was removed in the clone.  */
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Only integral and pointer SSA parameters carry bits info.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* A bit may be nonzero if it is set in the known value or not
	     known at all (set in the mask).  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the mask bounds the
	     guaranteed alignment; the value gives the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5900
7f986e2e 5901/* Update value range of formal parameters of NODE as described in TS. */
8bc5448f
KV
5902
5903static void
7f986e2e 5904ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
8bc5448f 5905{
7f986e2e 5906 if (vec_safe_is_empty (ts->m_vr))
8bc5448f
KV
5907 return;
5908 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5909 unsigned count = vr.length ();
ff6686d2
MJ
5910 if (!count)
5911 return;
8bc5448f 5912
ff6686d2
MJ
5913 auto_vec<int, 16> new_indices;
5914 bool need_remapping = false;
ae7a23a3
JH
5915 clone_info *cinfo = clone_info::get (node);
5916 if (cinfo && cinfo->param_adjustments)
8bc5448f 5917 {
ae7a23a3 5918 cinfo->param_adjustments->get_updated_indices (&new_indices);
ff6686d2
MJ
5919 need_remapping = true;
5920 }
5921 auto_vec <tree, 16> parm_decls;
5922 push_function_arg_decls (&parm_decls, node->decl);
5923
5924 for (unsigned i = 0; i < count; ++i)
5925 {
5926 tree parm;
5927 int remapped_idx;
5928 if (need_remapping)
5929 {
5930 if (i >= new_indices.length ())
5931 continue;
5932 remapped_idx = new_indices[i];
5933 if (remapped_idx < 0)
5934 continue;
5935 }
5936 else
5937 remapped_idx = i;
5938
5939 parm = parm_decls[remapped_idx];
5940
8bc5448f 5941 gcc_checking_assert (parm);
8bc5448f
KV
5942 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5943
5944 if (!ddef || !is_gimple_reg (parm))
5945 continue;
5946
cfe17528 5947 if (vr[i].known_p ())
8bc5448f 5948 {
bc5a2c2e 5949 Value_Range tmp;
cfe17528
AH
5950 vr[i].get_vrange (tmp);
5951
5952 if (!tmp.undefined_p () && !tmp.varying_p ())
718625ad
KV
5953 {
5954 if (dump_file)
5955 {
ff6686d2
MJ
5956 fprintf (dump_file, "Setting value range of param %u "
5957 "(now %i) ", i, remapped_idx);
cfe17528 5958 tmp.dump (dump_file);
718625ad
KV
5959 fprintf (dump_file, "]\n");
5960 }
cfe17528 5961 set_range_info (ddef, tmp);
8bc5448f 5962 }
8bc5448f
KV
5963 }
5964 }
5965}
5966
/* IPCP transformation phase doing propagation of aggregate values.  Apply
   everything recorded in NODE's transformation summary to the function
   body: known bits, value ranges and aggregate constants.  Returns a TODO
   mask for the pass manager.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Nothing to do if the summary is missing or entirely empty.  */
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
	  && vec_safe_is_empty (ts->bits)
	  && vec_safe_is_empty (ts->m_vr)))
    return 0;

  /* Bits and value ranges are applied directly to the SSA default defs;
     only aggregate values require the dominator walk below.  */
  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_bits (node, ts);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  /* Drop/remap aggregate replacements of parameters that clone
     materialization has removed or renumbered.  */
  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
	fprintf (dump_file, "  All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "     Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  /* Purge EH edges made dead by constant replacements.  */
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->m_agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
86cd0334 6047
ac6f2e59 6048
86cd0334 6049#include "gt-ipa-prop.h"