]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
path solver: Revert computation of ranges in gimple order.
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
99dee823 2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
c7b6a758 55#include "options.h"
ae7a23a3 56#include "symtab-clones.h"
6cef01c3 57#include "attr-fnspec.h"
45f4e2b0 58#include "gimple-range.h"
771578a0 59
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Function summary holding an ipcp_transformation record per function;
   presumably produced by analysis and consumed by the IPA-CP transformation
   phase -- TODO confirm against ipa-cp.c.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 67
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  /* Combine the hashes of both the known-bits value and the mask of P.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  /* Two entries are interchangeable only if both value and mask match.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }

  /* An empty slot looks the same as zero-initialized storage.  */
  static const bool empty_zero_p = true;

  /* NULL marks an empty slot ...  */
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }

  /* ... while the otherwise-invalid pointer value 1 marks a deleted one.  */
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
110
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  /* Hash the range kind together with both range bounds.  */
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }

  /* Ranges only unify when bounds, kind and underlying types agree.  */
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
	    && types_compatible_p (a->type (), b->type ()));
  }

  /* An empty slot looks the same as zero-initialized storage.  */
  static const bool empty_zero_p = true;

  /* NULL marks an empty slot ...  */
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }

  /* ... while the otherwise-invalid pointer value 1 marks a deleted one.  */
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
157
/* Holders of ipa cgraph hooks: handle for the hook that reacts to new
   functions being inserted into the call graph.  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 160
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 177
5fe8e757
MJ
178/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
180
181static bool
182ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
183{
67348ccc 184 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
185
186 if (!fs_opts)
187 return false;
2bf86c84 188 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
189}
190
be95e2b9
MJ
191/* Return index of the formal whose tree is PTREE in function which corresponds
192 to INFO. */
193
d044dd17 194static int
f65f1ae3
MJ
195ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
196 tree ptree)
518dc859
RL
197{
198 int i, count;
199
f65f1ae3 200 count = vec_safe_length (descriptors);
518dc859 201 for (i = 0; i < count; i++)
f65f1ae3 202 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
203 return i;
204
205 return -1;
206}
207
d044dd17
MJ
208/* Return index of the formal whose tree is PTREE in function which corresponds
209 to INFO. */
210
211int
99b1c316 212ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
d044dd17
MJ
213{
214 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
215}
216
217/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
218 NODE. */
be95e2b9 219
f8e2a1ed
MJ
220static void
221ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 222 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
223{
224 tree fndecl;
225 tree fnargs;
226 tree parm;
227 int param_num;
3e293154 228
67348ccc 229 fndecl = node->decl;
0e8853ee 230 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
231 fnargs = DECL_ARGUMENTS (fndecl);
232 param_num = 0;
910ad8de 233 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 234 {
209ca542 235 descriptors[param_num].decl_or_type = parm;
40a777e8
JH
236 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
237 descriptors[param_num].move_cost = cost;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost == descriptors[param_num].move_cost);
518dc859
RL
240 param_num++;
241 }
242}
243
3f84bf08
MJ
244/* Return how many formal parameters FNDECL has. */
245
fd29c024 246int
310bc633 247count_formal_params (tree fndecl)
3f84bf08
MJ
248{
249 tree parm;
250 int count = 0;
0e8853ee 251 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 252
910ad8de 253 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
254 count++;
255
256 return count;
257}
258
0e8853ee
JH
259/* Return the declaration of Ith formal parameter of the function corresponding
260 to INFO. Note there is no setter function as this array is built just once
261 using ipa_initialize_node_params. */
262
263void
99b1c316 264ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
0e8853ee
JH
265{
266 fprintf (file, "param #%i", i);
f65f1ae3 267 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
268 {
269 fprintf (file, " ");
ef6cb4c7 270 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
271 }
272}
273
159f01f8
MJ
274/* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
0e8853ee 276
159f01f8 277static bool
0e8853ee
JH
278ipa_alloc_node_params (struct cgraph_node *node, int param_count)
279{
a4a3cdd0 280 ipa_node_params *info = ipa_node_params_sum->get_create (node);
0e8853ee 281
f65f1ae3 282 if (!info->descriptors && param_count)
159f01f8 283 {
cb3874dc 284 vec_safe_grow_cleared (info->descriptors, param_count, true);
159f01f8
MJ
285 return true;
286 }
287 else
288 return false;
0e8853ee
JH
289}
290
f8e2a1ed
MJ
291/* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
293 param_decls. */
be95e2b9 294
f8e2a1ed
MJ
295void
296ipa_initialize_node_params (struct cgraph_node *node)
297{
a4a3cdd0 298 ipa_node_params *info = ipa_node_params_sum->get_create (node);
f8e2a1ed 299
159f01f8
MJ
300 if (!info->descriptors
301 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
302 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
303}
304
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  /* For addresses of CONST_DECLs also show the constant they hold.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* A NOP_EXPR pass-through has no extra operand to print.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Dump the parts of an aggregate passed in this argument that are
	 known, one item per line.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      /* Both PASS_THROUGH and LOAD_AGG items may carry an arithmetic
		 operation with an optional operand.  */
	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  /* NOTE(review): dumps to DUMP_FILE rather than F, unlike everything
	     else in this function -- looks suspicious; confirm intent.  */
	  ctx->dump (dump_file);
	}

      /* Known-bits information (value/mask pair), if any.  */
      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      /* Value range information, if any.  */
      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}
446
447
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  /* First the direct calls ...  */
  for (cs = node->callees; cs; cs = cs->next_callee)
    {

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* ... then the indirect ones, together with their indirect-call info.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
501
502/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 503
3e293154
MJ
504void
505ipa_print_all_jump_functions (FILE *f)
506{
507 struct cgraph_node *node;
508
ca30a539 509 fprintf (f, "\nJump functions:\n");
65c70e6b 510 FOR_EACH_FUNCTION (node)
3e293154
MJ
511 {
512 ipa_print_node_jump_functions (f, node);
513 }
514}
515
04be694e
MJ
516/* Set jfunc to be a know-really nothing jump function. */
517
518static void
519ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
520{
521 jfunc->type = IPA_JF_UNKNOWN;
04be694e
MJ
522}
523
b8f6e610
MJ
524/* Set JFUNC to be a copy of another jmp (to be used by jump function
525 combination code). The two functions will share their rdesc. */
526
527static void
528ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
529 struct ipa_jump_func *src)
530
531{
532 gcc_checking_assert (src->type == IPA_JF_CONST);
533 dst->type = IPA_JF_CONST;
534 dst->value.constant = src->value.constant;
535}
536
7b872d9e
MJ
537/* Set JFUNC to be a constant jmp function. */
538
539static void
4502fe8d
MJ
540ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
541 struct cgraph_edge *cs)
7b872d9e
MJ
542{
543 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
544 jfunc->value.constant.value = unshare_expr_without_location (constant);
545
546 if (TREE_CODE (constant) == ADDR_EXPR
13586172
MJ
547 && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
548 || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
549 && TREE_STATIC (TREE_OPERAND (constant, 0)))))
4502fe8d
MJ
550 {
551 struct ipa_cst_ref_desc *rdesc;
4502fe8d 552
601f3293 553 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
554 rdesc->cs = cs;
555 rdesc->next_duplicate = NULL;
556 rdesc->refcount = 1;
557 jfunc->value.constant.rdesc = rdesc;
558 }
559 else
560 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
561}
562
563/* Set JFUNC to be a simple pass-through jump function. */
564static void
8b7773a4 565ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 566 bool agg_preserved)
7b872d9e
MJ
567{
568 jfunc->type = IPA_JF_PASS_THROUGH;
569 jfunc->value.pass_through.operand = NULL_TREE;
570 jfunc->value.pass_through.formal_id = formal_id;
571 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 572 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
573}
574
a2b4c188
KV
575/* Set JFUNC to be an unary pass through jump function. */
576
577static void
578ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
579 enum tree_code operation)
580{
581 jfunc->type = IPA_JF_PASS_THROUGH;
582 jfunc->value.pass_through.operand = NULL_TREE;
583 jfunc->value.pass_through.formal_id = formal_id;
584 jfunc->value.pass_through.operation = operation;
585 jfunc->value.pass_through.agg_preserved = false;
586}
7b872d9e
MJ
587/* Set JFUNC to be an arithmetic pass through jump function. */
588
589static void
590ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
591 tree operand, enum tree_code operation)
592{
593 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 594 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
595 jfunc->value.pass_through.formal_id = formal_id;
596 jfunc->value.pass_through.operation = operation;
8b7773a4 597 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
598}
599
600/* Set JFUNC to be an ancestor jump function. */
601
602static void
603ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 604 int formal_id, bool agg_preserved)
7b872d9e
MJ
605{
606 jfunc->type = IPA_JF_ANCESTOR;
607 jfunc->value.ancestor.formal_id = formal_id;
608 jfunc->value.ancestor.offset = offset;
8b7773a4 609 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
610}
611
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
621
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
636
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Calls are ignored entirely -- see the big comment above.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers only mark end of lifetime; they do not store a VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores might overwrite an embedded vtable pointer, so only
	 scalar stores can be ruled out here.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume everything else may store a VMT pointer.  */
  return true;
}
701
3b97a5c7
MJ
702/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
703 to check whether a particular statement may modify the virtual table
704 pointerIt stores its result into DATA, which points to a
11478306 705 prop_type_change_info structure. */
f65cf2b7
MJ
706
707static bool
708check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
709{
355fe088 710 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 711 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
712
713 if (stmt_may_be_vtbl_ptr_store (stmt))
714 {
715 tci->type_maybe_changed = true;
716 return true;
717 }
718 else
719 return false;
720}
721
058d0a90
JH
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 775
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* No alias-walking budget left: assume no type change rather than paying
     for a walk.  */
  if (fbi->aa_walk_budget == 0)
    return false;

  /* Set up a reference describing just the VMT-pointer-sized slot at OFFSET
     within ARG.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  /* Charge the walk against the budget; a negative result means the walker
     gave up, which exhausts the budget entirely.  */
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  /* Only a completed walk that saw no potential VMT store proves the type
     did not change.  */
  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
835
058d0a90 836/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
c199f329 837 If it is, return true. ARG is the object itself (not a pointer
058d0a90
JH
838 to it, unless dereferenced). BASE is the base of the memory access as
839 returned by get_ref_base_and_extent, as is the offset. */
840
841static bool
c628d1c3 842detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
c199f329 843 tree comp_type, gcall *call,
c628d1c3 844 HOST_WIDE_INT offset)
058d0a90
JH
845{
846 if (!flag_devirtualize)
847 return false;
848
849 if (TREE_CODE (base) == MEM_REF
850 && !param_type_may_change_p (current_function_decl,
851 TREE_OPERAND (base, 0),
852 call))
853 return false;
c628d1c3 854 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
c199f329 855 call, offset);
058d0a90
JH
856}
857
f65cf2b7
MJ
858/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
859 SSA name (its dereference will become the base and the offset is assumed to
860 be zero). */
861
862static bool
c628d1c3 863detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
c199f329 864 gcall *call)
f65cf2b7
MJ
865{
866 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 867 if (!flag_devirtualize
06d65050 868 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
869 return false;
870
058d0a90
JH
871 if (!param_type_may_change_p (current_function_decl, arg, call))
872 return false;
873
f65cf2b7 874 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 875 build_int_cst (ptr_type_node, 0));
f65cf2b7 876
c628d1c3 877 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
c199f329 878 call, 0);
f65cf2b7
MJ
879}
880
fdb0e1b4
MJ
881/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
882 boolean variable pointed to by DATA. */
883
884static bool
885mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
886 void *data)
887{
888 bool *b = (bool *) data;
889 *b = true;
890 return true;
891}
892
8aab5218
MJ
893/* Find the nearest valid aa status for parameter specified by INDEX that
894 dominates BB. */
895
56b40062
MJ
896static struct ipa_param_aa_status *
897find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
898 int index)
899{
900 while (true)
901 {
902 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
903 if (!bb)
904 return NULL;
905 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
906 if (!bi->param_aa_statuses.is_empty ()
907 && bi->param_aa_statuses[index].valid)
908 return &bi->param_aa_statuses[index];
909 }
910}
911
912/* Get AA status structure for the given BB and parameter with INDEX. Allocate
913 structures and/or intialize the result with a dominating description as
914 necessary. */
915
56b40062
MJ
916static struct ipa_param_aa_status *
917parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
918 int index)
919{
920 gcc_checking_assert (fbi);
921 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
922 if (bi->param_aa_statuses.is_empty ())
cb3874dc 923 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
56b40062 924 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
8aab5218
MJ
925 if (!paa->valid)
926 {
927 gcc_checking_assert (!paa->parm_modified
928 && !paa->ref_modified
929 && !paa->pt_modified);
56b40062 930 struct ipa_param_aa_status *dom_paa;
8aab5218
MJ
931 dom_paa = find_dominating_aa_status (fbi, bb, index);
932 if (dom_paa)
933 *paa = *dom_paa;
934 else
935 paa->valid = true;
936 }
937
938 return paa;
939}
940
688010ba 941/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 942 a value known not to be modified in this function before reaching the
8aab5218
MJ
943 statement STMT. FBI holds information about the function we have so far
944 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
945
946static bool
56b40062 947parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 948 gimple *stmt, tree parm_load)
fdb0e1b4 949{
56b40062 950 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
951 bool modified = false;
952 ao_ref refd;
953
776e4fe2
MJ
954 tree base = get_base_address (parm_load);
955 gcc_assert (TREE_CODE (base) == PARM_DECL);
956 if (TREE_READONLY (base))
957 return true;
958
c628d1c3
MJ
959 gcc_checking_assert (fbi);
960 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
6cc886bf 961 if (paa->parm_modified || fbi->aa_walk_budget == 0)
c628d1c3 962 return false;
fdb0e1b4
MJ
963
964 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 965 ao_ref_init (&refd, parm_load);
8aab5218 966 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3 967 &modified, NULL, NULL,
6cc886bf 968 fbi->aa_walk_budget);
c628d1c3
MJ
969 if (walked < 0)
970 {
971 modified = true;
6cc886bf 972 fbi->aa_walk_budget = 0;
c628d1c3 973 }
6cc886bf 974 else
c628d1c3 975 fbi->aa_walk_budget -= walked;
8aab5218
MJ
976 if (paa && modified)
977 paa->parm_modified = true;
8b7773a4 978 return !modified;
fdb0e1b4
MJ
979}
980
a2b4c188
KV
981/* If STMT is an assignment that loads a value from an parameter declaration,
982 return the index of the parameter in ipa_node_params which has not been
983 modified. Otherwise return -1. */
984
985static int
986load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 987 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
988 gimple *stmt)
989{
bda2bc48
MJ
990 int index;
991 tree op1;
992
a2b4c188
KV
993 if (!gimple_assign_single_p (stmt))
994 return -1;
995
bda2bc48
MJ
996 op1 = gimple_assign_rhs1 (stmt);
997 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
998 return -1;
999
bda2bc48
MJ
1000 index = ipa_get_param_decl_index_1 (descriptors, op1);
1001 if (index < 0
1002 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
1003 return -1;
1004
bda2bc48 1005 return index;
a2b4c188
KV
1006}
1007
8aab5218
MJ
1008/* Return true if memory reference REF (which must be a load through parameter
1009 with INDEX) loads data that are known to be unmodified in this function
1010 before reaching statement STMT. */
8b7773a4
MJ
1011
1012static bool
56b40062 1013parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 1014 int index, gimple *stmt, tree ref)
8b7773a4 1015{
56b40062 1016 struct ipa_param_aa_status *paa;
8b7773a4
MJ
1017 bool modified = false;
1018 ao_ref refd;
1019
c628d1c3
MJ
1020 gcc_checking_assert (fbi);
1021 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
6cc886bf 1022 if (paa->ref_modified || fbi->aa_walk_budget == 0)
c628d1c3 1023 return false;
8b7773a4 1024
8aab5218 1025 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1026 ao_ref_init (&refd, ref);
8aab5218 1027 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3 1028 &modified, NULL, NULL,
6cc886bf 1029 fbi->aa_walk_budget);
c628d1c3
MJ
1030 if (walked < 0)
1031 {
1032 modified = true;
1033 fbi->aa_walk_budget = 0;
1034 }
1035 else
1036 fbi->aa_walk_budget -= walked;
1037 if (modified)
8aab5218 1038 paa->ref_modified = true;
8b7773a4
MJ
1039 return !modified;
1040}
1041
8aab5218
MJ
1042/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1043 is known to be unmodified in this function before reaching call statement
1044 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1045
1046static bool
56b40062 1047parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1048 gimple *call, tree parm)
8b7773a4
MJ
1049{
1050 bool modified = false;
1051 ao_ref refd;
1052
1053 /* It's unnecessary to calculate anything about memory contnets for a const
1054 function because it is not goin to use it. But do not cache the result
1055 either. Also, no such calculations for non-pointers. */
1056 if (!gimple_vuse (call)
c628d1c3 1057 || !POINTER_TYPE_P (TREE_TYPE (parm)))
8b7773a4
MJ
1058 return false;
1059
56b40062
MJ
1060 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1061 gimple_bb (call),
1062 index);
6cc886bf 1063 if (paa->pt_modified || fbi->aa_walk_budget == 0)
8b7773a4
MJ
1064 return false;
1065
1066 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218 1067 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
c628d1c3 1068 &modified, NULL, NULL,
6cc886bf 1069 fbi->aa_walk_budget);
c628d1c3
MJ
1070 if (walked < 0)
1071 {
1072 fbi->aa_walk_budget = 0;
1073 modified = true;
1074 }
1075 else
1076 fbi->aa_walk_budget -= walked;
8b7773a4 1077 if (modified)
8aab5218 1078 paa->pt_modified = true;
8b7773a4
MJ
1079 return !modified;
1080}
1081
91bb9f80
MJ
1082/* Return true if we can prove that OP is a memory reference loading
1083 data from an aggregate passed as a parameter.
1084
1085 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1086 false if it cannot prove that the value has not been modified before the
1087 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1088 if it cannot prove the value has not been modified, in that case it will
1089 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1090
8b7773a4
MJ
1091 INFO and PARMS_AINFO describe parameters of the current function (but the
1092 latter can be NULL), STMT is the load statement. If function returns true,
1093 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1094 within the aggregate and whether it is a load from a value passed by
1095 reference respectively. */
1096
ff302741 1097bool
56b40062 1098ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1099 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1100 gimple *stmt, tree op, int *index_p,
86003645 1101 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
91bb9f80 1102 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1103{
1104 int index;
588db50c 1105 HOST_WIDE_INT size;
ee45a32d 1106 bool reverse;
588db50c 1107 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1108
588db50c 1109 if (!base)
8b7773a4
MJ
1110 return false;
1111
1112 if (DECL_P (base))
1113 {
d044dd17 1114 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1115 if (index >= 0
8aab5218 1116 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1117 {
1118 *index_p = index;
1119 *by_ref_p = false;
3ff2ca23
JJ
1120 if (size_p)
1121 *size_p = size;
91bb9f80
MJ
1122 if (guaranteed_unmodified)
1123 *guaranteed_unmodified = true;
8b7773a4
MJ
1124 return true;
1125 }
1126 return false;
1127 }
1128
1129 if (TREE_CODE (base) != MEM_REF
1130 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1131 || !integer_zerop (TREE_OPERAND (base, 1)))
1132 return false;
1133
1134 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1135 {
1136 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1137 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1138 }
1139 else
1140 {
1141 /* This branch catches situations where a pointer parameter is not a
1142 gimple register, for example:
1143
1144 void hip7(S*) (struct S * p)
1145 {
1146 void (*<T2e4>) (struct S *) D.1867;
1147 struct S * p.1;
1148
1149 <bb 2>:
1150 p.1_1 = p;
1151 D.1867_2 = p.1_1->f;
1152 D.1867_2 ();
1153 gdp = &p;
1154 */
1155
355fe088 1156 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1157 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1158 }
1159
91bb9f80 1160 if (index >= 0)
8b7773a4 1161 {
91bb9f80
MJ
1162 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1163 if (!data_preserved && !guaranteed_unmodified)
1164 return false;
1165
8b7773a4
MJ
1166 *index_p = index;
1167 *by_ref_p = true;
3ff2ca23
JJ
1168 if (size_p)
1169 *size_p = size;
91bb9f80
MJ
1170 if (guaranteed_unmodified)
1171 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1172 return true;
1173 }
1174 return false;
1175}
1176
eb270950
FX
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  An offset of -1 marks a
     direct (non-aggregate) parameter load for the caller.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  /* The reference must load the full underlying type.  */
  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      /* By-value loads only make sense from aggregate-typed parameters.  */
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
1237
c7b6a758
JH
1238/* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1239 to find original pointer. Initialize RET to the pointer which results from
1240 the walk.
1241 If offset is known return true and initialize OFFSET_RET. */
1242
1243bool
1244unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1245{
1246 poly_int64 offset = 0;
1247 bool offset_known = true;
1248 int i;
1249
1250 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1251 {
1252 if (TREE_CODE (op) == ADDR_EXPR)
1253 {
1254 poly_int64 extra_offset = 0;
1255 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1256 &offset);
1257 if (!base)
1258 {
1259 base = get_base_address (TREE_OPERAND (op, 0));
1260 if (TREE_CODE (base) != MEM_REF)
1261 break;
1262 offset_known = false;
1263 }
1264 else
1265 {
1266 if (TREE_CODE (base) != MEM_REF)
1267 break;
1268 offset += extra_offset;
1269 }
1270 op = TREE_OPERAND (base, 0);
1271 if (mem_ref_offset (base).to_shwi (&extra_offset))
1272 offset += extra_offset;
1273 else
1274 offset_known = false;
1275 }
1276 else if (TREE_CODE (op) == SSA_NAME
1277 && !SSA_NAME_IS_DEFAULT_DEF (op))
1278 {
1279 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1280
1281 if (gimple_assign_single_p (pstmt))
1282 op = gimple_assign_rhs1 (pstmt);
1283 else if (is_gimple_assign (pstmt)
1284 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1285 {
1286 poly_int64 extra_offset = 0;
1287 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1288 &extra_offset))
1289 offset += extra_offset;
1290 else
1291 offset_known = false;
1292 op = gimple_assign_rhs1 (pstmt);
1293 }
1294 else
1295 break;
1296 }
1297 else
1298 break;
1299 }
1300 *ret = op;
1301 *offset_ret = offset;
1302 return offset_known;
1303}
1304
b258210c 1305/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1306 of an assignment statement STMT, try to determine whether we are actually
1307 handling any of the following cases and construct an appropriate jump
1308 function into JFUNC if so:
1309
1310 1) The passed value is loaded from a formal parameter which is not a gimple
1311 register (most probably because it is addressable, the value has to be
1312 scalar) and we can guarantee the value has not changed. This case can
1313 therefore be described by a simple pass-through jump function. For example:
1314
1315 foo (int a)
1316 {
1317 int a.0;
1318
1319 a.0_2 = a;
1320 bar (a.0_2);
1321
1322 2) The passed value can be described by a simple arithmetic pass-through
1323 jump function. E.g.
1324
1325 foo (int a)
1326 {
1327 int D.2064;
1328
1329 D.2064_4 = a.1(D) + 4;
1330 bar (D.2064_4);
1331
1332 This case can also occur in combination of the previous one, e.g.:
1333
1334 foo (int a, int z)
1335 {
1336 int a.0;
1337 int D.2064;
1338
1339 a.0_3 = a;
1340 D.2064_4 = a.0_3 + 4;
1341 foo (D.2064_4);
1342
1343 3) The passed value is an address of an object within another one (which
1344 also passed by reference). Such situations are described by an ancestor
1345 jump function and describe situations such as:
1346
1347 B::foo() (struct B * const this)
1348 {
1349 struct A * D.1845;
1350
1351 D.1845_2 = &this_1(D)->D.1748;
1352 A::bar (D.1845_2);
1353
1354 INFO is the structure describing individual parameters access different
1355 stages of IPA optimizations. PARMS_AINFO contains the information that is
1356 only needed for intraprocedural analysis. */
685b0d13
MJ
1357
1358static void
56b40062 1359compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1360 class ipa_node_params *info,
b258210c 1361 struct ipa_jump_func *jfunc,
355fe088 1362 gcall *call, gimple *stmt, tree name,
06d65050 1363 tree param_type)
685b0d13 1364{
588db50c 1365 HOST_WIDE_INT offset, size;
fdb0e1b4 1366 tree op1, tc_ssa, base, ssa;
ee45a32d 1367 bool reverse;
685b0d13 1368 int index;
685b0d13 1369
685b0d13 1370 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1371
fdb0e1b4 1372 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1373 {
fdb0e1b4
MJ
1374 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1375 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1376 else
bda2bc48
MJ
1377 index = load_from_unmodified_param (fbi, info->descriptors,
1378 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1379 tc_ssa = op1;
1380 }
1381 else
1382 {
bda2bc48 1383 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1384 tc_ssa = gimple_assign_lhs (stmt);
1385 }
1386
1387 if (index >= 0)
1388 {
a77af182 1389 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1390 {
a77af182
RB
1391 case GIMPLE_BINARY_RHS:
1392 {
1393 tree op2 = gimple_assign_rhs2 (stmt);
1394 if (!is_gimple_ip_invariant (op2)
1395 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1396 != tcc_comparison)
1397 && !useless_type_conversion_p (TREE_TYPE (name),
1398 TREE_TYPE (op1))))
1399 return;
1400
1401 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1402 gimple_assign_rhs_code (stmt));
1403 break;
1404 }
1405 case GIMPLE_SINGLE_RHS:
1406 {
1407 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1408 tc_ssa);
1409 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1410 break;
1411 }
1412 case GIMPLE_UNARY_RHS:
b66113e9 1413 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1414 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1415 gimple_assign_rhs_code (stmt));
a77af182 1416 default:;
8b7773a4 1417 }
685b0d13
MJ
1418 return;
1419 }
1420
1421 if (TREE_CODE (op1) != ADDR_EXPR)
1422 return;
1423 op1 = TREE_OPERAND (op1, 0);
588db50c 1424 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1425 offset_int mem_offset;
1426 if (!base
1427 || TREE_CODE (base) != MEM_REF
1428 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1429 return;
aca52e6f 1430 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1431 ssa = TREE_OPERAND (base, 0);
1432 if (TREE_CODE (ssa) != SSA_NAME
1433 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1434 || offset < 0)
685b0d13
MJ
1435 return;
1436
b8f6e610 1437 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1438 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1439 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1440 ipa_set_ancestor_jf (jfunc, offset, index,
1441 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1442}
1443
40591473
MJ
1444/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1445 it looks like:
1446
1447 iftmp.1_3 = &obj_2(D)->D.1762;
1448
1449 The base of the MEM_REF must be a default definition SSA NAME of a
1450 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1451 whole MEM_REF expression is returned and the offset calculated from any
1452 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1453 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1454
1455static tree
355fe088 1456get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1457{
588db50c 1458 HOST_WIDE_INT size;
40591473 1459 tree expr, parm, obj;
ee45a32d 1460 bool reverse;
40591473
MJ
1461
1462 if (!gimple_assign_single_p (assign))
1463 return NULL_TREE;
1464 expr = gimple_assign_rhs1 (assign);
1465
1466 if (TREE_CODE (expr) != ADDR_EXPR)
1467 return NULL_TREE;
1468 expr = TREE_OPERAND (expr, 0);
1469 obj = expr;
588db50c 1470 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1471
aca52e6f
RS
1472 offset_int mem_offset;
1473 if (!expr
1474 || TREE_CODE (expr) != MEM_REF
1475 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1476 return NULL_TREE;
1477 parm = TREE_OPERAND (expr, 0);
1478 if (TREE_CODE (parm) != SSA_NAME
1479 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1480 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1481 return NULL_TREE;
1482
aca52e6f 1483 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1484 *obj_p = obj;
1485 return expr;
1486}
1487
685b0d13 1488
b258210c
MJ
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a
   formal parameter and thus can be described by an ancestor jump function
   and if so, write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero pointer, the other the adjusted
     address.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-zero argument must come from the "&obj->ancestor" assignment in
     a block with a single predecessor (the condition block).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must be the assignment block or the condition
     block; anything else means a different CFG shape.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1569
be95e2b9
MJ
1570/* Inspect the given TYPE and return true iff it has the same structure (the
1571 same number of fields of the same types) as a C++ member pointer. If
1572 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1573 corresponding fields there. */
1574
3e293154
MJ
1575static bool
1576type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1577{
1578 tree fld;
1579
1580 if (TREE_CODE (type) != RECORD_TYPE)
1581 return false;
1582
1583 fld = TYPE_FIELDS (type);
1584 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1585 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1586 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1587 return false;
1588
1589 if (method_ptr)
1590 *method_ptr = fld;
1591
910ad8de 1592 fld = DECL_CHAIN (fld);
8b7773a4 1593 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1594 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1595 return false;
1596 if (delta)
1597 *delta = fld;
1598
910ad8de 1599 if (DECL_CHAIN (fld))
3e293154
MJ
1600 return false;
1601
1602 return true;
1603}
1604
61502ca8 1605/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
eb270950
FX
1606 return the rhs of its defining statement, and this statement is stored in
1607 *RHS_STMT. Otherwise return RHS as it is. */
7ec49257
MJ
1608
1609static inline tree
eb270950 1610get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
7ec49257
MJ
1611{
1612 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1613 {
355fe088 1614 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1615
1616 if (gimple_assign_single_p (def_stmt))
1617 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1618 else
1619 break;
eb270950 1620 *rhs_stmt = def_stmt;
7ec49257
MJ
1621 }
1622 return rhs;
1623}
1624
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1641
eb270950
FX
1642/* Add an aggregate content item into a linked list of
1643 ipa_known_agg_contents_list structure, in which all elements
1644 are sorted ascendingly by offset. */
0d48ee34 1645
46771da5
FX
1646static inline void
1647add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1648 struct ipa_known_agg_contents_list *item)
0d48ee34 1649{
46771da5
FX
1650 struct ipa_known_agg_contents_list *list = *plist;
1651
1652 for (; list; list = list->next)
0d48ee34 1653 {
46771da5
FX
1654 if (list->offset >= item->offset)
1655 break;
1656
1657 plist = &list->next;
0d48ee34
MJ
1658 }
1659
46771da5
FX
1660 item->next = list;
1661 *plist = item;
1662}
1663
eb270950 1664/* Check whether a given aggregate content is clobbered by certain element in
46771da5
FX
1665 a linked list of ipa_known_agg_contents_list. */
1666
1667static inline bool
1668clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1669 struct ipa_known_agg_contents_list *item)
1670{
1671 for (; list; list = list->next)
0d48ee34 1672 {
46771da5
FX
1673 if (list->offset >= item->offset)
1674 return list->offset < item->offset + item->size;
1675
1676 if (list->offset + list->size > item->offset)
1677 return true;
0d48ee34 1678 }
46771da5
FX
1679
1680 return false;
0d48ee34
MJ
1681}
1682
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter; a
	     non-negative offset means a load from that parameter's
	     aggregate, otherwise a plain pass-through.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	/* Neither a constant nor parameter-derived: nothing useful.  */
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      /* Offsets in the jump function are relative to the argument.  */
      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
1729
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	 _12 = *a_11(D).dim[0].stride;
	 ...
	 # iftmp.22_9 = PHI <_12(2), 1(3)>
	 ...
	 parm.6.dim[0].stride = iftmp.22_9;
	 ...
	 __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as
	 a PASS-THROUGH jump function with ASSERT_EXPR operation with
	 operand 1 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      /* Exactly one PHI argument must be an interprocedural invariant.  */
      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      /* Known constant: FORMAL_ID stays -1.  */
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an
	     operation with one operand, here we only allow tc_unary
	     operation to avoid possible problem.  Then we can use
	     (opclass == tc_unary) or not to distinguish unary and
	     binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    /* One operand must be an invariant; put it into OPERAND and
	       continue analysis on the other one, swapping/commuting the
	       operation if the invariant came first.  */
	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  /* The remaining operand must come from an unmodified parameter: either a
     load from (or through) it, or the parameter's default definition.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
1920
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  /* Only plain assignments can be analyzed as stores with a known value.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* Decompose the store destination into base + constant offset/size.  */
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      /* The aggregate is passed by reference: the store must be through a
	 MEM_REF on exactly the tracked pointer with zero offset.  */
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  /* Work out whether the stored value is an IP invariant or derived from a
     formal parameter of the caller.  */
  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
1962
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  /* Analysis disabled via --param ipa-max-agg-items=0.  */
  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  /* The aggregate's address is taken directly; strip the ADDR_EXPR
	     and find the underlying declaration.  */
	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      /* Aggregate passed by value.  */
      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip PHIs that cannot affect R, following the unique relevant
	     incoming virtual operand if there is one.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot describe terminates the walk
	     conservatively.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
2111
46771da5 2112
5d5f1e95
KV
2113/* Return the Ith param type of callee associated with call graph
2114 edge E. */
2115
2116tree
06d65050
JH
2117ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2118{
2119 int n;
2120 tree type = (e->callee
67348ccc 2121 ? TREE_TYPE (e->callee->decl)
06d65050
JH
2122 : gimple_call_fntype (e->call_stmt));
2123 tree t = TYPE_ARG_TYPES (type);
2124
2125 for (n = 0; n < i; n++)
2126 {
2127 if (!t)
2128 break;
2129 t = TREE_CHAIN (t);
2130 }
2131 if (t)
2132 return TREE_VALUE (t);
2133 if (!e->callee)
2134 return NULL;
67348ccc 2135 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
2136 for (n = 0; n < i; n++)
2137 {
2138 if (!t)
2139 return NULL;
2140 t = TREE_CHAIN (t);
2141 }
2142 if (t)
2143 return TREE_TYPE (t);
2144 return NULL;
2145}
2146
86cd0334
MJ
2147/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2148 allocated structure or a previously existing one shared with other jump
2149 functions and/or transformation summaries. */
2150
2151ipa_bits *
2152ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2153{
2154 ipa_bits tmp;
2155 tmp.value = value;
2156 tmp.mask = mask;
2157
2158 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2159 if (*slot)
2160 return *slot;
2161
2162 ipa_bits *res = ggc_alloc<ipa_bits> ();
2163 res->value = value;
2164 res->mask = mask;
2165 *slot = res;
2166
2167 return res;
2168}
2169
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  /* Hash-consed: identical (value, mask) pairs share one structure.  */
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
2179
2180/* Return a pointer to a value_range just like *TMP, but either find it in
2181 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2182
028d81b1
AH
2183static value_range *
2184ipa_get_value_range (value_range *tmp)
86cd0334 2185{
028d81b1 2186 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
2187 if (*slot)
2188 return *slot;
2189
4ba9fb0a 2190 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
86cd0334
MJ
2191 *vr = *tmp;
2192 *slot = vr;
2193
2194 return vr;
2195}
2196
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies of
   value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  /* Build a temporary range and let the hash-consing overload share it.  */
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}
2207
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  /* The stored pointer is shared with other jump functions via the hash.  */
  jf->m_vr = ipa_get_value_range (type, min, max);
}
2218
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new on in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  /* The stored pointer is shared with other jump functions via the hash.  */
  jf->m_vr = ipa_get_value_range (tmp);
}
2227
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;
  value_range vr;

  /* Nothing to do for argument-less calls or if jump functions were already
     computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  /* Internal calls and calls in functions where IPA analysis is forbidden
     keep the freshly cleared (unknown) jump functions.  */
  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* Record the polymorphic call context of pointer arguments for
	 devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Value-range part of the jump function.  For pointers only the
	 known-nonzero property is useful and is encoded as ~[0, 0].  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && !vr.undefined_p ())
	    {
	      value_range resvr;
	      /* Convert the range to the formal parameter's type before
		 recording it.  */
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &vr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Known-bits part of the jump function.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  /* Encode pointer alignment as known low bits.  */
	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Main classification of the argument: constant, pass-through of a
	 formal parameter, or a more complex derived value.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  /* Drop the context vector if no argument had a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2402
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  /* Analyze the real target behind aliases/thunks.  */
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ())
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
2429
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* Accept either "rec.field" (COMPONENT_REF) or a MEM_REF access with a
     byte offset selecting the field.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The loaded object must be a PARM_DECL whose type has the structure of a
     member pointer (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the field
	 must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Raw MEM_REF form: the byte offset must match the field's position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2481
2482/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2483
3e293154
MJ
2484static bool
2485ipa_is_ssa_with_stmt_def (tree t)
2486{
2487 if (TREE_CODE (t) == SSA_NAME
2488 && !SSA_NAME_IS_DEFAULT_DEF (t))
2489 return true;
2490 else
2491 return false;
2492}
2493
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true record is as a destination of polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the flags that subsequent analysis may set.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  /* Record in the caller's summary that this parameter feeds an indirect
     (and possibly polymorphic) call.  */
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}
2516
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Easy case: the called pointer is an unmodified formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  /* Next case: the called pointer is loaded from an aggregate that is a
     formal parameter (or pointed to by one).  */
  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one of the PHI operands may be the pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip a possible conversion of the pfn before the bit test.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The value tested must be loaded from the same member pointer parameter;
     which field carries the virtual bit is a target property.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2713
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is directly an unmodified formal parameter.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* If the dynamic type may have changed since the function started,
	 the parameter's static type cannot be trusted.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      /* The object may be an ancestor (base sub-object) of a formal
	 parameter; find the parameter and the offset within it.  */
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2770
2771/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2772 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2773 containing intermediate information about each formal parameter. */
b258210c
MJ
2774
2775static void
56b40062 2776ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2777{
2778 tree target = gimple_call_fn (call);
b786d31f
JH
2779
2780 if (!target
2781 || (TREE_CODE (target) != SSA_NAME
2782 && !virtual_method_call_p (target)))
2783 return;
b258210c 2784
7d0aa05b 2785 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2786 /* If we previously turned the call into a direct call, there is
2787 no need to analyze. */
b786d31f 2788 if (cs && !cs->indirect_unknown_callee)
25583c4f 2789 return;
7d0aa05b 2790
a5b58b28 2791 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2792 {
7d0aa05b
JH
2793 tree instance;
2794 tree target = gimple_call_fn (call);
6f8091fc
JH
2795 ipa_polymorphic_call_context context (current_function_decl,
2796 target, call, &instance);
7d0aa05b 2797
ba392339
JH
2798 gcc_checking_assert (cs->indirect_info->otr_type
2799 == obj_type_ref_class (target));
2800 gcc_checking_assert (cs->indirect_info->otr_token
2801 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2802
29c43c83
JH
2803 cs->indirect_info->vptr_changed
2804 = !context.get_dynamic_type (instance,
2805 OBJ_TYPE_REF_OBJECT (target),
c628d1c3
MJ
2806 obj_type_ref_class (target), call,
2807 &fbi->aa_walk_budget);
0127c169 2808 cs->indirect_info->context = context;
7d0aa05b
JH
2809 }
2810
b258210c 2811 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2812 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2813 else if (virtual_method_call_p (target))
8aab5218 2814 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2815}
2816
2817
e33c6cd6 2818/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2819 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2820 formal parameters are called. */
be95e2b9 2821
3e293154 2822static void
355fe088 2823ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2824{
726a989a 2825 if (is_gimple_call (stmt))
538dd0b7 2826 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2827}
2828
2829/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2830 If OP is a parameter declaration, mark it as used in the info structure
2831 passed in DATA. */
2832
2833static bool
355fe088 2834visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2835{
99b1c316 2836 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2837
2838 op = get_base_address (op);
2839 if (op
2840 && TREE_CODE (op) == PARM_DECL)
2841 {
2842 int index = ipa_get_param_decl_index (info, op);
2843 gcc_assert (index >= 0);
310bc633 2844 ipa_set_param_used (info, index, true);
062c604f
MJ
2845 }
2846
2847 return false;
3e293154
MJ
2848}
2849
8aab5218
MJ
2850/* Scan the statements in BB and inspect the uses of formal parameters. Store
2851 the findings in various structures of the associated ipa_node_params
2852 structure, such as parameter flags, notes etc. FBI holds various data about
2853 the function being analyzed. */
be95e2b9 2854
062c604f 2855static void
56b40062 2856ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2857{
726a989a 2858 gimple_stmt_iterator gsi;
8aab5218
MJ
2859 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2860 {
355fe088 2861 gimple *stmt = gsi_stmt (gsi);
3e293154 2862
8aab5218
MJ
2863 if (is_gimple_debug (stmt))
2864 continue;
3e293154 2865
8aab5218
MJ
2866 ipa_analyze_stmt_uses (fbi, stmt);
2867 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2868 visit_ref_for_mod_analysis,
2869 visit_ref_for_mod_analysis,
2870 visit_ref_for_mod_analysis);
5fe8e757 2871 }
8aab5218
MJ
2872 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2873 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2874 visit_ref_for_mod_analysis,
2875 visit_ref_for_mod_analysis,
2876 visit_ref_for_mod_analysis);
2877}
2878
13586172
MJ
2879/* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2880
2881static bool
2882load_from_dereferenced_name (tree expr, tree name)
2883{
2884 tree base = get_base_address (expr);
2885 return (TREE_CODE (base) == MEM_REF
2886 && TREE_OPERAND (base, 0) == name);
2887}
2888
8aab5218
MJ
2889/* Calculate controlled uses of parameters of NODE. */
2890
2891static void
2892ipa_analyze_controlled_uses (struct cgraph_node *node)
2893{
a4a3cdd0 2894 ipa_node_params *info = ipa_node_params_sum->get (node);
5fe8e757 2895
8aab5218 2896 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2897 {
2898 tree parm = ipa_get_param (info, i);
13586172
MJ
2899 int call_uses = 0;
2900 bool load_dereferenced = false;
4502fe8d 2901
062c604f
MJ
2902 /* For SSA regs see if parameter is used. For non-SSA we compute
2903 the flag during modification analysis. */
4502fe8d
MJ
2904 if (is_gimple_reg (parm))
2905 {
67348ccc 2906 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2907 parm);
2908 if (ddef && !has_zero_uses (ddef))
2909 {
2910 imm_use_iterator imm_iter;
13586172 2911 gimple *stmt;
4502fe8d
MJ
2912
2913 ipa_set_param_used (info, i, true);
13586172
MJ
2914 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2915 {
2916 if (is_gimple_debug (stmt))
2917 continue;
2918
2919 int all_stmt_uses = 0;
2920 use_operand_p use_p;
2921 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2922 all_stmt_uses++;
2923
2924 if (is_gimple_call (stmt))
2925 {
2926 if (gimple_call_internal_p (stmt))
2927 {
2928 call_uses = IPA_UNDESCRIBED_USE;
2929 break;
2930 }
2931 int recognized_stmt_uses;
2932 if (gimple_call_fn (stmt) == ddef)
2933 recognized_stmt_uses = 1;
2934 else
2935 recognized_stmt_uses = 0;
2936 unsigned arg_count = gimple_call_num_args (stmt);
2937 for (unsigned i = 0; i < arg_count; i++)
2938 {
2939 tree arg = gimple_call_arg (stmt, i);
2940 if (arg == ddef)
2941 recognized_stmt_uses++;
2942 else if (load_from_dereferenced_name (arg, ddef))
2943 {
2944 load_dereferenced = true;
2945 recognized_stmt_uses++;
2946 }
2947 }
2948
2949 if (recognized_stmt_uses != all_stmt_uses)
2950 {
2951 call_uses = IPA_UNDESCRIBED_USE;
2952 break;
2953 }
2954 if (call_uses >= 0)
2955 call_uses += all_stmt_uses;
2956 }
2957 else if (gimple_assign_single_p (stmt))
2958 {
2959 tree rhs = gimple_assign_rhs1 (stmt);
2960 if (all_stmt_uses != 1
2961 || !load_from_dereferenced_name (rhs, ddef))
2962 {
2963 call_uses = IPA_UNDESCRIBED_USE;
2964 break;
2965 }
2966 load_dereferenced = true;
2967 }
2968 else
2969 {
2970 call_uses = IPA_UNDESCRIBED_USE;
2971 break;
2972 }
2973 }
4502fe8d
MJ
2974 }
2975 else
13586172 2976 call_uses = 0;
4502fe8d
MJ
2977 }
2978 else
13586172
MJ
2979 call_uses = IPA_UNDESCRIBED_USE;
2980 ipa_set_controlled_uses (info, i, call_uses);
2981 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
062c604f 2982 }
8aab5218 2983}
062c604f 2984
8aab5218 2985/* Free stuff in BI. */
062c604f 2986
8aab5218
MJ
2987static void
2988free_ipa_bb_info (struct ipa_bb_info *bi)
2989{
2990 bi->cg_edges.release ();
2991 bi->param_aa_statuses.release ();
3e293154
MJ
2992}
2993
8aab5218 2994/* Dominator walker driving the analysis. */
2c9561b5 2995
8aab5218 2996class analysis_dom_walker : public dom_walker
2c9561b5 2997{
8aab5218 2998public:
56b40062 2999 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 3000 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 3001
3daacdcd 3002 virtual edge before_dom_children (basic_block);
8aab5218
MJ
3003
3004private:
56b40062 3005 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
3006};
3007
3daacdcd 3008edge
8aab5218
MJ
3009analysis_dom_walker::before_dom_children (basic_block bb)
3010{
3011 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3012 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 3013 return NULL;
2c9561b5
MJ
3014}
3015
c3431191
ML
3016/* Release body info FBI. */
3017
3018void
3019ipa_release_body_info (struct ipa_func_body_info *fbi)
3020{
3021 int i;
3022 struct ipa_bb_info *bi;
3023
3024 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3025 free_ipa_bb_info (bi);
3026 fbi->bb_infos.release ();
3027}
3028
026c3cfd 3029/* Initialize the array describing properties of formal parameters
dd5a833e
MS
3030 of NODE, analyze their uses and compute jump functions associated
3031 with actual arguments of calls from within NODE. */
062c604f
MJ
3032
3033void
3034ipa_analyze_node (struct cgraph_node *node)
3035{
56b40062 3036 struct ipa_func_body_info fbi;
99b1c316 3037 class ipa_node_params *info;
062c604f 3038
57dbdc5a
MJ
3039 ipa_check_create_node_params ();
3040 ipa_check_create_edge_args ();
a4a3cdd0 3041 info = ipa_node_params_sum->get_create (node);
8aab5218
MJ
3042
3043 if (info->analysis_done)
3044 return;
3045 info->analysis_done = 1;
3046
3047 if (ipa_func_spec_opts_forbid_analysis_p (node))
3048 {
3049 for (int i = 0; i < ipa_get_param_count (info); i++)
3050 {
3051 ipa_set_param_used (info, i, true);
3052 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
3053 }
3054 return;
3055 }
3056
3057 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3058 push_cfun (func);
3059 calculate_dominance_info (CDI_DOMINATORS);
062c604f 3060 ipa_initialize_node_params (node);
8aab5218 3061 ipa_analyze_controlled_uses (node);
062c604f 3062
8aab5218 3063 fbi.node = node;
a4a3cdd0 3064 fbi.info = info;
8aab5218 3065 fbi.bb_infos = vNULL;
cb3874dc 3066 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
8aab5218 3067 fbi.param_count = ipa_get_param_count (info);
fdfd7f53 3068 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
062c604f 3069
8aab5218
MJ
3070 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3071 {
3072 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3073 bi->cg_edges.safe_push (cs);
3074 }
062c604f 3075
8aab5218
MJ
3076 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3077 {
3078 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3079 bi->cg_edges.safe_push (cs);
3080 }
3081
3082 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3083
c3431191 3084 ipa_release_body_info (&fbi);
8aab5218 3085 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 3086 pop_cfun ();
062c604f 3087}
062c604f 3088
be95e2b9 3089/* Update the jump functions associated with call graph edge E when the call
3e293154 3090 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 3091 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 3092
3e293154
MJ
3093static void
3094update_jump_functions_after_inlining (struct cgraph_edge *cs,
3095 struct cgraph_edge *e)
3096{
a4a3cdd0
MJ
3097 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3098 ipa_edge_args *args = ipa_edge_args_sum->get (e);
a33c028e
JH
3099 if (!args)
3100 return;
3e293154
MJ
3101 int count = ipa_get_cs_argument_count (args);
3102 int i;
3103
3104 for (i = 0; i < count; i++)
3105 {
b258210c 3106 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 3107 class ipa_polymorphic_call_context *dst_ctx
5ce97055 3108 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 3109
eb270950
FX
3110 if (dst->agg.items)
3111 {
3112 struct ipa_agg_jf_item *item;
3113 int j;
3114
3115 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3116 {
3117 int dst_fid;
3118 struct ipa_jump_func *src;
3119
3120 if (item->jftype != IPA_JF_PASS_THROUGH
3121 && item->jftype != IPA_JF_LOAD_AGG)
3122 continue;
3123
3124 dst_fid = item->value.pass_through.formal_id;
1c3c3f45 3125 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
eb270950
FX
3126 {
3127 item->jftype = IPA_JF_UNKNOWN;
3128 continue;
3129 }
3130
3131 item->value.pass_through.formal_id = -1;
3132 src = ipa_get_ith_jump_func (top, dst_fid);
3133 if (src->type == IPA_JF_CONST)
3134 {
3135 if (item->jftype == IPA_JF_PASS_THROUGH
3136 && item->value.pass_through.operation == NOP_EXPR)
3137 {
3138 item->jftype = IPA_JF_CONST;
3139 item->value.constant = src->value.constant.value;
3140 continue;
3141 }
3142 }
3143 else if (src->type == IPA_JF_PASS_THROUGH
3144 && src->value.pass_through.operation == NOP_EXPR)
3145 {
3146 if (item->jftype == IPA_JF_PASS_THROUGH
3147 || !item->value.load_agg.by_ref
3148 || src->value.pass_through.agg_preserved)
3149 item->value.pass_through.formal_id
3150 = src->value.pass_through.formal_id;
3151 }
3152 else if (src->type == IPA_JF_ANCESTOR)
3153 {
3154 if (item->jftype == IPA_JF_PASS_THROUGH)
3155 {
3156 if (!src->value.ancestor.offset)
3157 item->value.pass_through.formal_id
3158 = src->value.ancestor.formal_id;
3159 }
3160 else if (src->value.ancestor.agg_preserved)
3161 {
3162 gcc_checking_assert (item->value.load_agg.by_ref);
3163
3164 item->value.pass_through.formal_id
3165 = src->value.ancestor.formal_id;
3166 item->value.load_agg.offset
3167 += src->value.ancestor.offset;
3168 }
3169 }
3170
3171 if (item->value.pass_through.formal_id < 0)
3172 item->jftype = IPA_JF_UNKNOWN;
3173 }
3174 }
3175
1c3c3f45
FX
3176 if (!top)
3177 {
3178 ipa_set_jf_unknown (dst);
3179 continue;
3180 }
3181
685b0d13
MJ
3182 if (dst->type == IPA_JF_ANCESTOR)
3183 {
b258210c 3184 struct ipa_jump_func *src;
8b7773a4 3185 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 3186 class ipa_polymorphic_call_context *src_ctx
5ce97055 3187 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 3188
b258210c
MJ
3189 /* Variable number of arguments can cause havoc if we try to access
3190 one that does not exist in the inlined edge. So make sure we
3191 don't. */
8b7773a4 3192 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 3193 {
04be694e 3194 ipa_set_jf_unknown (dst);
b258210c
MJ
3195 continue;
3196 }
3197
8b7773a4
MJ
3198 src = ipa_get_ith_jump_func (top, dst_fid);
3199
5ce97055
JH
3200 if (src_ctx && !src_ctx->useless_p ())
3201 {
99b1c316 3202 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3203
3204 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3205 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 3206 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3207 ctx.offset_by (dst->value.ancestor.offset);
3208 if (!ctx.useless_p ())
3209 {
a7d1f3fe
ML
3210 if (!dst_ctx)
3211 {
3212 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3213 count, true);
a7d1f3fe
ML
3214 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3215 }
3216
3217 dst_ctx->combine_with (ctx);
5ce97055
JH
3218 }
3219 }
3220
eb270950
FX
3221 /* Parameter and argument in ancestor jump function must be pointer
3222 type, which means access to aggregate must be by-reference. */
3223 gcc_assert (!src->agg.items || src->agg.by_ref);
3224
3225 if (src->agg.items && dst->value.ancestor.agg_preserved)
8b7773a4
MJ
3226 {
3227 struct ipa_agg_jf_item *item;
3228 int j;
3229
3230 /* Currently we do not produce clobber aggregate jump functions,
3231 replace with merging when we do. */
3232 gcc_assert (!dst->agg.items);
3233
9771b263 3234 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3235 dst->agg.by_ref = src->agg.by_ref;
9771b263 3236 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
3237 item->offset -= dst->value.ancestor.offset;
3238 }
3239
3b97a5c7
MJ
3240 if (src->type == IPA_JF_PASS_THROUGH
3241 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
3242 {
3243 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3244 dst->value.ancestor.agg_preserved &=
3245 src->value.pass_through.agg_preserved;
3246 }
b258210c
MJ
3247 else if (src->type == IPA_JF_ANCESTOR)
3248 {
3249 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3250 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
3251 dst->value.ancestor.agg_preserved &=
3252 src->value.ancestor.agg_preserved;
b258210c
MJ
3253 }
3254 else
04be694e 3255 ipa_set_jf_unknown (dst);
b258210c
MJ
3256 }
3257 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 3258 {
b258210c
MJ
3259 struct ipa_jump_func *src;
3260 /* We must check range due to calls with variable number of arguments
3261 and we cannot combine jump functions with operations. */
3262 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 3263 && (top && dst->value.pass_through.formal_id
b258210c
MJ
3264 < ipa_get_cs_argument_count (top)))
3265 {
8b7773a4
MJ
3266 int dst_fid = dst->value.pass_through.formal_id;
3267 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 3268 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 3269 class ipa_polymorphic_call_context *src_ctx
5ce97055 3270 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 3271
5ce97055
JH
3272 if (src_ctx && !src_ctx->useless_p ())
3273 {
99b1c316 3274 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3275
3276 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3277 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 3278 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3279 if (!ctx.useless_p ())
3280 {
3281 if (!dst_ctx)
3282 {
3283 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3284 count, true);
5ce97055
JH
3285 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3286 }
3287 dst_ctx->combine_with (ctx);
3288 }
3289 }
b8f6e610
MJ
3290 switch (src->type)
3291 {
3292 case IPA_JF_UNKNOWN:
04be694e 3293 ipa_set_jf_unknown (dst);
b8f6e610 3294 break;
b8f6e610
MJ
3295 case IPA_JF_CONST:
3296 ipa_set_jf_cst_copy (dst, src);
3297 break;
3298
3299 case IPA_JF_PASS_THROUGH:
3300 {
3301 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3302 enum tree_code operation;
3303 operation = ipa_get_jf_pass_through_operation (src);
3304
3305 if (operation == NOP_EXPR)
3306 {
3b97a5c7 3307 bool agg_p;
b8f6e610
MJ
3308 agg_p = dst_agg_p
3309 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 3310 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 3311 }
a2b4c188
KV
3312 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3313 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
3314 else
3315 {
3316 tree operand = ipa_get_jf_pass_through_operand (src);
3317 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3318 operation);
3319 }
3320 break;
3321 }
3322 case IPA_JF_ANCESTOR:
3323 {
3b97a5c7 3324 bool agg_p;
b8f6e610
MJ
3325 agg_p = dst_agg_p
3326 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
3327 ipa_set_ancestor_jf (dst,
3328 ipa_get_jf_ancestor_offset (src),
b8f6e610 3329 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 3330 agg_p);
b8f6e610
MJ
3331 break;
3332 }
3333 default:
3334 gcc_unreachable ();
3335 }
8b7773a4
MJ
3336
3337 if (src->agg.items
b8f6e610 3338 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
3339 {
3340 /* Currently we do not produce clobber aggregate jump
3341 functions, replace with merging when we do. */
3342 gcc_assert (!dst->agg.items);
3343
3344 dst->agg.by_ref = src->agg.by_ref;
9771b263 3345 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3346 }
b258210c
MJ
3347 }
3348 else
04be694e 3349 ipa_set_jf_unknown (dst);
3e293154 3350 }
b258210c
MJ
3351 }
3352}
3353
5ce97055
JH
3354/* If TARGET is an addr_expr of a function declaration, make it the
3355 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3356 Otherwise, return NULL. */
b258210c 3357
3949c4a7 3358struct cgraph_edge *
5ce97055
JH
3359ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3360 bool speculative)
b258210c
MJ
3361{
3362 struct cgraph_node *callee;
48b1474e 3363 bool unreachable = false;
b258210c 3364
ceeffab0
MJ
3365 if (TREE_CODE (target) == ADDR_EXPR)
3366 target = TREE_OPERAND (target, 0);
b258210c 3367 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
3368 {
3369 target = canonicalize_constructor_val (target, NULL);
3370 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3371 {
db66bf68
JH
3372 /* Member pointer call that goes through a VMT lookup. */
3373 if (ie->indirect_info->member_ptr
3374 /* Or if target is not an invariant expression and we do not
3375 know if it will evaulate to function at runtime.
3376 This can happen when folding through &VAR, where &VAR
3377 is IP invariant, but VAR itself is not.
3378
3379 TODO: Revisit this when GCC 5 is branched. It seems that
3380 member_ptr check is not needed and that we may try to fold
3381 the expression and see if VAR is readonly. */
3382 || !is_gimple_ip_invariant (target))
3383 {
3384 if (dump_enabled_p ())
3385 {
4f5b9c80 3386 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
3387 "discovered direct call non-invariant %s\n",
3388 ie->caller->dump_name ());
db66bf68
JH
3389 }
3390 return NULL;
3391 }
3392
c13bc3d9 3393
2b5f0895
XDL
3394 if (dump_enabled_p ())
3395 {
4f5b9c80 3396 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 3397 "discovered direct call to non-function in %s, "
807b7d62 3398 "making it __builtin_unreachable\n",
464d0118 3399 ie->caller->dump_name ());
2b5f0895 3400 }
3c9e6fca 3401
48b1474e 3402 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3403 callee = cgraph_node::get_create (target);
48b1474e 3404 unreachable = true;
a0a7b611 3405 }
48b1474e 3406 else
d52f5295 3407 callee = cgraph_node::get (target);
a0a7b611 3408 }
48b1474e 3409 else
d52f5295 3410 callee = cgraph_node::get (target);
a0a7b611
JH
3411
3412 /* Because may-edges are not explicitely represented and vtable may be external,
3413 we may create the first reference to the object in the unit. */
a62bfab5 3414 if (!callee || callee->inlined_to)
a0a7b611 3415 {
a0a7b611
JH
3416
3417 /* We are better to ensure we can refer to it.
3418 In the case of static functions we are out of luck, since we already
3419 removed its body. In the case of public functions we may or may
3420 not introduce the reference. */
3421 if (!canonicalize_constructor_val (target, NULL)
3422 || !TREE_PUBLIC (target))
3423 {
3424 if (dump_file)
3425 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
845bb366 3426 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
3427 ie->caller->dump_name (),
3428 ie->callee->dump_name ());
a0a7b611
JH
3429 return NULL;
3430 }
d52f5295 3431 callee = cgraph_node::get_create (target);
a0a7b611 3432 }
2b5f0895 3433
0127c169
JH
3434 /* If the edge is already speculated. */
3435 if (speculative && ie->speculative)
3436 {
845bb366 3437 if (dump_file)
0127c169 3438 {
845bb366
JH
3439 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3440 if (!e2)
3441 {
3442 if (dump_file)
3443 fprintf (dump_file, "ipa-prop: Discovered call to a "
3444 "speculative target (%s -> %s) but the call is "
3445 "already speculated to different target. "
3446 "Giving up.\n",
3447 ie->caller->dump_name (), callee->dump_name ());
3448 }
3449 else
3450 {
3451 if (dump_file)
3452 fprintf (dump_file,
3453 "ipa-prop: Discovered call to a speculative target "
3454 "(%s -> %s) this agree with previous speculation.\n",
3455 ie->caller->dump_name (), callee->dump_name ());
3456 }
0127c169
JH
3457 }
3458 return NULL;
3459 }
3460
2b5f0895
XDL
3461 if (!dbg_cnt (devirt))
3462 return NULL;
3463
1dbee8c9 3464 ipa_check_create_node_params ();
ceeffab0 3465
67914693 3466 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 3467 the cgraph node too early. */
a62bfab5 3468 gcc_assert (!callee->inlined_to);
17afc0fe 3469
48b1474e 3470 if (dump_file && !unreachable)
b258210c 3471 {
5ce97055 3472 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 3473 "(%s -> %s), for stmt ",
b258210c 3474 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 3475 speculative ? "speculative" : "known",
464d0118
ML
3476 ie->caller->dump_name (),
3477 callee->dump_name ());
b258210c
MJ
3478 if (ie->call_stmt)
3479 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3480 else
3481 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 3482 }
2b5f0895
XDL
3483 if (dump_enabled_p ())
3484 {
4f5b9c80 3485 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62 3486 "converting indirect call in %s to direct call to %s\n",
3629ff8a 3487 ie->caller->dump_name (), callee->dump_name ());
2b5f0895 3488 }
5ce97055 3489 if (!speculative)
d8d5aef1
JH
3490 {
3491 struct cgraph_edge *orig = ie;
27c5a177 3492 ie = cgraph_edge::make_direct (ie, callee);
d8d5aef1
JH
3493 /* If we resolved speculative edge the cost is already up to date
3494 for direct call (adjusted by inline_edge_duplication_hook). */
3495 if (ie == orig)
3496 {
56f62793 3497 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
3498 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3499 - eni_size_weights.call_cost);
3500 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3501 - eni_time_weights.call_cost);
3502 }
3503 }
5ce97055
JH
3504 else
3505 {
3506 if (!callee->can_be_discarded_p ())
3507 {
3508 cgraph_node *alias;
3509 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3510 if (alias)
3511 callee = alias;
3512 }
d8d5aef1 3513 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3514 ie = ie->make_speculative
1bad9c18 3515 (callee, ie->count.apply_scale (8, 10));
5ce97055 3516 }
749aa96d 3517
b258210c 3518 return ie;
3e293154
MJ
3519}
3520
91bb9f80
MJ
3521/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3522 CONSTRUCTOR and return it. Return NULL if the search fails for some
3523 reason. */
3524
3525static tree
3526find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3527{
3528 tree type = TREE_TYPE (constructor);
3529 if (TREE_CODE (type) != ARRAY_TYPE
3530 && TREE_CODE (type) != RECORD_TYPE)
3531 return NULL;
3532
3533 unsigned ix;
3534 tree index, val;
3535 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3536 {
3537 HOST_WIDE_INT elt_offset;
3538 if (TREE_CODE (type) == ARRAY_TYPE)
3539 {
3540 offset_int off;
3541 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3542 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3543
3544 if (index)
3545 {
db9bbdec
RB
3546 if (TREE_CODE (index) == RANGE_EXPR)
3547 off = wi::to_offset (TREE_OPERAND (index, 0));
3548 else
3549 off = wi::to_offset (index);
91bb9f80
MJ
3550 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3551 {
3552 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3553 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3554 off = wi::sext (off - wi::to_offset (low_bound),
3555 TYPE_PRECISION (TREE_TYPE (index)));
3556 }
3557 off *= wi::to_offset (unit_size);
db9bbdec
RB
3558 /* ??? Handle more than just the first index of a
3559 RANGE_EXPR. */
91bb9f80
MJ
3560 }
3561 else
3562 off = wi::to_offset (unit_size) * ix;
3563
3564 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3565 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3566 continue;
3567 elt_offset = off.to_shwi ();
3568 }
3569 else if (TREE_CODE (type) == RECORD_TYPE)
3570 {
3571 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3572 if (DECL_BIT_FIELD (index))
3573 continue;
3574 elt_offset = int_bit_position (index);
3575 }
3576 else
3577 gcc_unreachable ();
3578
3579 if (elt_offset > req_offset)
3580 return NULL;
3581
3582 if (TREE_CODE (val) == CONSTRUCTOR)
3583 return find_constructor_constant_at_offset (val,
3584 req_offset - elt_offset);
3585
3586 if (elt_offset == req_offset
3587 && is_gimple_reg_type (TREE_TYPE (val))
3588 && is_gimple_ip_invariant (val))
3589 return val;
3590 }
3591 return NULL;
3592}
3593
3594/* Check whether SCALAR could be used to look up an aggregate interprocedural
3595 invariant from a static constructor and if so, return it. Otherwise return
3596 NULL. */
3597
3598static tree
3599ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3600{
3601 if (by_ref)
3602 {
3603 if (TREE_CODE (scalar) != ADDR_EXPR)
3604 return NULL;
3605 scalar = TREE_OPERAND (scalar, 0);
3606 }
3607
8813a647 3608 if (!VAR_P (scalar)
91bb9f80
MJ
3609 || !is_global_var (scalar)
3610 || !TREE_READONLY (scalar)
3611 || !DECL_INITIAL (scalar)
3612 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3613 return NULL;
3614
3615 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3616}
3617
eb270950
FX
3618/* Retrieve value from AGG, a set of known offset/value for an aggregate or
3619 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3620 return NULL if there is none. BY_REF specifies whether the value has to be
3621 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3622 the boolean it points to is set to true if the value comes from an
3623 initializer of a constant. */
8b7773a4
MJ
3624
3625tree
00dcc88a 3626ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
91bb9f80
MJ
3627 HOST_WIDE_INT offset, bool by_ref,
3628 bool *from_global_constant)
8b7773a4 3629{
eb270950 3630 struct ipa_agg_value *item;
8b7773a4
MJ
3631 int i;
3632
91bb9f80
MJ
3633 if (scalar)
3634 {
3635 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3636 if (res)
3637 {
3638 if (from_global_constant)
3639 *from_global_constant = true;
3640 return res;
3641 }
3642 }
3643
3644 if (!agg
3645 || by_ref != agg->by_ref)
8b7773a4
MJ
3646 return NULL;
3647
eb270950 3648 FOR_EACH_VEC_ELT (agg->items, i, item)
2c9561b5
MJ
3649 if (item->offset == offset)
3650 {
3651 /* Currently we do not have clobber values, return NULL for them once
3652 we do. */
3653 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3654 if (from_global_constant)
3655 *from_global_constant = false;
2c9561b5
MJ
3656 return item->value;
3657 }
8b7773a4
MJ
3658 return NULL;
3659}
3660
4502fe8d 3661/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3662 reference description RDESC. Return true if the reference has been
3663 successfully found and removed. */
4502fe8d 3664
568cda29 3665static bool
5e20cdc9 3666remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3667{
3668 struct ipa_ref *to_del;
3669 struct cgraph_edge *origin;
3670
3671 origin = rdesc->cs;
a854f856
MJ
3672 if (!origin)
3673 return false;
d122681a
ML
3674 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3675 origin->lto_stmt_uid);
568cda29
MJ
3676 if (!to_del)
3677 return false;
3678
d122681a 3679 to_del->remove_reference ();
4502fe8d 3680 if (dump_file)
464d0118 3681 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3629ff8a 3682 origin->caller->dump_name (), symbol->dump_name ());
568cda29 3683 return true;
4502fe8d
MJ
3684}
3685
3686/* If JFUNC has a reference description with refcount different from
3687 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3688 NULL. JFUNC must be a constant jump function. */
3689
3690static struct ipa_cst_ref_desc *
3691jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3692{
3693 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3694 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3695 return rdesc;
3696 else
3697 return NULL;
3698}
3699
568cda29
MJ
3700/* If the value of constant jump function JFUNC is an address of a function
3701 declaration, return the associated call graph node. Otherwise return
3702 NULL. */
3703
13586172
MJ
3704static symtab_node *
3705symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
568cda29
MJ
3706{
3707 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3708 tree cst = ipa_get_jf_constant (jfunc);
3709 if (TREE_CODE (cst) != ADDR_EXPR
13586172
MJ
3710 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3711 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
568cda29
MJ
3712 return NULL;
3713
13586172 3714 return symtab_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3715}
3716
3717
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      /* The last described use is gone; drop the reference itself.  */
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
3739
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      target = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      agg.release ();
      /* A value loaded from a non-constant aggregate is only trustworthy if
	 the memory is known not to have been modified before the call.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      /* The newly direct edge carries its own reference; release the one the
	 jump function's rdesc described.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3794
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  /* Calls that provably cannot be devirtualized to any valid target are
     redirected to __builtin_unreachable.  */
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
3817
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the result is only a
		     speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: combine the known polymorphic context with the context of
     the call site itself and query the type inheritance graph.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A complete target list overrides any earlier speculation.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3932
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Fetch the successor first; IE may be redirected or removed below.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember existing speculative targets so that re-discovering one of
	 them can be recognized below.  */
      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      /* Re-map the edge onto the corresponding parameter of the new
		 inline root.  */
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
4073
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Inlined callees are recursed into; edges leaving the inline tree get
     their jump functions updated in place.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
4102
4502fe8d
MJ
4103/* Combine two controlled uses counts as done during inlining. */
4104
4105static int
4106combine_controlled_uses_counters (int c, int d)
4107{
4108 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4109 return IPA_UNDESCRIBED_USE;
4110 else
4111 return c + d - 1;
4112}
4113
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  /* No remaining controlled uses in an IPA-CP clone means the
	     cloning-created reference can be dropped.  */
	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || (TREE_CODE (TREE_OPERAND (cst, 0))
					   == VAR_DECL)));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool removed = remove_described_reference (n, rdesc);
		  /* The reference might have been removed by IPA-CP.  */
		  if (removed
		      && ipa_get_param_load_dereferenced (old_root_info, i))
		    {
		      new_root->create_reference (n, IPA_REF_LOAD, NULL);
		      if (dump_file)
			fprintf (dump_file, "ipa-prop: ...replaced it with "
				 "LOAD one from %s to %s.\n",
				 new_root->dump_name (), n->dump_name ());
		    }

		  /* Walk up the chain of IPA-CP clones and remove their
		     cloning-created references too.  */
		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the formal parameter count of the callee cannot be
     tracked; mark them as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
4251
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  /* The edge args can only be discarded if no constant jump function still
     carries a reference description that may be consulted later.  */
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4297
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
4314
771578a0 4315/* Free all ipa_edge structures. */
be95e2b9 4316
518dc859 4317void
771578a0 4318ipa_free_all_edge_args (void)
518dc859 4319{
6fe906a3 4320 if (!ipa_edge_args_sum)
9771b263
DN
4321 return;
4322
ddf628e4 4323 ggc_delete (ipa_edge_args_sum);
6fe906a3 4324 ipa_edge_args_sum = NULL;
518dc859
RL
4325}
4326
771578a0 4327/* Free all ipa_node_params structures. */
be95e2b9 4328
518dc859 4329void
771578a0 4330ipa_free_all_node_params (void)
518dc859 4331{
bc2fcccd
JH
4332 if (ipa_node_params_sum)
4333 ggc_delete (ipa_node_params_sum);
dd912cb8 4334 ipa_node_params_sum = NULL;
771578a0
MJ
4335}
4336
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      /* The insertion hook is only useful while IPA-CP is running; summaries
	 for nodes inserted later are created on demand.  */
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
4353
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  /* The summary was allocated in GGC memory, so run the destructor
     explicitly and then hand the storage back to the collector.  */
  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
4366
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* Make sure the transformation summary (and its hash tables) exist before
     storing into it.  */
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
4377
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* Clear the back-pointer of any rdesc owned by this edge so that it
	     is not used after the edge is gone.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
4399
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* dst_jf->agg.items still points at the source vector after the shallow
	 vec_safe_copy above; make it a copy of its own.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Creation of a speculative edge.  If the source edge is the one
		 grabbing a reference, we must create a new (duplicate)
		 reference description.  Otherwise they refer to the same
		 description corresponding to a reference taken in a function
		 src->caller is inlined to.  In that case we just must
		 increment the refcount.  */
	      if (src_rdesc->cs == src)
		{
		  symtab_node *n = symtab_node_for_jfunc (src_jf);
		  gcc_checking_assert (n);
		  ipa_ref *ref
		    = src->caller->find_reference (n, src->call_stmt,
						   src->lto_stmt_uid);
		  gcc_checking_assert (ref);
		  dst->caller->clone_reference (ref, ref->stmt);

		  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
		  dst_rdesc->cs = dst;
		  dst_rdesc->refcount = src_rdesc->refcount;
		  dst_rdesc->next_duplicate = NULL;
		  dst_jf->value.constant.rdesc = dst_rdesc;
		}
	      else
		{
		  src_rdesc->refcount++;
		  dst_jf->value.constant.rdesc = src_rdesc;
		}
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      /* Link the new rdesc into the duplicate chain of the source.  */
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A speculative edge duplicates a pass-through use of the caller's
	     parameter; account for the extra controlled use.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
4507
dd912cb8 4508/* Analyze newly added function into callgraph. */
be95e2b9 4509
771578a0 4510static void
dd912cb8 4511ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 4512{
dd912cb8
ML
4513 if (node->has_gimple_body_p ())
4514 ipa_analyze_node (node);
4515}
771578a0 4516
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are only meaningful during propagation; the clone starts without
     any.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the chain of aggregate replacement values.  Note that the
	 prepend-style copy reverses the order of the list.  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
04be694e 4553
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  /* Copy the agg_values chain tail-first so the original order is kept.  */
  ipa_agg_replacement_value *agg = src_trans->agg_values,
    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
4576
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  /* Make sure the summaries the hooks operate on exist first.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
518dc859 4588
771578a0 4589/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 4590
771578a0
MJ
4591static void
4592ipa_unregister_cgraph_hooks (void)
4593{
bc2fcccd
JH
4594 if (function_insertion_hook_holder)
4595 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 4596 function_insertion_hook_holder = NULL;
771578a0
MJ
4597}
4598
4599/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4600 longer needed after ipa-cp. */
be95e2b9 4601
771578a0 4602void
e33c6cd6 4603ipa_free_all_structures_after_ipa_cp (void)
3e293154 4604{
2bf86c84 4605 if (!optimize && !in_lto_p)
3e293154
MJ
4606 {
4607 ipa_free_all_edge_args ();
4608 ipa_free_all_node_params ();
2651e637
ML
4609 ipcp_sources_pool.release ();
4610 ipcp_cst_values_pool.release ();
4611 ipcp_poly_ctx_values_pool.release ();
4612 ipcp_agg_lattice_pool.release ();
3e293154 4613 ipa_unregister_cgraph_hooks ();
601f3293 4614 ipa_refdesc_pool.release ();
3e293154
MJ
4615 }
4616}
4617
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  /* Unlike ipa_free_all_structures_after_ipa_cp, this variant frees the
     analysis results unconditionally: once indirect inlining is done they
     are not needed in any compilation mode.  */
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* Release the allocation pools backing the IPA-CP lattices and values.  */
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4633
dcd416e3 4634/* Print ipa_tree_map data structures of all functions in the
518dc859 4635 callgraph to F. */
be95e2b9 4636
518dc859 4637void
2c9561b5 4638ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4639{
4640 int i, count;
99b1c316 4641 class ipa_node_params *info;
518dc859 4642
67348ccc 4643 if (!node->definition)
3e293154 4644 return;
a4a3cdd0 4645 info = ipa_node_params_sum->get (node);
464d0118 4646 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
0302955a
JH
4647 if (!info)
4648 {
4649 fprintf (f, " no params return\n");
4650 return;
4651 }
3e293154
MJ
4652 count = ipa_get_param_count (info);
4653 for (i = 0; i < count; i++)
518dc859 4654 {
4502fe8d
MJ
4655 int c;
4656
a4e33812 4657 fprintf (f, " ");
e067bd43 4658 ipa_dump_param (f, info, i);
339f49ec
JH
4659 if (ipa_is_param_used (info, i))
4660 fprintf (f, " used");
40a777e8
JH
4661 if (ipa_is_param_used_by_ipa_predicates (info, i))
4662 fprintf (f, " used_by_ipa_predicates");
4663 if (ipa_is_param_used_by_indirect_call (info, i))
4664 fprintf (f, " used_by_indirect_call");
4665 if (ipa_is_param_used_by_polymorphic_call (info, i))
4666 fprintf (f, " used_by_polymorphic_call");
4502fe8d
MJ
4667 c = ipa_get_controlled_uses (info, i);
4668 if (c == IPA_UNDESCRIBED_USE)
4669 fprintf (f, " undescribed_use");
4670 else
13586172
MJ
4671 fprintf (f, " controlled_uses=%i %s", c,
4672 ipa_get_param_load_dereferenced (info, i)
4673 ? "(load_dereferenced)" : "");
3e293154 4674 fprintf (f, "\n");
518dc859
RL
4675 }
4676}
dcd416e3 4677
ca30a539 4678/* Print ipa_tree_map data structures of all functions in the
3e293154 4679 callgraph to F. */
be95e2b9 4680
3e293154 4681void
ca30a539 4682ipa_print_all_params (FILE * f)
3e293154
MJ
4683{
4684 struct cgraph_node *node;
4685
ca30a539 4686 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4687 FOR_EACH_FUNCTION (node)
ca30a539 4688 ipa_print_node_params (f, node);
3e293154 4689}
3f84bf08 4690
2c9561b5
MJ
4691/* Dump the AV linked list. */
4692
4693void
4694ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4695{
4696 bool comma = false;
4697 fprintf (f, " Aggregate replacements:");
4698 for (; av; av = av->next)
4699 {
4700 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4701 av->index, av->offset);
ef6cb4c7 4702 print_generic_expr (f, av->value);
2c9561b5
MJ
4703 comma = true;
4704 }
4705 fprintf (f, "\n");
4706}
4707
/* Stream out jump function JUMP_FUNC to OB.  The format written here must
   be kept in exact sync with ipa_read_jump_function: type tag, per-type
   payload, aggregate items, then optional bits and value-range info.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very comon IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* The type and the ADDR_EXPR flag are packed into a single uhwi; the
     reader recovers them as val / 2 and val & 1.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      /* With FLAG set, stream only the operand of the ADDR_EXPR; the
	 reader rebuilds the ADDR_EXPR wrapper.  */
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	/* Unary operations have no extra operand to stream.  */
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  /* Binary arithmetic pass-through: stream the constant operand
	     before the formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function part: item count, by_ref flag (only when
     there are items), then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  /* Unary ops carry no operand; everything else streams one.  */
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      /* Load-from-aggregate items additionally describe the
		 loaded location.  */
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  /* Optional known-bits information, guarded by a presence bit.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Optional value-range information, likewise guarded.  */
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4828
/* Read in jump function JUMP_FUNC from IB.  CS is the edge the function
   belongs to and DATA_IN the LTO input context.  When PREVAILS is false
   the data is consumed from the stream but not stored, so that symbols
   losing symbol resolution still keep the stream in sync.  Must mirror
   ipa_write_jump_function exactly.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  /* The writer packed the type and an "is ADDR_EXPR" flag into one
     number: flag in bit 0, type in the remaining bits.  */
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	/* The writer stripped the ADDR_EXPR; rebuild it here.  */
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  /* Unary operations carry no operand.  */
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  /* Arithmetic pass-through: constant operand precedes the
	     formal id, matching the writer.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function part.  Storage is only allocated when the
     node prevails; the stream is consumed either way.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      /* Load-from-aggregate items carry an extra location
		 description.  */
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  /* Optional known-bits information, guarded by a presence bit.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Optional value-range information, likewise guarded.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4970
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must be kept in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* All boolean flags go into a single bitpack; the unpack order in the
     reader must match this pack order exactly.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and only streamed) for aggregate or
     polymorphic calls; otherwise it must be zero.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      /* Polymorphic calls additionally stream the OBJ_TYPE_REF token,
	 the type and the polymorphic call context.  */
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
5002
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  INFO, if non-NULL, is the
   ipa_node_params of the caller in which parameter-use flags get updated.
   Must mirror ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  /* Unpack the flags in exactly the order the writer packed them.  */
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* The offset was only streamed for aggregate or polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  /* Re-derive the caller's parameter-use flags from the call we just
     read in, since they are not streamed themselves.  */
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index , true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
5042
fb3f88cc
JH
5043/* Stream out NODE info to OB. */
5044
5045static void
5046ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5047{
5048 int node_ref;
7380e6ef 5049 lto_symtab_encoder_t encoder;
a4a3cdd0 5050 ipa_node_params *info = ipa_node_params_sum->get (node);
fb3f88cc
JH
5051 int j;
5052 struct cgraph_edge *e;
2465dcc2 5053 struct bitpack_d bp;
fb3f88cc 5054
7380e6ef 5055 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5056 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 5057 streamer_write_uhwi (ob, node_ref);
fb3f88cc 5058
0e8853ee
JH
5059 streamer_write_uhwi (ob, ipa_get_param_count (info));
5060 for (j = 0; j < ipa_get_param_count (info); j++)
5061 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 5062 bp = bitpack_create (ob->main_stream);
8aab5218 5063 gcc_assert (info->analysis_done
661e7330 5064 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
5065 gcc_assert (!info->node_enqueued);
5066 gcc_assert (!info->ipcp_orig_node);
5067 for (j = 0; j < ipa_get_param_count (info); j++)
13586172
MJ
5068 {
5069 /* TODO: We could just not stream the bit in the undescribed case. */
5070 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5071 ? ipa_get_param_load_dereferenced (info, j) : true;
5072 bp_pack_value (&bp, d, 1);
5073 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5074 }
412288f1 5075 streamer_write_bitpack (&bp);
4502fe8d 5076 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
5077 {
5078 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5079 stream_write_tree (ob, ipa_get_type (info, j), true);
5080 }
fb3f88cc
JH
5081 for (e = node->callees; e; e = e->next_callee)
5082 {
a4a3cdd0 5083 ipa_edge_args *args = ipa_edge_args_sum->get (e);
fb3f88cc 5084
a33c028e
JH
5085 if (!args)
5086 {
5087 streamer_write_uhwi (ob, 0);
5088 continue;
5089 }
5090
5ce97055
JH
5091 streamer_write_uhwi (ob,
5092 ipa_get_cs_argument_count (args) * 2
5093 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 5094 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
5095 {
5096 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5097 if (args->polymorphic_call_contexts != NULL)
5098 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5099 }
fb3f88cc 5100 }
e33c6cd6 5101 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 5102 {
a4a3cdd0 5103 ipa_edge_args *args = ipa_edge_args_sum->get (e);
a33c028e
JH
5104 if (!args)
5105 streamer_write_uhwi (ob, 0);
5106 else
5ce97055 5107 {
a33c028e
JH
5108 streamer_write_uhwi (ob,
5109 ipa_get_cs_argument_count (args) * 2
5110 + (args->polymorphic_call_contexts != NULL));
5111 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5112 {
5113 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5114 if (args->polymorphic_call_contexts != NULL)
5115 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5116 }
5ce97055 5117 }
c8246dbe
JH
5118 ipa_write_indirect_edge_info (ob, e);
5119 }
fb3f88cc
JH
5120}
5121
f714ecf5 5122/* Stream in edge E from IB. */
7b377b22 5123
f714ecf5 5124static void
99b1c316
MS
5125ipa_read_edge_info (class lto_input_block *ib,
5126 class data_in *data_in,
f714ecf5 5127 struct cgraph_edge *e, bool prevails)
7b377b22 5128{
f714ecf5
JH
5129 int count = streamer_read_uhwi (ib);
5130 bool contexts_computed = count & 1;
5131
5132 count /= 2;
5133 if (!count)
5134 return;
6cef01c3
JH
5135 if (prevails
5136 && (e->possibly_call_in_translation_unit_p ()
5137 /* Also stream in jump functions to builtins in hope that they
5138 will get fnspecs. */
5139 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
f714ecf5 5140 {
a4a3cdd0 5141 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
cb3874dc 5142 vec_safe_grow_cleared (args->jump_functions, count, true);
f714ecf5 5143 if (contexts_computed)
cb3874dc 5144 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
f714ecf5
JH
5145 for (int k = 0; k < count; k++)
5146 {
5147 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5148 data_in, prevails);
5149 if (contexts_computed)
5150 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5151 (ib, data_in);
5152 }
5153 }
5154 else
5155 {
5156 for (int k = 0; k < count; k++)
5157 {
5158 struct ipa_jump_func dummy;
5159 ipa_read_jump_function (ib, &dummy, e,
5160 data_in, prevails);
5161 if (contexts_computed)
5162 {
99b1c316 5163 class ipa_polymorphic_call_context ctx;
f714ecf5
JH
5164 ctx.stream_in (ib, data_in);
5165 }
5166 }
5167 }
7b377b22
JH
5168}
5169
61502ca8 5170/* Stream in NODE info from IB. */
fb3f88cc
JH
5171
5172static void
99b1c316
MS
5173ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5174 class data_in *data_in)
fb3f88cc 5175{
fb3f88cc
JH
5176 int k;
5177 struct cgraph_edge *e;
2465dcc2 5178 struct bitpack_d bp;
f714ecf5 5179 bool prevails = node->prevailing_p ();
a4a3cdd0
MJ
5180 ipa_node_params *info
5181 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
fb3f88cc 5182
f714ecf5
JH
5183 int param_count = streamer_read_uhwi (ib);
5184 if (prevails)
5185 {
5186 ipa_alloc_node_params (node, param_count);
5187 for (k = 0; k < param_count; k++)
5188 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5189 if (ipa_get_param_count (info) != 0)
5190 info->analysis_done = true;
5191 info->node_enqueued = false;
5192 }
5193 else
5194 for (k = 0; k < param_count; k++)
5195 streamer_read_uhwi (ib);
a5e14a42 5196
412288f1 5197 bp = streamer_read_bitpack (ib);
f714ecf5 5198 for (k = 0; k < param_count; k++)
a5e14a42 5199 {
13586172 5200 bool load_dereferenced = bp_unpack_value (&bp, 1);
f714ecf5
JH
5201 bool used = bp_unpack_value (&bp, 1);
5202
5203 if (prevails)
13586172
MJ
5204 {
5205 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5206 ipa_set_param_used (info, k, used);
5207 }
a5e14a42 5208 }
f714ecf5 5209 for (k = 0; k < param_count; k++)
fb3f88cc 5210 {
f714ecf5
JH
5211 int nuses = streamer_read_hwi (ib);
5212 tree type = stream_read_tree (ib, data_in);
fb3f88cc 5213
f714ecf5 5214 if (prevails)
5ce97055 5215 {
f714ecf5
JH
5216 ipa_set_controlled_uses (info, k, nuses);
5217 (*info->descriptors)[k].decl_or_type = type;
5ce97055 5218 }
fb3f88cc 5219 }
f714ecf5
JH
5220 for (e = node->callees; e; e = e->next_callee)
5221 ipa_read_edge_info (ib, data_in, e, prevails);
e33c6cd6 5222 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 5223 {
f714ecf5 5224 ipa_read_edge_info (ib, data_in, e, prevails);
40a777e8 5225 ipa_read_indirect_edge_info (ib, data_in, e, info);
c8246dbe 5226 }
fb3f88cc
JH
5227}
5228
5229/* Write jump functions for nodes in SET. */
5230
5231void
f27c1867 5232ipa_prop_write_jump_functions (void)
fb3f88cc 5233{
93536c97 5234 struct output_block *ob;
fb3f88cc 5235 unsigned int count = 0;
f27c1867
JH
5236 lto_symtab_encoder_iterator lsei;
5237 lto_symtab_encoder_t encoder;
5238
6fe906a3 5239 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 5240 return;
fb3f88cc 5241
93536c97 5242 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5243 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5244 ob->symbol = NULL;
f27c1867
JH
5245 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5246 lsei_next_function_in_partition (&lsei))
fb3f88cc 5247 {
a4a3cdd0 5248 cgraph_node *node = lsei_cgraph_node (lsei);
d52f5295 5249 if (node->has_gimple_body_p ()
a4a3cdd0 5250 && ipa_node_params_sum->get (node) != NULL)
fb3f88cc
JH
5251 count++;
5252 }
5253
412288f1 5254 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5255
5256 /* Process all of the functions. */
f27c1867
JH
5257 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5258 lsei_next_function_in_partition (&lsei))
fb3f88cc 5259 {
a4a3cdd0 5260 cgraph_node *node = lsei_cgraph_node (lsei);
d52f5295 5261 if (node->has_gimple_body_p ()
a4a3cdd0 5262 && ipa_node_params_sum->get (node) != NULL)
fb3f88cc
JH
5263 ipa_write_node_info (ob, node);
5264 }
412288f1 5265 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5266 produce_asm (ob, NULL);
5267 destroy_output_block (ob);
5268}
5269
5270/* Read section in file FILE_DATA of length LEN with data DATA. */
5271
5272static void
5273ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5274 size_t len)
5275{
5276 const struct lto_function_header *header =
5277 (const struct lto_function_header *) data;
4ad9a9de
EB
5278 const int cfg_offset = sizeof (struct lto_function_header);
5279 const int main_offset = cfg_offset + header->cfg_size;
5280 const int string_offset = main_offset + header->main_size;
99b1c316 5281 class data_in *data_in;
fb3f88cc
JH
5282 unsigned int i;
5283 unsigned int count;
5284
207c68cd 5285 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5286 header->main_size, file_data->mode_table);
fb3f88cc
JH
5287
5288 data_in =
5289 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5290 header->string_size, vNULL);
412288f1 5291 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
5292
5293 for (i = 0; i < count; i++)
5294 {
5295 unsigned int index;
5296 struct cgraph_node *node;
7380e6ef 5297 lto_symtab_encoder_t encoder;
fb3f88cc 5298
412288f1 5299 index = streamer_read_uhwi (&ib_main);
7380e6ef 5300 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5301 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5302 index));
67348ccc 5303 gcc_assert (node->definition);
fb3f88cc
JH
5304 ipa_read_node_info (&ib_main, node, data_in);
5305 }
5306 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5307 len);
5308 lto_data_in_delete (data_in);
5309}
5310
5311/* Read ipcp jump functions. */
5312
5313void
5314ipa_prop_read_jump_functions (void)
5315{
5316 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5317 struct lto_file_decl_data *file_data;
5318 unsigned int j = 0;
5319
5320 ipa_check_create_node_params ();
5321 ipa_check_create_edge_args ();
5322 ipa_register_cgraph_hooks ();
5323
5324 while ((file_data = file_data_vec[j++]))
5325 {
5326 size_t len;
3c56d8d8
ML
5327 const char *data
5328 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5329 &len);
fb3f88cc
JH
5330 if (data)
5331 ipa_prop_read_section (file_data, data, len);
5332 }
5333}
5334
2c9561b5 5335void
04be694e 5336write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
5337{
5338 int node_ref;
5339 unsigned int count = 0;
5340 lto_symtab_encoder_t encoder;
5341 struct ipa_agg_replacement_value *aggvals, *av;
5342
5343 aggvals = ipa_get_agg_replacements_for_node (node);
5344 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5345 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
5346 streamer_write_uhwi (ob, node_ref);
5347
5348 for (av = aggvals; av; av = av->next)
5349 count++;
5350 streamer_write_uhwi (ob, count);
5351
5352 for (av = aggvals; av; av = av->next)
5353 {
7b920a9a
MJ
5354 struct bitpack_d bp;
5355
2c9561b5
MJ
5356 streamer_write_uhwi (ob, av->offset);
5357 streamer_write_uhwi (ob, av->index);
5358 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
5359
5360 bp = bitpack_create (ob->main_stream);
5361 bp_pack_value (&bp, av->by_ref, 1);
5362 streamer_write_bitpack (&bp);
2c9561b5 5363 }
04be694e 5364
9d3e0adc 5365 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5366 if (ts && vec_safe_length (ts->m_vr) > 0)
5367 {
5368 count = ts->m_vr->length ();
5369 streamer_write_uhwi (ob, count);
5370 for (unsigned i = 0; i < count; ++i)
5371 {
5372 struct bitpack_d bp;
5373 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5374 bp = bitpack_create (ob->main_stream);
5375 bp_pack_value (&bp, parm_vr->known, 1);
5376 streamer_write_bitpack (&bp);
5377 if (parm_vr->known)
5378 {
5379 streamer_write_enum (ob->main_stream, value_rang_type,
5380 VR_LAST, parm_vr->type);
5381 streamer_write_wide_int (ob, parm_vr->min);
5382 streamer_write_wide_int (ob, parm_vr->max);
5383 }
5384 }
5385 }
5386 else
5387 streamer_write_uhwi (ob, 0);
5388
209ca542
PK
5389 if (ts && vec_safe_length (ts->bits) > 0)
5390 {
5391 count = ts->bits->length ();
5392 streamer_write_uhwi (ob, count);
5393
5394 for (unsigned i = 0; i < count; ++i)
5395 {
86cd0334 5396 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 5397 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 5398 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 5399 streamer_write_bitpack (&bp);
86cd0334 5400 if (bits_jfunc)
209ca542 5401 {
86cd0334
MJ
5402 streamer_write_widest_int (ob, bits_jfunc->value);
5403 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
5404 }
5405 }
5406 }
5407 else
5408 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
5409}
5410
/* Stream in the aggregate value replacement chain for NODE from IB,
   followed by the per-parameter value ranges and known-bits vectors.
   Must mirror write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  /* Aggregate replacement values.  Prepending each entry reverses the
     list relative to the stream order.  */
  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Per-parameter value ranges; the transformation summary is only
     created when there is something to store.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Per-parameter known-bits, each slot guarded by a presence bit.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
5481
5482/* Write all aggregate replacement for nodes in set. */
5483
5484void
04be694e 5485ipcp_write_transformation_summaries (void)
2c9561b5
MJ
5486{
5487 struct cgraph_node *node;
5488 struct output_block *ob;
5489 unsigned int count = 0;
5490 lto_symtab_encoder_iterator lsei;
5491 lto_symtab_encoder_t encoder;
5492
2c9561b5
MJ
5493 ob = create_output_block (LTO_section_ipcp_transform);
5494 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5495 ob->symbol = NULL;
2c9561b5
MJ
5496 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5497 lsei_next_function_in_partition (&lsei))
5498 {
5499 node = lsei_cgraph_node (lsei);
04be694e 5500 if (node->has_gimple_body_p ())
2c9561b5
MJ
5501 count++;
5502 }
5503
5504 streamer_write_uhwi (ob, count);
5505
5506 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5507 lsei_next_function_in_partition (&lsei))
5508 {
5509 node = lsei_cgraph_node (lsei);
04be694e
MJ
5510 if (node->has_gimple_body_p ())
5511 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
5512 }
5513 streamer_write_char_stream (ob->main_stream, 0);
5514 produce_asm (ob, NULL);
5515 destroy_output_block (ob);
5516}
5517
5518/* Read replacements section in file FILE_DATA of length LEN with data
5519 DATA. */
5520
5521static void
5522read_replacements_section (struct lto_file_decl_data *file_data,
5523 const char *data,
5524 size_t len)
5525{
5526 const struct lto_function_header *header =
5527 (const struct lto_function_header *) data;
5528 const int cfg_offset = sizeof (struct lto_function_header);
5529 const int main_offset = cfg_offset + header->cfg_size;
5530 const int string_offset = main_offset + header->main_size;
99b1c316 5531 class data_in *data_in;
2c9561b5
MJ
5532 unsigned int i;
5533 unsigned int count;
5534
207c68cd 5535 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5536 header->main_size, file_data->mode_table);
2c9561b5
MJ
5537
5538 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5539 header->string_size, vNULL);
2c9561b5
MJ
5540 count = streamer_read_uhwi (&ib_main);
5541
5542 for (i = 0; i < count; i++)
5543 {
5544 unsigned int index;
5545 struct cgraph_node *node;
5546 lto_symtab_encoder_t encoder;
5547
5548 index = streamer_read_uhwi (&ib_main);
5549 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5550 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5551 index));
67348ccc 5552 gcc_assert (node->definition);
04be694e 5553 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5554 }
5555 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5556 len);
5557 lto_data_in_delete (data_in);
5558}
5559
5560/* Read IPA-CP aggregate replacements. */
5561
5562void
04be694e 5563ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5564{
5565 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5566 struct lto_file_decl_data *file_data;
5567 unsigned int j = 0;
5568
5569 while ((file_data = file_data_vec[j++]))
5570 {
5571 size_t len;
3c56d8d8
ML
5572 const char *data
5573 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5574 &len);
2c9561b5
MJ
5575 if (data)
5576 read_replacements_section (file_data, data, len);
5577 }
5578}
5579
5580/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5581 NODE. */
5582
5583static void
5584adjust_agg_replacement_values (struct cgraph_node *node,
5585 struct ipa_agg_replacement_value *aggval)
5586{
5587 struct ipa_agg_replacement_value *v;
ae7a23a3 5588 clone_info *cinfo = clone_info::get (node);
2c9561b5 5589
ae7a23a3 5590 if (!cinfo || !cinfo->param_adjustments)
2c9561b5
MJ
5591 return;
5592
ff6686d2 5593 auto_vec<int, 16> new_indices;
ae7a23a3 5594 cinfo->param_adjustments->get_updated_indices (&new_indices);
2c9561b5
MJ
5595 for (v = aggval; v; v = v->next)
5596 {
ff6686d2 5597 gcc_checking_assert (v->index >= 0);
2c9561b5 5598
ff6686d2
MJ
5599 if ((unsigned) v->index < new_indices.length ())
5600 v->index = new_indices[v->index];
5601 else
5602 /* This can happen if we know about a constant passed by reference by
5603 an argument which is never actually used for anything, let alone
5604 loading that constant. */
5605 v->index = -1;
5606 }
2c9561b5
MJ
5607}
5608
8aab5218
MJ
5609/* Dominator walker driving the ipcp modification phase. */
5610
5611class ipcp_modif_dom_walker : public dom_walker
5612{
5613public:
56b40062 5614 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
f65f1ae3 5615 vec<ipa_param_descriptor, va_gc> *descs,
8aab5218 5616 struct ipa_agg_replacement_value *av,
8ddce3f7 5617 bool *sc)
8aab5218 5618 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
8ddce3f7 5619 m_aggval (av), m_something_changed (sc) {}
8aab5218 5620
3daacdcd 5621 virtual edge before_dom_children (basic_block);
8ddce3f7
RB
5622 bool cleanup_eh ()
5623 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
8aab5218
MJ
5624
5625private:
56b40062 5626 struct ipa_func_body_info *m_fbi;
f65f1ae3 5627 vec<ipa_param_descriptor, va_gc> *m_descriptors;
8aab5218 5628 struct ipa_agg_replacement_value *m_aggval;
8ddce3f7
RB
5629 bool *m_something_changed;
5630 auto_bitmap m_need_eh_cleanup;
8aab5218
MJ
5631};
5632
/* Scan all statements of BB and replace loads from parameter aggregates for
   which a constant has been recorded in m_aggval with that constant.  Always
   returns NULL (no edge to skip during the dominator walk).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only plain scalar loads are candidates for replacement.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Determine which parameter, at what offset, the load reads from.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a recorded constant for that parameter and offset.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      /* The recorded value must match the load in pass-by-reference-ness
	 and in the size of the accessed piece.  */
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  /* Try a value-preserving conversion first, then fall back to a
	     same-size reinterpretation; otherwise give up on this load.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a possibly-trapping load with a constant may make EH
	 edges from this block dead; remember it for later cleanup.  */
      if (maybe_clean_eh_stmt (stmt))
	bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
5727
c7ac9a0c
JH
5728/* Return true if we have recorded VALUE and MASK about PARM.
5729 Set VALUE and MASk accordingly. */
5730
5731bool
5732ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5733{
5734 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5735 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5736 if (!ts || vec_safe_length (ts->bits) == 0)
5737 return false;
5738
5739 int i = 0;
5740 for (tree p = DECL_ARGUMENTS (current_function_decl);
5741 p != parm; p = DECL_CHAIN (p))
5742 {
5743 i++;
5744 /* Ignore static chain. */
5745 if (!p)
5746 return false;
5747 }
5748
ae7a23a3
JH
5749 clone_info *cinfo = clone_info::get (cnode);
5750 if (cinfo && cinfo->param_adjustments)
c7ac9a0c 5751 {
ae7a23a3 5752 i = cinfo->param_adjustments->get_original_index (i);
c7ac9a0c
JH
5753 if (i < 0)
5754 return false;
5755 }
5756
5757 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5758 if (!bits[i])
5759 return false;
5760 *mask = bits[i]->mask;
5761 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5762 return true;
5763}
5764
5765
/* Update bits info of formal parameters of NODE as described in
   ipcp_transformation: set nonzero-bits info on integral parameters and
   alignment info on pointer parameters.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* If NODE is a clone, the streamed indices refer to the original
     parameter list and must be remapped to the surviving parameters.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  /* Skip parameters that were removed in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Bits info only makes sense for integral or pointer parameters that
	 live in an SSA register.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in the mask are unknown, so the possibly-nonzero bits
	     are the union of the mask and the known value bits.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* For pointers, the lowest set bit of the mask bounds the provable
	     alignment, and the value bits below it give the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5875
523fe5b6
AH
5876bool
5877ipa_vr::nonzero_p (tree expr_type) const
5878{
5879 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5880 return true;
5881
5882 unsigned prec = TYPE_PRECISION (expr_type);
5883 return (type == VR_RANGE
19849d15 5884 && TYPE_UNSIGNED (expr_type)
523fe5b6
AH
5885 && wi::eq_p (min, wi::one (prec))
5886 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5887}
5888
/* Update value range of formal parameters of NODE as described in
   ipcp_transformation: attach range info to integral SSA parameters and
   non-null info to pointer SSA parameters.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* If NODE is a clone, the streamed indices refer to the original
     parameter list and must be remapped to the surviving parameters.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  /* Skip parameters that were removed in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      /* Range info can only be attached to the SSA default definition of a
	 register parameter.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      /* For pointers only a range excluding zero is usable; it
		 translates to a non-null attribute on the SSA name.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5971
/* IPCP transformation phase doing propagation of aggregate values.  Applies
   the previously computed transformation summary (bits, value ranges and
   aggregate constants) to the body of NODE, then releases the summary.
   Returns a TODO bitmask for the pass manager.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Bits and value ranges are applied unconditionally; aggregate
     replacements below require a dominator walk.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap streamed parameter indices to the clone's parameter list.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &something_changed);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  /* Purge EH edges made dead by the replacements.  */
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
86cd0334 6038
ac6f2e59 6039
eb270950 6040/* Return true if OTHER describes same agg value. */
ac6f2e59 6041bool
eb270950 6042ipa_agg_value::equal_to (const ipa_agg_value &other)
ac6f2e59
JH
6043{
6044 return offset == other.offset
6045 && operand_equal_p (value, other.value, 0);
6046}
9d5af1db
MJ
6047
6048/* Destructor also removing individual aggregate values. */
6049
6050ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
6051{
6052 ipa_release_agg_values (m_known_aggs, false);
6053}
6054
6055
6056
86cd0334 6057#include "gt-ipa-prop.h"