]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Handle fnspec in local ipa-modref
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
8d9254fc 2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
c7b6a758 55#include "options.h"
771578a0 56
dd912cb8
ML
57/* Function summary where the parameter infos are actually stored. */
58ipa_node_params_t *ipa_node_params_sum = NULL;
9d3e0adc
ML
59
60function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
61
6fe906a3
MJ
62/* Edge summary for IPA-CP edge information. */
63ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 64
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  /* Hash P by folding the low HOST_WIDE_INT bits of its known value into
     those of its mask.  Truncation via to_shwi is fine for hashing; equal ()
     below compares the full wide values.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  /* Two entries are interchangeable iff both the value and the mask are
     identical.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }

  /* Empty slots hash the same as a zeroed entry.  */
  static const bool empty_zero_p = true;

  /* NULL marks an empty slot...  */
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }

  /* ...while the non-dereferenceable address 1 marks a deleted one.  */
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};
104
105/* Hash table for avoid repeated allocations of equal ipa_bits. */
106static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
107
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  /* Hash the range kind together with the trees of both bounds.  */
  static hashval_t
  hash (const value_range *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }

  /* Ranges can be shared only when equal and of compatible types; equal
     bounds of incompatible types must stay distinct entries.  */
  static bool
  equal (const value_range *a, const value_range *b)
    {
      return (a->equal_p (*b)
	      && types_compatible_p (a->type (), b->type ()));
    }

  /* Empty slots hash the same as a zeroed entry.  */
  static const bool empty_zero_p = true;

  /* NULL marks an empty slot, the non-dereferenceable address 1 a deleted
     one.  */
  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};
151
152/* Hash table for avoid repeated allocations of equal value_ranges. */
153static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
154
771578a0 155/* Holders of ipa cgraph hooks: */
40982661 156static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 157
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
169
170/* Allocation pool for reference descriptions. */
171
fb0b2914 172static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
fcb87c50 173 ("IPA-PROP ref descriptions");
4502fe8d 174
5fe8e757
MJ
175/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
176 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
177
178static bool
179ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
180{
67348ccc 181 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
182
183 if (!fs_opts)
184 return false;
2bf86c84 185 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
186}
187
be95e2b9
MJ
188/* Return index of the formal whose tree is PTREE in function which corresponds
189 to INFO. */
190
d044dd17 191static int
f65f1ae3
MJ
192ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
193 tree ptree)
518dc859
RL
194{
195 int i, count;
196
f65f1ae3 197 count = vec_safe_length (descriptors);
518dc859 198 for (i = 0; i < count; i++)
f65f1ae3 199 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
200 return i;
201
202 return -1;
203}
204
d044dd17
MJ
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if PTREE is not a parameter of that function.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  /* Delegate to the descriptor-vector variant.  */
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
213
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  DESCRIPTORS must already be sized to the parameter count.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  /* Only functions with bodies have reliable DECL_ARGUMENTS chains.  */
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
240
3f84bf08
MJ
241/* Return how many formal parameters FNDECL has. */
242
fd29c024 243int
310bc633 244count_formal_params (tree fndecl)
3f84bf08
MJ
245{
246 tree parm;
247 int count = 0;
0e8853ee 248 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 249
910ad8de 250 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
251 count++;
252
253 return count;
254}
255
0e8853ee
JH
/* Dump a human-readable description of the Ith formal parameter of the
   function corresponding to INFO to FILE: the parameter index and, when the
   descriptor carries a real PARM_DECL, its printed form.  Note there is no
   setter function as the descriptor array is built just once using
   ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  /* decl_or_type may be NULL (or a type) for parameters without decls.  */
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
270
159f01f8
MJ
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not (either because they were
   already there or because PARAM_COUNT is zero).  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      /* Grow zero-initialized so unpopulated fields read as empty.  */
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
287
f8e2a1ed
MJ
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Idempotent: does nothing when descriptors already exist.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  /* Populate only on first allocation; ipa_alloc_node_params returns false
     when the vector was already present or the function has no params.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
301
749aa96d
MJ
302/* Print the jump functions associated with call graph edge CS to file F. */
303
304static void
305ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
306{
307 int i, count;
308
309 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
310 for (i = 0; i < count; i++)
311 {
312 struct ipa_jump_func *jump_func;
313 enum jump_func_type type;
314
315 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
316 type = jump_func->type;
317
318 fprintf (f, " param %d: ", i);
319 if (type == IPA_JF_UNKNOWN)
320 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
321 else if (type == IPA_JF_CONST)
322 {
4502fe8d 323 tree val = jump_func->value.constant.value;
749aa96d 324 fprintf (f, "CONST: ");
ef6cb4c7 325 print_generic_expr (f, val);
749aa96d
MJ
326 if (TREE_CODE (val) == ADDR_EXPR
327 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
328 {
329 fprintf (f, " -> ");
ef6cb4c7 330 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
331 }
332 fprintf (f, "\n");
333 }
749aa96d
MJ
334 else if (type == IPA_JF_PASS_THROUGH)
335 {
336 fprintf (f, "PASS THROUGH: ");
8b7773a4 337 fprintf (f, "%d, op %s",
749aa96d 338 jump_func->value.pass_through.formal_id,
5806f481 339 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 340 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
341 {
342 fprintf (f, " ");
ef6cb4c7 343 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
344 }
345 if (jump_func->value.pass_through.agg_preserved)
346 fprintf (f, ", agg_preserved");
3ea6239f 347 fprintf (f, "\n");
749aa96d
MJ
348 }
349 else if (type == IPA_JF_ANCESTOR)
350 {
351 fprintf (f, "ANCESTOR: ");
16998094 352 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
353 jump_func->value.ancestor.formal_id,
354 jump_func->value.ancestor.offset);
8b7773a4
MJ
355 if (jump_func->value.ancestor.agg_preserved)
356 fprintf (f, ", agg_preserved");
3ea6239f 357 fprintf (f, "\n");
749aa96d 358 }
8b7773a4
MJ
359
360 if (jump_func->agg.items)
361 {
362 struct ipa_agg_jf_item *item;
363 int j;
364
365 fprintf (f, " Aggregate passed by %s:\n",
366 jump_func->agg.by_ref ? "reference" : "value");
eb270950 367 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
8b7773a4
MJ
368 {
369 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
370 item->offset);
eb270950
FX
371 fprintf (f, "type: ");
372 print_generic_expr (f, item->type);
373 fprintf (f, ", ");
374 if (item->jftype == IPA_JF_PASS_THROUGH)
375 fprintf (f, "PASS THROUGH: %d,",
376 item->value.pass_through.formal_id);
377 else if (item->jftype == IPA_JF_LOAD_AGG)
378 {
379 fprintf (f, "LOAD AGG: %d",
380 item->value.pass_through.formal_id);
381 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
382 item->value.load_agg.offset,
383 item->value.load_agg.by_ref ? "reference"
384 : "value");
385 }
386
387 if (item->jftype == IPA_JF_PASS_THROUGH
388 || item->jftype == IPA_JF_LOAD_AGG)
389 {
390 fprintf (f, " op %s",
391 get_tree_code_name (item->value.pass_through.operation));
392 if (item->value.pass_through.operation != NOP_EXPR)
393 {
394 fprintf (f, " ");
395 print_generic_expr (f, item->value.pass_through.operand);
396 }
397 }
398 else if (item->jftype == IPA_JF_CONST)
8b7773a4 399 {
eb270950
FX
400 fprintf (f, "CONST: ");
401 print_generic_expr (f, item->value.constant);
8b7773a4 402 }
eb270950
FX
403 else if (item->jftype == IPA_JF_UNKNOWN)
404 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
405 tree_to_uhwi (TYPE_SIZE (item->type)));
8b7773a4
MJ
406 fprintf (f, "\n");
407 }
408 }
44210a96 409
99b1c316 410 class ipa_polymorphic_call_context *ctx
44210a96
MJ
411 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
412 if (ctx && !ctx->useless_p ())
413 {
414 fprintf (f, " Context: ");
415 ctx->dump (dump_file);
416 }
04be694e 417
86cd0334 418 if (jump_func->bits)
209ca542 419 {
86cd0334
MJ
420 fprintf (f, " value: ");
421 print_hex (jump_func->bits->value, f);
422 fprintf (f, ", mask: ");
423 print_hex (jump_func->bits->mask, f);
209ca542
PK
424 fprintf (f, "\n");
425 }
426 else
427 fprintf (f, " Unknown bits\n");
8bc5448f 428
86cd0334 429 if (jump_func->m_vr)
8bc5448f
KV
430 {
431 fprintf (f, " VR ");
432 fprintf (f, "%s[",
54994253
AH
433 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
434 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
8bc5448f 435 fprintf (f, ", ");
54994253 436 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
8bc5448f
KV
437 fprintf (f, "]\n");
438 }
439 else
440 fprintf (f, " Unknown VR\n");
749aa96d
MJ
441 }
442}
443
444
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are dumped first, indirect call sites
   second.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      /* Argument information may be missing e.g. for edges created late.  */
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      /* Calls through aggregate contents (including member pointers) are
	 described differently from polymorphic/simple indirect calls.  */
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
498
499/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 500
3e293154
MJ
501void
502ipa_print_all_jump_functions (FILE *f)
503{
504 struct cgraph_node *node;
505
ca30a539 506 fprintf (f, "\nJump functions:\n");
65c70e6b 507 FOR_EACH_FUNCTION (node)
3e293154
MJ
508 {
509 ipa_print_node_jump_functions (f, node);
510 }
511}
512
04be694e
MJ
/* Set JFUNC to be a jump function that conveys no useful information
   (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
520
b8f6e610
MJ
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Shallow copy: value and rdesc pointer are shared, refcount is not
     bumped here.  */
  dst->value.constant = src->value.constant;
}
533
7b872d9e
MJ
/* Set JFUNC to be a constant jmp function carrying CONSTANT.  CS is the edge
   whose statement took the reference; for addresses of functions a reference
   descriptor is allocated so the reference can be tracked and released when
   edges are removed or cloned.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  /* Drop locations so streamed/duplicated constants compare equal.  */
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
557
/* Set JFUNC to be a simple pass-through jump function: the actual argument is
   formal parameter FORMAL_ID of the caller, unmodified (NOP_EXPR operation).
   AGG_PRESERVED says whether aggregate contents reachable through it are
   known to be unchanged.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
569
a2b4c188
KV
/* Set JFUNC to be a unary pass through jump function: OPERATION applied to
   formal parameter FORMAL_ID of the caller, with no extra operand.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  /* Unary operations never preserve the aggregate contents.  */
  jfunc->value.pass_through.agg_preserved = false;
}
7b872d9e
MJ
/* Set JFUNC to be an arithmetic pass through jump function: binary OPERATION
   applied to formal parameter FORMAL_ID of the caller and constant
   OPERAND.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  /* Drop locations so streamed/duplicated operands compare equal.  */
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  /* Arithmetic operations never preserve the aggregate contents.  */
  jfunc->value.pass_through.agg_preserved = false;
}
594
/* Set JFUNC to be an ancestor jump function: the argument is formal parameter
   FORMAL_ID of the caller advanced by OFFSET bits (e.g. a pointer to an
   ancestor sub-object).  AGG_PRESERVED says whether aggregate contents are
   known unchanged.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
606
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  /* bb_infos is indexed by basic block index.  */
  return &fbi->bb_infos[bb->index];
}
616
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
631
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Calls are ignored per the reasoning above.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers only mark end of life, they do not store a VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing a non-pointer scalar store cannot change
	     a vtable pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot be a VMT store either.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store the VMT pointer.  */
  return true;
}
696
3b97a5c7
MJ
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  Returning true stops the walk.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      /* Possible VMT store found; record it and terminate the walk.  */
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
716
058d0a90
JH
717/* See if ARG is PARAM_DECl describing instance passed by pointer
718 or reference in FUNCTION. Return false if the dynamic type may change
719 in between beggining of the function until CALL is invoked.
290ebcb7 720
058d0a90 721 Generally functions are not allowed to change type of such instances,
67914693 722 but they call destructors. We assume that methods cannot destroy the THIS
058d0a90
JH
723 pointer. Also as a special cases, constructor and destructors may change
724 type of the THIS pointer. */
725
726static bool
355fe088 727param_type_may_change_p (tree function, tree arg, gimple *call)
058d0a90 728{
67914693 729 /* Pure functions cannot do any changes on the dynamic type;
058d0a90
JH
730 that require writting to memory. */
731 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
732 return false;
733 /* We need to check if we are within inlined consturctor
734 or destructor (ideally we would have way to check that the
735 inline cdtor is actually working on ARG, but we don't have
736 easy tie on this, so punt on all non-pure cdtors.
737 We may also record the types of cdtors and once we know type
738 of the instance match them.
739
740 Also code unification optimizations may merge calls from
741 different blocks making return values unreliable. So
742 do nothing during late optimization. */
743 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
744 return true;
745 if (TREE_CODE (arg) == SSA_NAME
746 && SSA_NAME_IS_DEFAULT_DEF (arg)
747 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
748 {
749 /* Normal (non-THIS) argument. */
750 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
751 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
026c3cfd 752 /* THIS pointer of an method - here we want to watch constructors
058d0a90
JH
753 and destructors as those definitely may change the dynamic
754 type. */
755 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
756 && !DECL_CXX_CONSTRUCTOR_P (function)
757 && !DECL_CXX_DESTRUCTOR_P (function)
758 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
759 {
760 /* Walk the inline stack and watch out for ctors/dtors. */
761 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
762 block = BLOCK_SUPERCONTEXT (block))
00a0ea64
JJ
763 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
764 return true;
058d0a90
JH
765 return false;
766 }
767 }
768 return true;
769}
290ebcb7 770
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Restrict the alias oracle query to the pointer-sized slot at OFFSET
     where the VMT pointer would reside.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL within the AA budget; a
     negative result means the walk was aborted, which we must treat as a
     possible type change.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
823
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* If the base is a parameter whose pointed-to type cannot change before
     CALL, no vdef walk is necessary.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
845
f65cf2b7
MJ
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  /* Parameters whose pointed-to type cannot change need no vdef walk.  */
  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build a zero-offset dereference of ARG to serve as both the access
     expression and the base.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
868
fdb0e1b4
MJ
869/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
870 boolean variable pointed to by DATA. */
871
872static bool
873mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
874 void *data)
875{
876 bool *b = (bool *) data;
877 *b = true;
878 return true;
879}
880
8aab5218
MJ
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  Return NULL if no dominator has a valid status.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  /* Climb the dominator tree until a block with a valid status for INDEX
     is found or the tree is exhausted.  */
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
899
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector, zero-initialized so all
     entries start out as not valid.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating status when one exists; otherwise
	 the cleared (nothing modified) state is the correct start.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
928
688010ba 929/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 930 a value known not to be modified in this function before reaching the
8aab5218
MJ
931 statement STMT. FBI holds information about the function we have so far
932 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
933
934static bool
56b40062 935parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 936 gimple *stmt, tree parm_load)
fdb0e1b4 937{
56b40062 938 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
939 bool modified = false;
940 ao_ref refd;
941
776e4fe2
MJ
942 tree base = get_base_address (parm_load);
943 gcc_assert (TREE_CODE (base) == PARM_DECL);
944 if (TREE_READONLY (base))
945 return true;
946
c628d1c3
MJ
947 gcc_checking_assert (fbi);
948 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
949 if (paa->parm_modified)
950 return false;
fdb0e1b4
MJ
951
952 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 953 ao_ref_init (&refd, parm_load);
8aab5218 954 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
955 &modified, NULL, NULL,
956 fbi->aa_walk_budget + 1);
957 if (walked < 0)
958 {
959 modified = true;
960 if (fbi)
961 fbi->aa_walk_budget = 0;
962 }
963 else if (fbi)
964 fbi->aa_walk_budget -= walked;
8aab5218
MJ
965 if (paa && modified)
966 paa->parm_modified = true;
8b7773a4 967 return !modified;
fdb0e1b4
MJ
968}
969
a2b4c188
KV
970/* If STMT is an assignment that loads a value from an parameter declaration,
971 return the index of the parameter in ipa_node_params which has not been
972 modified. Otherwise return -1. */
973
974static int
975load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 976 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
977 gimple *stmt)
978{
bda2bc48
MJ
979 int index;
980 tree op1;
981
a2b4c188
KV
982 if (!gimple_assign_single_p (stmt))
983 return -1;
984
bda2bc48
MJ
985 op1 = gimple_assign_rhs1 (stmt);
986 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
987 return -1;
988
bda2bc48
MJ
989 index = ipa_get_param_decl_index_1 (descriptors, op1);
990 if (index < 0
991 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
992 return -1;
993
bda2bc48 994 return index;
a2b4c188
KV
995}
996
8aab5218
MJ
997/* Return true if memory reference REF (which must be a load through parameter
998 with INDEX) loads data that are known to be unmodified in this function
999 before reaching statement STMT. */
8b7773a4
MJ
1000
1001static bool
56b40062 1002parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 1003 int index, gimple *stmt, tree ref)
8b7773a4 1004{
56b40062 1005 struct ipa_param_aa_status *paa;
8b7773a4
MJ
1006 bool modified = false;
1007 ao_ref refd;
1008
c628d1c3
MJ
1009 gcc_checking_assert (fbi);
1010 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1011 if (paa->ref_modified)
1012 return false;
8b7773a4 1013
8aab5218 1014 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1015 ao_ref_init (&refd, ref);
8aab5218 1016 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
1017 &modified, NULL, NULL,
1018 fbi->aa_walk_budget + 1);
1019 if (walked < 0)
1020 {
1021 modified = true;
1022 fbi->aa_walk_budget = 0;
1023 }
1024 else
1025 fbi->aa_walk_budget -= walked;
1026 if (modified)
8aab5218 1027 paa->ref_modified = true;
8b7773a4
MJ
1028 return !modified;
1029}
1030
8aab5218
MJ
1031/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1032 is known to be unmodified in this function before reaching call statement
1033 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1034
1035static bool
56b40062 1036parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1037 gimple *call, tree parm)
8b7773a4
MJ
1038{
1039 bool modified = false;
1040 ao_ref refd;
1041
1042 /* It's unnecessary to calculate anything about memory contnets for a const
1043 function because it is not goin to use it. But do not cache the result
1044 either. Also, no such calculations for non-pointers. */
1045 if (!gimple_vuse (call)
c628d1c3 1046 || !POINTER_TYPE_P (TREE_TYPE (parm)))
8b7773a4
MJ
1047 return false;
1048
56b40062
MJ
1049 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1050 gimple_bb (call),
1051 index);
8aab5218 1052 if (paa->pt_modified)
8b7773a4
MJ
1053 return false;
1054
1055 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218 1056 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
c628d1c3
MJ
1057 &modified, NULL, NULL,
1058 fbi->aa_walk_budget + 1);
1059 if (walked < 0)
1060 {
1061 fbi->aa_walk_budget = 0;
1062 modified = true;
1063 }
1064 else
1065 fbi->aa_walk_budget -= walked;
8b7773a4 1066 if (modified)
8aab5218 1067 paa->pt_modified = true;
8b7773a4
MJ
1068 return !modified;
1069}
1070
91bb9f80
MJ
1071/* Return true if we can prove that OP is a memory reference loading
1072 data from an aggregate passed as a parameter.
1073
1074 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1075 false if it cannot prove that the value has not been modified before the
1076 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1077 if it cannot prove the value has not been modified, in that case it will
1078 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1079
8b7773a4
MJ
1080 INFO and PARMS_AINFO describe parameters of the current function (but the
1081 latter can be NULL), STMT is the load statement. If function returns true,
1082 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1083 within the aggregate and whether it is a load from a value passed by
1084 reference respectively. */
1085
ff302741 1086bool
56b40062 1087ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1088 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1089 gimple *stmt, tree op, int *index_p,
86003645 1090 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
91bb9f80 1091 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1092{
1093 int index;
588db50c 1094 HOST_WIDE_INT size;
ee45a32d 1095 bool reverse;
588db50c 1096 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1097
588db50c 1098 if (!base)
8b7773a4
MJ
1099 return false;
1100
1101 if (DECL_P (base))
1102 {
d044dd17 1103 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1104 if (index >= 0
8aab5218 1105 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1106 {
1107 *index_p = index;
1108 *by_ref_p = false;
3ff2ca23
JJ
1109 if (size_p)
1110 *size_p = size;
91bb9f80
MJ
1111 if (guaranteed_unmodified)
1112 *guaranteed_unmodified = true;
8b7773a4
MJ
1113 return true;
1114 }
1115 return false;
1116 }
1117
1118 if (TREE_CODE (base) != MEM_REF
1119 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1120 || !integer_zerop (TREE_OPERAND (base, 1)))
1121 return false;
1122
1123 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1124 {
1125 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1126 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1127 }
1128 else
1129 {
1130 /* This branch catches situations where a pointer parameter is not a
1131 gimple register, for example:
1132
1133 void hip7(S*) (struct S * p)
1134 {
1135 void (*<T2e4>) (struct S *) D.1867;
1136 struct S * p.1;
1137
1138 <bb 2>:
1139 p.1_1 = p;
1140 D.1867_2 = p.1_1->f;
1141 D.1867_2 ();
1142 gdp = &p;
1143 */
1144
355fe088 1145 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1146 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1147 }
1148
91bb9f80 1149 if (index >= 0)
8b7773a4 1150 {
91bb9f80
MJ
1151 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1152 if (!data_preserved && !guaranteed_unmodified)
1153 return false;
1154
8b7773a4
MJ
1155 *index_p = index;
1156 *by_ref_p = true;
3ff2ca23
JJ
1157 if (size_p)
1158 *size_p = size;
91bb9f80
MJ
1159 if (guaranteed_unmodified)
1160 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1161 return true;
1162 }
1163 return false;
1164}
1165
eb270950
FX
1166/* If STMT is an assignment that loads a value from a parameter declaration,
1167 or from an aggregate passed as the parameter either by value or reference,
1168 return the index of the parameter in ipa_node_params. Otherwise return -1.
1169
1170 FBI holds gathered information about the function. INFO describes
1171 parameters of the function, STMT is the assignment statement. If it is a
1172 memory load from an aggregate, *OFFSET_P is filled with offset within the
1173 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1174 reference. */
1175
1176static int
1177load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1178 class ipa_node_params *info,
1179 gimple *stmt,
1180 HOST_WIDE_INT *offset_p,
1181 bool *by_ref_p)
1182{
1183 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1184 poly_int64 size;
1185
1186 /* Load value from a parameter declaration. */
1187 if (index >= 0)
1188 {
1189 *offset_p = -1;
1190 return index;
1191 }
1192
1193 if (!gimple_assign_load_p (stmt))
1194 return -1;
1195
1196 tree rhs = gimple_assign_rhs1 (stmt);
1197
1198 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1199 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1200 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1201 return -1;
1202
1203 /* Skip memory reference containing bit-field. */
1204 if (TREE_CODE (rhs) == BIT_FIELD_REF
1205 || contains_bitfld_component_ref_p (rhs))
1206 return -1;
1207
1208 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1209 offset_p, &size, by_ref_p))
1210 return -1;
1211
1212 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1213 size));
1214 if (!*by_ref_p)
1215 {
1216 tree param_type = ipa_get_type (info, index);
1217
1218 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1219 return -1;
1220 }
1221 else if (TREE_THIS_VOLATILE (rhs))
1222 return -1;
1223
1224 return index;
1225}
1226
c7b6a758
JH
1227/* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1228 to find original pointer. Initialize RET to the pointer which results from
1229 the walk.
1230 If offset is known return true and initialize OFFSET_RET. */
1231
1232bool
1233unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1234{
1235 poly_int64 offset = 0;
1236 bool offset_known = true;
1237 int i;
1238
1239 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1240 {
1241 if (TREE_CODE (op) == ADDR_EXPR)
1242 {
1243 poly_int64 extra_offset = 0;
1244 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1245 &offset);
1246 if (!base)
1247 {
1248 base = get_base_address (TREE_OPERAND (op, 0));
1249 if (TREE_CODE (base) != MEM_REF)
1250 break;
1251 offset_known = false;
1252 }
1253 else
1254 {
1255 if (TREE_CODE (base) != MEM_REF)
1256 break;
1257 offset += extra_offset;
1258 }
1259 op = TREE_OPERAND (base, 0);
1260 if (mem_ref_offset (base).to_shwi (&extra_offset))
1261 offset += extra_offset;
1262 else
1263 offset_known = false;
1264 }
1265 else if (TREE_CODE (op) == SSA_NAME
1266 && !SSA_NAME_IS_DEFAULT_DEF (op))
1267 {
1268 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1269
1270 if (gimple_assign_single_p (pstmt))
1271 op = gimple_assign_rhs1 (pstmt);
1272 else if (is_gimple_assign (pstmt)
1273 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1274 {
1275 poly_int64 extra_offset = 0;
1276 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1277 &extra_offset))
1278 offset += extra_offset;
1279 else
1280 offset_known = false;
1281 op = gimple_assign_rhs1 (pstmt);
1282 }
1283 else
1284 break;
1285 }
1286 else
1287 break;
1288 }
1289 *ret = op;
1290 *offset_ret = offset;
1291 return offset_known;
1292}
1293
b258210c 1294/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1295 of an assignment statement STMT, try to determine whether we are actually
1296 handling any of the following cases and construct an appropriate jump
1297 function into JFUNC if so:
1298
1299 1) The passed value is loaded from a formal parameter which is not a gimple
1300 register (most probably because it is addressable, the value has to be
1301 scalar) and we can guarantee the value has not changed. This case can
1302 therefore be described by a simple pass-through jump function. For example:
1303
1304 foo (int a)
1305 {
1306 int a.0;
1307
1308 a.0_2 = a;
1309 bar (a.0_2);
1310
1311 2) The passed value can be described by a simple arithmetic pass-through
1312 jump function. E.g.
1313
1314 foo (int a)
1315 {
1316 int D.2064;
1317
1318 D.2064_4 = a.1(D) + 4;
1319 bar (D.2064_4);
1320
1321 This case can also occur in combination of the previous one, e.g.:
1322
1323 foo (int a, int z)
1324 {
1325 int a.0;
1326 int D.2064;
1327
1328 a.0_3 = a;
1329 D.2064_4 = a.0_3 + 4;
1330 foo (D.2064_4);
1331
1332 3) The passed value is an address of an object within another one (which
1333 also passed by reference). Such situations are described by an ancestor
1334 jump function and describe situations such as:
1335
1336 B::foo() (struct B * const this)
1337 {
1338 struct A * D.1845;
1339
1340 D.1845_2 = &this_1(D)->D.1748;
1341 A::bar (D.1845_2);
1342
1343 INFO is the structure describing individual parameters access different
1344 stages of IPA optimizations. PARMS_AINFO contains the information that is
1345 only needed for intraprocedural analysis. */
685b0d13
MJ
1346
1347static void
56b40062 1348compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1349 class ipa_node_params *info,
b258210c 1350 struct ipa_jump_func *jfunc,
355fe088 1351 gcall *call, gimple *stmt, tree name,
06d65050 1352 tree param_type)
685b0d13 1353{
588db50c 1354 HOST_WIDE_INT offset, size;
fdb0e1b4 1355 tree op1, tc_ssa, base, ssa;
ee45a32d 1356 bool reverse;
685b0d13 1357 int index;
685b0d13 1358
685b0d13 1359 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1360
fdb0e1b4 1361 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1362 {
fdb0e1b4
MJ
1363 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1364 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1365 else
bda2bc48
MJ
1366 index = load_from_unmodified_param (fbi, info->descriptors,
1367 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1368 tc_ssa = op1;
1369 }
1370 else
1371 {
bda2bc48 1372 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1373 tc_ssa = gimple_assign_lhs (stmt);
1374 }
1375
1376 if (index >= 0)
1377 {
a77af182 1378 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1379 {
a77af182
RB
1380 case GIMPLE_BINARY_RHS:
1381 {
1382 tree op2 = gimple_assign_rhs2 (stmt);
1383 if (!is_gimple_ip_invariant (op2)
1384 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1385 != tcc_comparison)
1386 && !useless_type_conversion_p (TREE_TYPE (name),
1387 TREE_TYPE (op1))))
1388 return;
1389
1390 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1391 gimple_assign_rhs_code (stmt));
1392 break;
1393 }
1394 case GIMPLE_SINGLE_RHS:
1395 {
1396 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1397 tc_ssa);
1398 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1399 break;
1400 }
1401 case GIMPLE_UNARY_RHS:
b66113e9 1402 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1403 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1404 gimple_assign_rhs_code (stmt));
a77af182 1405 default:;
8b7773a4 1406 }
685b0d13
MJ
1407 return;
1408 }
1409
1410 if (TREE_CODE (op1) != ADDR_EXPR)
1411 return;
1412 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1413 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1414 return;
588db50c 1415 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1416 offset_int mem_offset;
1417 if (!base
1418 || TREE_CODE (base) != MEM_REF
1419 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1420 return;
aca52e6f 1421 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1422 ssa = TREE_OPERAND (base, 0);
1423 if (TREE_CODE (ssa) != SSA_NAME
1424 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1425 || offset < 0)
685b0d13
MJ
1426 return;
1427
b8f6e610 1428 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1429 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1430 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1431 ipa_set_ancestor_jf (jfunc, offset, index,
1432 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1433}
1434
40591473
MJ
1435/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1436 it looks like:
1437
1438 iftmp.1_3 = &obj_2(D)->D.1762;
1439
1440 The base of the MEM_REF must be a default definition SSA NAME of a
1441 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1442 whole MEM_REF expression is returned and the offset calculated from any
1443 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1444 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1445
1446static tree
355fe088 1447get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1448{
588db50c 1449 HOST_WIDE_INT size;
40591473 1450 tree expr, parm, obj;
ee45a32d 1451 bool reverse;
40591473
MJ
1452
1453 if (!gimple_assign_single_p (assign))
1454 return NULL_TREE;
1455 expr = gimple_assign_rhs1 (assign);
1456
1457 if (TREE_CODE (expr) != ADDR_EXPR)
1458 return NULL_TREE;
1459 expr = TREE_OPERAND (expr, 0);
1460 obj = expr;
588db50c 1461 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1462
aca52e6f
RS
1463 offset_int mem_offset;
1464 if (!expr
1465 || TREE_CODE (expr) != MEM_REF
1466 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1467 return NULL_TREE;
1468 parm = TREE_OPERAND (expr, 0);
1469 if (TREE_CODE (parm) != SSA_NAME
1470 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1471 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1472 return NULL_TREE;
1473
aca52e6f 1474 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1475 *obj_p = obj;
1476 return expr;
1477}
1478
685b0d13 1479
b258210c
MJ
1480/* Given that an actual argument is an SSA_NAME that is a result of a phi
1481 statement PHI, try to find out whether NAME is in fact a
1482 multiple-inheritance typecast from a descendant into an ancestor of a formal
1483 parameter and thus can be described by an ancestor jump function and if so,
1484 write the appropriate function into JFUNC.
1485
1486 Essentially we want to match the following pattern:
1487
1488 if (obj_2(D) != 0B)
1489 goto <bb 3>;
1490 else
1491 goto <bb 4>;
1492
1493 <bb 3>:
1494 iftmp.1_3 = &obj_2(D)->D.1762;
1495
1496 <bb 4>:
1497 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1498 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1499 return D.1879_6; */
1500
1501static void
56b40062 1502compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1503 class ipa_node_params *info,
b258210c 1504 struct ipa_jump_func *jfunc,
538dd0b7 1505 gcall *call, gphi *phi)
b258210c 1506{
40591473 1507 HOST_WIDE_INT offset;
355fe088 1508 gimple *assign, *cond;
b258210c 1509 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1510 tree tmp, parm, expr, obj;
b258210c
MJ
1511 int index, i;
1512
54e348cb 1513 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1514 return;
1515
54e348cb
MJ
1516 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1517 tmp = PHI_ARG_DEF (phi, 0);
1518 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1519 tmp = PHI_ARG_DEF (phi, 1);
1520 else
1521 return;
b258210c
MJ
1522 if (TREE_CODE (tmp) != SSA_NAME
1523 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1524 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1525 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1526 return;
1527
1528 assign = SSA_NAME_DEF_STMT (tmp);
1529 assign_bb = gimple_bb (assign);
40591473 1530 if (!single_pred_p (assign_bb))
b258210c 1531 return;
40591473
MJ
1532 expr = get_ancestor_addr_info (assign, &obj, &offset);
1533 if (!expr)
b258210c
MJ
1534 return;
1535 parm = TREE_OPERAND (expr, 0);
b258210c 1536 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1537 if (index < 0)
1538 return;
b258210c
MJ
1539
1540 cond_bb = single_pred (assign_bb);
1541 cond = last_stmt (cond_bb);
69610617
SB
1542 if (!cond
1543 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1544 || gimple_cond_code (cond) != NE_EXPR
1545 || gimple_cond_lhs (cond) != parm
1546 || !integer_zerop (gimple_cond_rhs (cond)))
1547 return;
1548
b258210c
MJ
1549 phi_bb = gimple_bb (phi);
1550 for (i = 0; i < 2; i++)
1551 {
1552 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1553 if (pred != assign_bb && pred != cond_bb)
1554 return;
1555 }
1556
3b97a5c7
MJ
1557 ipa_set_ancestor_jf (jfunc, offset, index,
1558 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1559}
1560
be95e2b9
MJ
1561/* Inspect the given TYPE and return true iff it has the same structure (the
1562 same number of fields of the same types) as a C++ member pointer. If
1563 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1564 corresponding fields there. */
1565
3e293154
MJ
1566static bool
1567type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1568{
1569 tree fld;
1570
1571 if (TREE_CODE (type) != RECORD_TYPE)
1572 return false;
1573
1574 fld = TYPE_FIELDS (type);
1575 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1576 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1577 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1578 return false;
1579
1580 if (method_ptr)
1581 *method_ptr = fld;
1582
910ad8de 1583 fld = DECL_CHAIN (fld);
8b7773a4 1584 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1585 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1586 return false;
1587 if (delta)
1588 *delta = fld;
1589
910ad8de 1590 if (DECL_CHAIN (fld))
3e293154
MJ
1591 return false;
1592
1593 return true;
1594}
1595
61502ca8 1596/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
eb270950
FX
1597 return the rhs of its defining statement, and this statement is stored in
1598 *RHS_STMT. Otherwise return RHS as it is. */
7ec49257
MJ
1599
1600static inline tree
eb270950 1601get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
7ec49257
MJ
1602{
1603 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1604 {
355fe088 1605 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1606
1607 if (gimple_assign_single_p (def_stmt))
1608 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1609 else
1610 break;
eb270950 1611 *rhs_stmt = def_stmt;
7ec49257
MJ
1612 }
1613 return rhs;
1614}
1615
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  Entries are kept sorted by
     ascending OFFSET (see add_to_agg_contents_list).  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1632
eb270950
FX
1633/* Add an aggregate content item into a linked list of
1634 ipa_known_agg_contents_list structure, in which all elements
1635 are sorted ascendingly by offset. */
0d48ee34 1636
46771da5
FX
1637static inline void
1638add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1639 struct ipa_known_agg_contents_list *item)
0d48ee34 1640{
46771da5
FX
1641 struct ipa_known_agg_contents_list *list = *plist;
1642
1643 for (; list; list = list->next)
0d48ee34 1644 {
46771da5
FX
1645 if (list->offset >= item->offset)
1646 break;
1647
1648 plist = &list->next;
0d48ee34
MJ
1649 }
1650
46771da5
FX
1651 item->next = list;
1652 *plist = item;
1653}
1654
eb270950 1655/* Check whether a given aggregate content is clobbered by certain element in
46771da5
FX
1656 a linked list of ipa_known_agg_contents_list. */
1657
1658static inline bool
1659clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1660 struct ipa_known_agg_contents_list *item)
1661{
1662 for (; list; list = list->next)
0d48ee34 1663 {
46771da5
FX
1664 if (list->offset >= item->offset)
1665 return list->offset < item->offset + item->size;
1666
1667 if (list->offset + list->size > item->offset)
1668 return true;
0d48ee34 1669 }
46771da5
FX
1670
1671 return false;
0d48ee34
MJ
1672}
1673
1674/* Build aggregate jump function from LIST, assuming there are exactly
eb270950 1675 VALUE_COUNT entries there and that offset of the passed argument
0d48ee34
MJ
1676 is ARG_OFFSET and store it into JFUNC. */
1677
1678static void
1679build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
eb270950 1680 int value_count, HOST_WIDE_INT arg_offset,
0d48ee34
MJ
1681 struct ipa_jump_func *jfunc)
1682{
eb270950
FX
1683 vec_alloc (jfunc->agg.items, value_count);
1684 for (; list; list = list->next)
1685 {
1686 struct ipa_agg_jf_item item;
1687 tree operand = list->value.pass_through.operand;
1688
1689 if (list->value.pass_through.formal_id >= 0)
1690 {
1691 /* Content value is derived from some formal paramerter. */
1692 if (list->value.offset >= 0)
1693 item.jftype = IPA_JF_LOAD_AGG;
1694 else
1695 item.jftype = IPA_JF_PASS_THROUGH;
1696
1697 item.value.load_agg = list->value;
1698 if (operand)
1699 item.value.pass_through.operand
1700 = unshare_expr_without_location (operand);
1701 }
1702 else if (operand)
1703 {
1704 /* Content value is known constant. */
1705 item.jftype = IPA_JF_CONST;
1706 item.value.constant = unshare_expr_without_location (operand);
1707 }
1708 else
1709 continue;
1710
1711 item.type = list->type;
1712 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1713
1714 item.offset = list->offset - arg_offset;
1715 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1716
1717 jfunc->agg.items->quick_push (item);
1718 }
1719}
1720
1721/* Given an assignment statement STMT, try to collect information into
1722 AGG_VALUE that will be used to construct jump function for RHS of the
1723 assignment, from which content value of an aggregate part comes.
1724
1725 Besides constant and simple pass-through jump functions, also try to
1726 identify whether it matches the following pattern that can be described by
1727 a load-value-from-aggregate jump function, which is a derivative of simple
1728 pass-through jump function.
1729
1730 foo (int *p)
1731 {
1732 ...
1733
1734 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1735 bar (q_5);
1736 }
1737
1738 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1739 constant, simple pass-through and load-vale-from-aggregate. If value
1740 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1741 set to -1. For simple pass-through and load-value-from-aggregate, field
1742 FORMAL_ID specifies the related formal parameter index, and field
1743 OFFSET can be used to distinguish them, -1 means simple pass-through,
1744 otherwise means load-value-from-aggregate. */
1745
1746static void
1747analyze_agg_content_value (struct ipa_func_body_info *fbi,
1748 struct ipa_load_agg_data *agg_value,
1749 gimple *stmt)
1750{
1751 tree lhs = gimple_assign_lhs (stmt);
1752 tree rhs1 = gimple_assign_rhs1 (stmt);
1753 enum tree_code code;
1754 int index = -1;
1755
1756 /* Initialize jump function data for the aggregate part. */
1757 memset (agg_value, 0, sizeof (*agg_value));
1758 agg_value->pass_through.operation = NOP_EXPR;
1759 agg_value->pass_through.formal_id = -1;
1760 agg_value->offset = -1;
1761
1762 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1763 || TREE_THIS_VOLATILE (lhs)
1764 || TREE_CODE (lhs) == BIT_FIELD_REF
1765 || contains_bitfld_component_ref_p (lhs))
1766 return;
1767
1768 /* Skip SSA copies. */
1769 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1770 {
1771 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1772 break;
1773
1774 stmt = SSA_NAME_DEF_STMT (rhs1);
1775 if (!is_gimple_assign (stmt))
1776 return;
1777
1778 rhs1 = gimple_assign_rhs1 (stmt);
1779 }
1780
1781 code = gimple_assign_rhs_code (stmt);
1782 switch (gimple_assign_rhs_class (stmt))
0d48ee34 1783 {
eb270950
FX
1784 case GIMPLE_SINGLE_RHS:
1785 if (is_gimple_ip_invariant (rhs1))
0d48ee34 1786 {
eb270950
FX
1787 agg_value->pass_through.operand = rhs1;
1788 return;
0d48ee34 1789 }
eb270950
FX
1790 code = NOP_EXPR;
1791 break;
1792
1793 case GIMPLE_UNARY_RHS:
1794 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1795 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1796 tcc_binary, this subtleness is somewhat misleading.
1797
1798 Since tcc_unary is widely used in IPA-CP code to check an operation
1799 with one operand, here we only allow tc_unary operation to avoid
1800 possible problem. Then we can use (opclass == tc_unary) or not to
1801 distinguish unary and binary. */
1802 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1803 return;
1804
1805 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1806 break;
1807
1808 case GIMPLE_BINARY_RHS:
1809 {
1810 gimple *rhs1_stmt = stmt;
1811 gimple *rhs2_stmt = stmt;
1812 tree rhs2 = gimple_assign_rhs2 (stmt);
1813
1814 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1815 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1816
1817 if (is_gimple_ip_invariant (rhs2))
1818 {
1819 agg_value->pass_through.operand = rhs2;
1820 stmt = rhs1_stmt;
1821 }
1822 else if (is_gimple_ip_invariant (rhs1))
1823 {
1824 if (TREE_CODE_CLASS (code) == tcc_comparison)
1825 code = swap_tree_comparison (code);
1826 else if (!commutative_tree_code (code))
1827 return;
1828
1829 agg_value->pass_through.operand = rhs1;
1830 stmt = rhs2_stmt;
1831 rhs1 = rhs2;
1832 }
1833 else
1834 return;
1835
1836 if (TREE_CODE_CLASS (code) != tcc_comparison
1837 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
1838 return;
1839 }
1840 break;
1841
1842 default:
1843 return;
1844 }
1845
1846 if (TREE_CODE (rhs1) != SSA_NAME)
1847 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1848 &agg_value->offset,
1849 &agg_value->by_ref);
1850 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1851 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1852
1853 if (index >= 0)
1854 {
1855 if (agg_value->offset >= 0)
1856 agg_value->type = TREE_TYPE (rhs1);
1857 agg_value->pass_through.formal_id = index;
1858 agg_value->pass_through.operation = code;
0d48ee34 1859 }
eb270950
FX
1860 else
1861 agg_value->pass_through.operand = NULL_TREE;
0d48ee34
MJ
1862}
1863
46771da5
FX
1864/* If STMT is a memory store to the object whose address is BASE, extract
1865 information (offset, size, and value) into CONTENT, and return true,
1866 otherwise we conservatively assume the whole object is modified with
1867 unknown content, and return false. CHECK_REF means that access to object
1868 is expected to be in form of MEM_REF expression. */
1869
1870static bool
eb270950
FX
1871extract_mem_content (struct ipa_func_body_info *fbi,
1872 gimple *stmt, tree base, bool check_ref,
46771da5
FX
1873 struct ipa_known_agg_contents_list *content)
1874{
1875 HOST_WIDE_INT lhs_offset, lhs_size;
46771da5
FX
1876 bool reverse;
1877
eb270950 1878 if (!is_gimple_assign (stmt))
46771da5
FX
1879 return false;
1880
eb270950
FX
1881 tree lhs = gimple_assign_lhs (stmt);
1882 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1883 &reverse);
46771da5
FX
1884 if (!lhs_base)
1885 return false;
1886
1887 if (check_ref)
1888 {
1889 if (TREE_CODE (lhs_base) != MEM_REF
1890 || TREE_OPERAND (lhs_base, 0) != base
1891 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1892 return false;
1893 }
1894 else if (lhs_base != base)
1895 return false;
1896
46771da5 1897 content->offset = lhs_offset;
eb270950
FX
1898 content->size = lhs_size;
1899 content->type = TREE_TYPE (lhs);
46771da5
FX
1900 content->next = NULL;
1901
eb270950 1902 analyze_agg_content_value (fbi, &content->value, stmt);
46771da5
FX
1903 return true;
1904}
1905
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  /* Cap on how many aggregate items we track, taken from the per-function
     --param ipa-max-agg-items setting.  */
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Let the alias-oracle walker skip over PHIs whose arguments all
	     lead to the same dominating definition, bounded by the
	     caller-provided AA walk budget.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* Bail out of the whole walk on the first clobber we cannot
	     describe precisely.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
2052
46771da5 2053
5d5f1e95
KV
2054/* Return the Ith param type of callee associated with call graph
2055 edge E. */
2056
2057tree
06d65050
JH
2058ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2059{
2060 int n;
2061 tree type = (e->callee
67348ccc 2062 ? TREE_TYPE (e->callee->decl)
06d65050
JH
2063 : gimple_call_fntype (e->call_stmt));
2064 tree t = TYPE_ARG_TYPES (type);
2065
2066 for (n = 0; n < i; n++)
2067 {
2068 if (!t)
2069 break;
2070 t = TREE_CHAIN (t);
2071 }
2072 if (t)
2073 return TREE_VALUE (t);
2074 if (!e->callee)
2075 return NULL;
67348ccc 2076 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
2077 for (n = 0; n < i; n++)
2078 {
2079 if (!t)
2080 return NULL;
2081 t = TREE_CHAIN (t);
2082 }
2083 if (t)
2084 return TREE_TYPE (t);
2085 return NULL;
2086}
2087
86cd0334
MJ
2088/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2089 allocated structure or a previously existing one shared with other jump
2090 functions and/or transformation summaries. */
2091
2092ipa_bits *
2093ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2094{
2095 ipa_bits tmp;
2096 tmp.value = value;
2097 tmp.mask = mask;
2098
2099 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2100 if (*slot)
2101 return *slot;
2102
2103 ipa_bits *res = ggc_alloc<ipa_bits> ();
2104 res->value = value;
2105 res->mask = mask;
2106 *slot = res;
2107
2108 return res;
2109}
2110
2111/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2112 table in order to avoid creating multiple same ipa_bits structures. */
2113
2114static void
2115ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2116 const widest_int &mask)
2117{
2118 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2119}
2120
2121/* Return a pointer to a value_range just like *TMP, but either find it in
2122 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2123
028d81b1
AH
2124static value_range *
2125ipa_get_value_range (value_range *tmp)
86cd0334 2126{
028d81b1 2127 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
2128 if (*slot)
2129 return *slot;
2130
4ba9fb0a 2131 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
86cd0334
MJ
2132 *vr = *tmp;
2133 *slot = vr;
2134
2135 return vr;
2136}
2137
2138/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2139 equiv set. Use hash table in order to avoid creating multiple same copies of
2140 value_ranges. */
2141
028d81b1 2142static value_range *
5d462877 2143ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
86cd0334 2144{
5d462877 2145 value_range tmp (min, max, kind);
86cd0334
MJ
2146 return ipa_get_value_range (&tmp);
2147}
2148
2149/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2150 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2151 same value_range structures. */
2152
2153static void
54994253 2154ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
86cd0334
MJ
2155 tree min, tree max)
2156{
2157 jf->m_vr = ipa_get_value_range (type, min, max);
2158}
2159
46771da5 2160/* Assign to JF a pointer to a value_range just like TMP but either fetch a
86cd0334
MJ
2161 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2162
2163static void
028d81b1 2164ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
86cd0334
MJ
2165{
2166 jf->m_vr = ipa_get_value_range (tmp);
2167}
2168
3e293154
MJ
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or when jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* For pointer arguments, try to determine a polymorphic call context
	 usable for devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record value-range information: non-NULL-ness for pointers,
	 numeric ranges for other SSA names.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      /* Known non-NULL pointer is encoded as the anti-range ~[0, 0].  */
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      /* Convert the range to the callee's parameter type before
		 storing it in the jump function.  */
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: non-zero bits for integers, alignment-derived
	 bits for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  /* Bits known to be zero because of the alignment are cleared in
	     the mask; the misalignment itself becomes the known value.  */
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Classify the argument into one of the jump function kinds.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2346
749aa96d 2347/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2348 from BB. */
749aa96d 2349
062c604f 2350static void
56b40062 2351ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2352{
8aab5218
MJ
2353 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2354 int i;
749aa96d
MJ
2355 struct cgraph_edge *cs;
2356
8aab5218 2357 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2358 {
8aab5218 2359 struct cgraph_node *callee = cs->callee;
749aa96d 2360
8aab5218
MJ
2361 if (callee)
2362 {
d7caa129 2363 callee = callee->ultimate_alias_target ();
8aab5218
MJ
2364 /* We do not need to bother analyzing calls to unknown functions
2365 unless they may become known during lto/whopr. */
2366 if (!callee->definition && !flag_lto)
2367 continue;
2368 }
2369 ipa_compute_jump_functions_for_edge (fbi, cs);
2370 }
749aa96d
MJ
2371}
2372
8b7773a4
MJ
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load can be either a COMPONENT_REF of a MEM_REF (field access) or
     a bare MEM_REF with a constant offset.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type has the member-pointer shape
     (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* Field access form: the MEM_REF offset must be zero and the field
	 must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Bare MEM_REF form: the offset must match the field's byte position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
								: NULL_TREE;
}
2424
2425/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2426
3e293154
MJ
2427static bool
2428ipa_is_ssa_with_stmt_def (tree t)
2429{
2430 if (TREE_CODE (t) == SSA_NAME
2431 && !SSA_NAME_IS_DEFAULT_DEF (t))
2432 return true;
2433 else
2434 return false;
2435}
2436
40591473
MJ
2437/* Find the indirect call graph edge corresponding to STMT and mark it as a
2438 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
40a777e8
JH
2439 indirect call graph edge.
2440 If POLYMORPHIC is true record is as a destination of polymorphic call. */
be95e2b9 2441
40591473 2442static struct cgraph_edge *
538dd0b7 2443ipa_note_param_call (struct cgraph_node *node, int param_index,
40a777e8 2444 gcall *stmt, bool polymorphic)
3e293154 2445{
e33c6cd6 2446 struct cgraph_edge *cs;
3e293154 2447
d52f5295 2448 cs = node->get_edge (stmt);
b258210c 2449 cs->indirect_info->param_index = param_index;
8b7773a4 2450 cs->indirect_info->agg_contents = 0;
c13bc3d9 2451 cs->indirect_info->member_ptr = 0;
91bb9f80 2452 cs->indirect_info->guaranteed_unmodified = 0;
40a777e8
JH
2453 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2454 param_index, true);
2455 if (cs->indirect_info->polymorphic || polymorphic)
2456 ipa_set_param_used_by_polymorphic_call
2457 (IPA_NODE_REF (node), param_index, true);
40591473 2458 return cs;
3e293154
MJ
2459}
2460
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called pointer is a formal parameter itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* The called pointer may be loaded from an aggregate passed by value or
     reference in a parameter.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Exactly one PHI argument must be a pfn load from a member-pointer
     parameter; the other block computes the virtual-call address.  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be (pfn & 1), possibly preceded by a conversion.  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on where the target stores the virtual bit, the tested value
     is loaded either from the pfn or the delta field; it must come from
     the same parameter as the called pfn.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2657
b258210c
MJ
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the formal parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type of the object may have changed between
	 function entry and the call.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      /* The object may be an ancestor (a base at some offset) of a formal
	 parameter.  */
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2714
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  /* Only indirect calls through an SSA name or virtual method calls are
     interesting here.  */
  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      /* Remember whether the vptr may have changed since the object was
	 constructed; if not, the context can be used for devirtualization.  */
      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2760
2761
e33c6cd6 2762/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2763 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2764 formal parameters are called. */
be95e2b9 2765
3e293154 2766static void
355fe088 2767ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2768{
726a989a 2769 if (is_gimple_call (stmt))
538dd0b7 2770 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2771}
2772
2773/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2774 If OP is a parameter declaration, mark it as used in the info structure
2775 passed in DATA. */
2776
2777static bool
355fe088 2778visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2779{
99b1c316 2780 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2781
2782 op = get_base_address (op);
2783 if (op
2784 && TREE_CODE (op) == PARM_DECL)
2785 {
2786 int index = ipa_get_param_decl_index (info, op);
2787 gcc_assert (index >= 0);
310bc633 2788 ipa_set_param_used (info, index, true);
062c604f
MJ
2789 }
2790
2791 return false;
3e293154
MJ
2792}
2793
8aab5218
MJ
2794/* Scan the statements in BB and inspect the uses of formal parameters. Store
2795 the findings in various structures of the associated ipa_node_params
2796 structure, such as parameter flags, notes etc. FBI holds various data about
2797 the function being analyzed. */
be95e2b9 2798
062c604f 2799static void
56b40062 2800ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2801{
726a989a 2802 gimple_stmt_iterator gsi;
8aab5218
MJ
2803 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2804 {
355fe088 2805 gimple *stmt = gsi_stmt (gsi);
3e293154 2806
8aab5218
MJ
2807 if (is_gimple_debug (stmt))
2808 continue;
3e293154 2809
8aab5218
MJ
2810 ipa_analyze_stmt_uses (fbi, stmt);
2811 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2812 visit_ref_for_mod_analysis,
2813 visit_ref_for_mod_analysis,
2814 visit_ref_for_mod_analysis);
5fe8e757 2815 }
8aab5218
MJ
2816 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2817 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2818 visit_ref_for_mod_analysis,
2819 visit_ref_for_mod_analysis,
2820 visit_ref_for_mod_analysis);
2821}
2822
2823/* Calculate controlled uses of parameters of NODE. */
2824
2825static void
2826ipa_analyze_controlled_uses (struct cgraph_node *node)
2827{
99b1c316 2828 class ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2829
8aab5218 2830 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2831 {
2832 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2833 int controlled_uses = 0;
2834
062c604f
MJ
2835 /* For SSA regs see if parameter is used. For non-SSA we compute
2836 the flag during modification analysis. */
4502fe8d
MJ
2837 if (is_gimple_reg (parm))
2838 {
67348ccc 2839 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2840 parm);
2841 if (ddef && !has_zero_uses (ddef))
2842 {
2843 imm_use_iterator imm_iter;
2844 use_operand_p use_p;
2845
2846 ipa_set_param_used (info, i, true);
2847 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2848 if (!is_gimple_call (USE_STMT (use_p)))
2849 {
c6de6665
JJ
2850 if (!is_gimple_debug (USE_STMT (use_p)))
2851 {
2852 controlled_uses = IPA_UNDESCRIBED_USE;
2853 break;
2854 }
4502fe8d
MJ
2855 }
2856 else
2857 controlled_uses++;
2858 }
2859 else
2860 controlled_uses = 0;
2861 }
2862 else
2863 controlled_uses = IPA_UNDESCRIBED_USE;
2864 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2865 }
8aab5218 2866}
062c604f 2867
8aab5218 2868/* Free stuff in BI. */
062c604f 2869
8aab5218
MJ
2870static void
2871free_ipa_bb_info (struct ipa_bb_info *bi)
2872{
2873 bi->cg_edges.release ();
2874 bi->param_aa_statuses.release ();
3e293154
MJ
2875}
2876
8aab5218 2877/* Dominator walker driving the analysis. */
2c9561b5 2878
8aab5218 2879class analysis_dom_walker : public dom_walker
2c9561b5 2880{
8aab5218 2881public:
56b40062 2882 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2883 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2884
3daacdcd 2885 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2886
2887private:
56b40062 2888 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2889};
2890
3daacdcd 2891edge
8aab5218
MJ
2892analysis_dom_walker::before_dom_children (basic_block bb)
2893{
2894 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2895 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2896 return NULL;
2c9561b5
MJ
2897}
2898
c3431191
ML
2899/* Release body info FBI. */
2900
2901void
2902ipa_release_body_info (struct ipa_func_body_info *fbi)
2903{
2904 int i;
2905 struct ipa_bb_info *bi;
2906
2907 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2908 free_ipa_bb_info (bi);
2909 fbi->bb_infos.release ();
2910}
2911
026c3cfd 2912/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2913 of NODE, analyze their uses and compute jump functions associated
2914 with actual arguments of calls from within NODE. */
062c604f
MJ
2915
2916void
2917ipa_analyze_node (struct cgraph_node *node)
2918{
56b40062 2919 struct ipa_func_body_info fbi;
99b1c316 2920 class ipa_node_params *info;
062c604f 2921
57dbdc5a
MJ
2922 ipa_check_create_node_params ();
2923 ipa_check_create_edge_args ();
6cf67b62 2924 info = IPA_NODE_REF_GET_CREATE (node);
8aab5218
MJ
2925
2926 if (info->analysis_done)
2927 return;
2928 info->analysis_done = 1;
2929
2930 if (ipa_func_spec_opts_forbid_analysis_p (node))
2931 {
2932 for (int i = 0; i < ipa_get_param_count (info); i++)
2933 {
2934 ipa_set_param_used (info, i, true);
2935 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2936 }
2937 return;
2938 }
2939
2940 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2941 push_cfun (func);
2942 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2943 ipa_initialize_node_params (node);
8aab5218 2944 ipa_analyze_controlled_uses (node);
062c604f 2945
8aab5218
MJ
2946 fbi.node = node;
2947 fbi.info = IPA_NODE_REF (node);
2948 fbi.bb_infos = vNULL;
cb3874dc 2949 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
8aab5218 2950 fbi.param_count = ipa_get_param_count (info);
fdfd7f53 2951 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
062c604f 2952
8aab5218
MJ
2953 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2954 {
2955 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2956 bi->cg_edges.safe_push (cs);
2957 }
062c604f 2958
8aab5218
MJ
2959 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2960 {
2961 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2962 bi->cg_edges.safe_push (cs);
2963 }
2964
2965 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2966
c3431191 2967 ipa_release_body_info (&fbi);
8aab5218 2968 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2969 pop_cfun ();
062c604f 2970}
062c604f 2971
be95e2b9 2972/* Update the jump functions associated with call graph edge E when the call
3e293154 2973 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2974 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2975
3e293154
MJ
2976static void
2977update_jump_functions_after_inlining (struct cgraph_edge *cs,
2978 struct cgraph_edge *e)
2979{
99b1c316
MS
2980 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2981 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
2982 if (!args)
2983 return;
3e293154
MJ
2984 int count = ipa_get_cs_argument_count (args);
2985 int i;
2986
2987 for (i = 0; i < count; i++)
2988 {
b258210c 2989 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 2990 class ipa_polymorphic_call_context *dst_ctx
5ce97055 2991 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2992
eb270950
FX
2993 if (dst->agg.items)
2994 {
2995 struct ipa_agg_jf_item *item;
2996 int j;
2997
2998 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2999 {
3000 int dst_fid;
3001 struct ipa_jump_func *src;
3002
3003 if (item->jftype != IPA_JF_PASS_THROUGH
3004 && item->jftype != IPA_JF_LOAD_AGG)
3005 continue;
3006
3007 dst_fid = item->value.pass_through.formal_id;
1c3c3f45 3008 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
eb270950
FX
3009 {
3010 item->jftype = IPA_JF_UNKNOWN;
3011 continue;
3012 }
3013
3014 item->value.pass_through.formal_id = -1;
3015 src = ipa_get_ith_jump_func (top, dst_fid);
3016 if (src->type == IPA_JF_CONST)
3017 {
3018 if (item->jftype == IPA_JF_PASS_THROUGH
3019 && item->value.pass_through.operation == NOP_EXPR)
3020 {
3021 item->jftype = IPA_JF_CONST;
3022 item->value.constant = src->value.constant.value;
3023 continue;
3024 }
3025 }
3026 else if (src->type == IPA_JF_PASS_THROUGH
3027 && src->value.pass_through.operation == NOP_EXPR)
3028 {
3029 if (item->jftype == IPA_JF_PASS_THROUGH
3030 || !item->value.load_agg.by_ref
3031 || src->value.pass_through.agg_preserved)
3032 item->value.pass_through.formal_id
3033 = src->value.pass_through.formal_id;
3034 }
3035 else if (src->type == IPA_JF_ANCESTOR)
3036 {
3037 if (item->jftype == IPA_JF_PASS_THROUGH)
3038 {
3039 if (!src->value.ancestor.offset)
3040 item->value.pass_through.formal_id
3041 = src->value.ancestor.formal_id;
3042 }
3043 else if (src->value.ancestor.agg_preserved)
3044 {
3045 gcc_checking_assert (item->value.load_agg.by_ref);
3046
3047 item->value.pass_through.formal_id
3048 = src->value.ancestor.formal_id;
3049 item->value.load_agg.offset
3050 += src->value.ancestor.offset;
3051 }
3052 }
3053
3054 if (item->value.pass_through.formal_id < 0)
3055 item->jftype = IPA_JF_UNKNOWN;
3056 }
3057 }
3058
1c3c3f45
FX
3059 if (!top)
3060 {
3061 ipa_set_jf_unknown (dst);
3062 continue;
3063 }
3064
685b0d13
MJ
3065 if (dst->type == IPA_JF_ANCESTOR)
3066 {
b258210c 3067 struct ipa_jump_func *src;
8b7773a4 3068 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 3069 class ipa_polymorphic_call_context *src_ctx
5ce97055 3070 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 3071
b258210c
MJ
3072 /* Variable number of arguments can cause havoc if we try to access
3073 one that does not exist in the inlined edge. So make sure we
3074 don't. */
8b7773a4 3075 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 3076 {
04be694e 3077 ipa_set_jf_unknown (dst);
b258210c
MJ
3078 continue;
3079 }
3080
8b7773a4
MJ
3081 src = ipa_get_ith_jump_func (top, dst_fid);
3082
5ce97055
JH
3083 if (src_ctx && !src_ctx->useless_p ())
3084 {
99b1c316 3085 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3086
3087 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3088 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 3089 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3090 ctx.offset_by (dst->value.ancestor.offset);
3091 if (!ctx.useless_p ())
3092 {
a7d1f3fe
ML
3093 if (!dst_ctx)
3094 {
3095 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3096 count, true);
a7d1f3fe
ML
3097 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3098 }
3099
3100 dst_ctx->combine_with (ctx);
5ce97055
JH
3101 }
3102 }
3103
eb270950
FX
3104 /* Parameter and argument in ancestor jump function must be pointer
3105 type, which means access to aggregate must be by-reference. */
3106 gcc_assert (!src->agg.items || src->agg.by_ref);
3107
3108 if (src->agg.items && dst->value.ancestor.agg_preserved)
8b7773a4
MJ
3109 {
3110 struct ipa_agg_jf_item *item;
3111 int j;
3112
3113 /* Currently we do not produce clobber aggregate jump functions,
3114 replace with merging when we do. */
3115 gcc_assert (!dst->agg.items);
3116
9771b263 3117 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3118 dst->agg.by_ref = src->agg.by_ref;
9771b263 3119 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
3120 item->offset -= dst->value.ancestor.offset;
3121 }
3122
3b97a5c7
MJ
3123 if (src->type == IPA_JF_PASS_THROUGH
3124 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
3125 {
3126 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3127 dst->value.ancestor.agg_preserved &=
3128 src->value.pass_through.agg_preserved;
3129 }
b258210c
MJ
3130 else if (src->type == IPA_JF_ANCESTOR)
3131 {
3132 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3133 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
3134 dst->value.ancestor.agg_preserved &=
3135 src->value.ancestor.agg_preserved;
b258210c
MJ
3136 }
3137 else
04be694e 3138 ipa_set_jf_unknown (dst);
b258210c
MJ
3139 }
3140 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 3141 {
b258210c
MJ
3142 struct ipa_jump_func *src;
3143 /* We must check range due to calls with variable number of arguments
3144 and we cannot combine jump functions with operations. */
3145 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 3146 && (top && dst->value.pass_through.formal_id
b258210c
MJ
3147 < ipa_get_cs_argument_count (top)))
3148 {
8b7773a4
MJ
3149 int dst_fid = dst->value.pass_through.formal_id;
3150 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 3151 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 3152 class ipa_polymorphic_call_context *src_ctx
5ce97055 3153 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 3154
5ce97055
JH
3155 if (src_ctx && !src_ctx->useless_p ())
3156 {
99b1c316 3157 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3158
3159 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3160 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 3161 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3162 if (!ctx.useless_p ())
3163 {
3164 if (!dst_ctx)
3165 {
3166 vec_safe_grow_cleared (args->polymorphic_call_contexts,
cb3874dc 3167 count, true);
5ce97055
JH
3168 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3169 }
3170 dst_ctx->combine_with (ctx);
3171 }
3172 }
b8f6e610
MJ
3173 switch (src->type)
3174 {
3175 case IPA_JF_UNKNOWN:
04be694e 3176 ipa_set_jf_unknown (dst);
b8f6e610 3177 break;
b8f6e610
MJ
3178 case IPA_JF_CONST:
3179 ipa_set_jf_cst_copy (dst, src);
3180 break;
3181
3182 case IPA_JF_PASS_THROUGH:
3183 {
3184 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3185 enum tree_code operation;
3186 operation = ipa_get_jf_pass_through_operation (src);
3187
3188 if (operation == NOP_EXPR)
3189 {
3b97a5c7 3190 bool agg_p;
b8f6e610
MJ
3191 agg_p = dst_agg_p
3192 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 3193 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 3194 }
a2b4c188
KV
3195 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3196 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
3197 else
3198 {
3199 tree operand = ipa_get_jf_pass_through_operand (src);
3200 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3201 operation);
3202 }
3203 break;
3204 }
3205 case IPA_JF_ANCESTOR:
3206 {
3b97a5c7 3207 bool agg_p;
b8f6e610
MJ
3208 agg_p = dst_agg_p
3209 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
3210 ipa_set_ancestor_jf (dst,
3211 ipa_get_jf_ancestor_offset (src),
b8f6e610 3212 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 3213 agg_p);
b8f6e610
MJ
3214 break;
3215 }
3216 default:
3217 gcc_unreachable ();
3218 }
8b7773a4
MJ
3219
3220 if (src->agg.items
b8f6e610 3221 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
3222 {
3223 /* Currently we do not produce clobber aggregate jump
3224 functions, replace with merging when we do. */
3225 gcc_assert (!dst->agg.items);
3226
3227 dst->agg.by_ref = src->agg.by_ref;
9771b263 3228 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3229 }
b258210c
MJ
3230 }
3231 else
04be694e 3232 ipa_set_jf_unknown (dst);
3e293154 3233 }
b258210c
MJ
3234 }
3235}
3236
5ce97055
JH
3237/* If TARGET is an addr_expr of a function declaration, make it the
3238 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3239 Otherwise, return NULL. */
b258210c 3240
3949c4a7 3241struct cgraph_edge *
5ce97055
JH
3242ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3243 bool speculative)
b258210c
MJ
3244{
3245 struct cgraph_node *callee;
48b1474e 3246 bool unreachable = false;
b258210c 3247
ceeffab0
MJ
3248 if (TREE_CODE (target) == ADDR_EXPR)
3249 target = TREE_OPERAND (target, 0);
b258210c 3250 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
3251 {
3252 target = canonicalize_constructor_val (target, NULL);
3253 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3254 {
db66bf68
JH
3255 /* Member pointer call that goes through a VMT lookup. */
3256 if (ie->indirect_info->member_ptr
3257 /* Or if target is not an invariant expression and we do not
3258 know if it will evaulate to function at runtime.
3259 This can happen when folding through &VAR, where &VAR
3260 is IP invariant, but VAR itself is not.
3261
3262 TODO: Revisit this when GCC 5 is branched. It seems that
3263 member_ptr check is not needed and that we may try to fold
3264 the expression and see if VAR is readonly. */
3265 || !is_gimple_ip_invariant (target))
3266 {
3267 if (dump_enabled_p ())
3268 {
4f5b9c80 3269 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
3270 "discovered direct call non-invariant %s\n",
3271 ie->caller->dump_name ());
db66bf68
JH
3272 }
3273 return NULL;
3274 }
3275
c13bc3d9 3276
2b5f0895
XDL
3277 if (dump_enabled_p ())
3278 {
4f5b9c80 3279 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 3280 "discovered direct call to non-function in %s, "
807b7d62 3281 "making it __builtin_unreachable\n",
464d0118 3282 ie->caller->dump_name ());
2b5f0895 3283 }
3c9e6fca 3284
48b1474e 3285 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3286 callee = cgraph_node::get_create (target);
48b1474e 3287 unreachable = true;
a0a7b611 3288 }
48b1474e 3289 else
d52f5295 3290 callee = cgraph_node::get (target);
a0a7b611 3291 }
48b1474e 3292 else
d52f5295 3293 callee = cgraph_node::get (target);
a0a7b611
JH
3294
3295 /* Because may-edges are not explicitely represented and vtable may be external,
3296 we may create the first reference to the object in the unit. */
a62bfab5 3297 if (!callee || callee->inlined_to)
a0a7b611 3298 {
a0a7b611
JH
3299
3300 /* We are better to ensure we can refer to it.
3301 In the case of static functions we are out of luck, since we already
3302 removed its body. In the case of public functions we may or may
3303 not introduce the reference. */
3304 if (!canonicalize_constructor_val (target, NULL)
3305 || !TREE_PUBLIC (target))
3306 {
3307 if (dump_file)
3308 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
845bb366 3309 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
3310 ie->caller->dump_name (),
3311 ie->callee->dump_name ());
a0a7b611
JH
3312 return NULL;
3313 }
d52f5295 3314 callee = cgraph_node::get_create (target);
a0a7b611 3315 }
2b5f0895 3316
0127c169
JH
3317 /* If the edge is already speculated. */
3318 if (speculative && ie->speculative)
3319 {
845bb366 3320 if (dump_file)
0127c169 3321 {
845bb366
JH
3322 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3323 if (!e2)
3324 {
3325 if (dump_file)
3326 fprintf (dump_file, "ipa-prop: Discovered call to a "
3327 "speculative target (%s -> %s) but the call is "
3328 "already speculated to different target. "
3329 "Giving up.\n",
3330 ie->caller->dump_name (), callee->dump_name ());
3331 }
3332 else
3333 {
3334 if (dump_file)
3335 fprintf (dump_file,
3336 "ipa-prop: Discovered call to a speculative target "
3337 "(%s -> %s) this agree with previous speculation.\n",
3338 ie->caller->dump_name (), callee->dump_name ());
3339 }
0127c169
JH
3340 }
3341 return NULL;
3342 }
3343
2b5f0895
XDL
3344 if (!dbg_cnt (devirt))
3345 return NULL;
3346
1dbee8c9 3347 ipa_check_create_node_params ();
ceeffab0 3348
67914693 3349 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 3350 the cgraph node too early. */
a62bfab5 3351 gcc_assert (!callee->inlined_to);
17afc0fe 3352
48b1474e 3353 if (dump_file && !unreachable)
b258210c 3354 {
5ce97055 3355 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 3356 "(%s -> %s), for stmt ",
b258210c 3357 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 3358 speculative ? "speculative" : "known",
464d0118
ML
3359 ie->caller->dump_name (),
3360 callee->dump_name ());
b258210c
MJ
3361 if (ie->call_stmt)
3362 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3363 else
3364 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 3365 }
2b5f0895
XDL
3366 if (dump_enabled_p ())
3367 {
4f5b9c80 3368 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62 3369 "converting indirect call in %s to direct call to %s\n",
3629ff8a 3370 ie->caller->dump_name (), callee->dump_name ());
2b5f0895 3371 }
5ce97055 3372 if (!speculative)
d8d5aef1
JH
3373 {
3374 struct cgraph_edge *orig = ie;
27c5a177 3375 ie = cgraph_edge::make_direct (ie, callee);
d8d5aef1
JH
3376 /* If we resolved speculative edge the cost is already up to date
3377 for direct call (adjusted by inline_edge_duplication_hook). */
3378 if (ie == orig)
3379 {
56f62793 3380 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
3381 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3382 - eni_size_weights.call_cost);
3383 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3384 - eni_time_weights.call_cost);
3385 }
3386 }
5ce97055
JH
3387 else
3388 {
3389 if (!callee->can_be_discarded_p ())
3390 {
3391 cgraph_node *alias;
3392 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3393 if (alias)
3394 callee = alias;
3395 }
d8d5aef1 3396 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3397 ie = ie->make_speculative
1bad9c18 3398 (callee, ie->count.apply_scale (8, 10));
5ce97055 3399 }
749aa96d 3400
b258210c 3401 return ie;
3e293154
MJ
3402}
3403
91bb9f80
MJ
3404/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3405 CONSTRUCTOR and return it. Return NULL if the search fails for some
3406 reason. */
3407
3408static tree
3409find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3410{
3411 tree type = TREE_TYPE (constructor);
3412 if (TREE_CODE (type) != ARRAY_TYPE
3413 && TREE_CODE (type) != RECORD_TYPE)
3414 return NULL;
3415
3416 unsigned ix;
3417 tree index, val;
3418 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3419 {
3420 HOST_WIDE_INT elt_offset;
3421 if (TREE_CODE (type) == ARRAY_TYPE)
3422 {
3423 offset_int off;
3424 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3425 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3426
3427 if (index)
3428 {
db9bbdec
RB
3429 if (TREE_CODE (index) == RANGE_EXPR)
3430 off = wi::to_offset (TREE_OPERAND (index, 0));
3431 else
3432 off = wi::to_offset (index);
91bb9f80
MJ
3433 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3434 {
3435 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3436 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3437 off = wi::sext (off - wi::to_offset (low_bound),
3438 TYPE_PRECISION (TREE_TYPE (index)));
3439 }
3440 off *= wi::to_offset (unit_size);
db9bbdec
RB
3441 /* ??? Handle more than just the first index of a
3442 RANGE_EXPR. */
91bb9f80
MJ
3443 }
3444 else
3445 off = wi::to_offset (unit_size) * ix;
3446
3447 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3448 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3449 continue;
3450 elt_offset = off.to_shwi ();
3451 }
3452 else if (TREE_CODE (type) == RECORD_TYPE)
3453 {
3454 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3455 if (DECL_BIT_FIELD (index))
3456 continue;
3457 elt_offset = int_bit_position (index);
3458 }
3459 else
3460 gcc_unreachable ();
3461
3462 if (elt_offset > req_offset)
3463 return NULL;
3464
3465 if (TREE_CODE (val) == CONSTRUCTOR)
3466 return find_constructor_constant_at_offset (val,
3467 req_offset - elt_offset);
3468
3469 if (elt_offset == req_offset
3470 && is_gimple_reg_type (TREE_TYPE (val))
3471 && is_gimple_ip_invariant (val))
3472 return val;
3473 }
3474 return NULL;
3475}
3476
3477/* Check whether SCALAR could be used to look up an aggregate interprocedural
3478 invariant from a static constructor and if so, return it. Otherwise return
3479 NULL. */
3480
3481static tree
3482ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3483{
3484 if (by_ref)
3485 {
3486 if (TREE_CODE (scalar) != ADDR_EXPR)
3487 return NULL;
3488 scalar = TREE_OPERAND (scalar, 0);
3489 }
3490
8813a647 3491 if (!VAR_P (scalar)
91bb9f80
MJ
3492 || !is_global_var (scalar)
3493 || !TREE_READONLY (scalar)
3494 || !DECL_INITIAL (scalar)
3495 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3496 return NULL;
3497
3498 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3499}
3500
eb270950
FX
3501/* Retrieve value from AGG, a set of known offset/value for an aggregate or
3502 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3503 return NULL if there is none. BY_REF specifies whether the value has to be
3504 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3505 the boolean it points to is set to true if the value comes from an
3506 initializer of a constant. */
8b7773a4
MJ
3507
3508tree
eb270950 3509ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
91bb9f80
MJ
3510 HOST_WIDE_INT offset, bool by_ref,
3511 bool *from_global_constant)
8b7773a4 3512{
eb270950 3513 struct ipa_agg_value *item;
8b7773a4
MJ
3514 int i;
3515
91bb9f80
MJ
3516 if (scalar)
3517 {
3518 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3519 if (res)
3520 {
3521 if (from_global_constant)
3522 *from_global_constant = true;
3523 return res;
3524 }
3525 }
3526
3527 if (!agg
3528 || by_ref != agg->by_ref)
8b7773a4
MJ
3529 return NULL;
3530
eb270950 3531 FOR_EACH_VEC_ELT (agg->items, i, item)
2c9561b5
MJ
3532 if (item->offset == offset)
3533 {
3534 /* Currently we do not have clobber values, return NULL for them once
3535 we do. */
3536 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3537 if (from_global_constant)
3538 *from_global_constant = false;
2c9561b5
MJ
3539 return item->value;
3540 }
8b7773a4
MJ
3541 return NULL;
3542}
3543
4502fe8d 3544/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3545 reference description RDESC. Return true if the reference has been
3546 successfully found and removed. */
4502fe8d 3547
568cda29 3548static bool
5e20cdc9 3549remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3550{
3551 struct ipa_ref *to_del;
3552 struct cgraph_edge *origin;
3553
3554 origin = rdesc->cs;
a854f856
MJ
3555 if (!origin)
3556 return false;
d122681a
ML
3557 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3558 origin->lto_stmt_uid);
568cda29
MJ
3559 if (!to_del)
3560 return false;
3561
d122681a 3562 to_del->remove_reference ();
4502fe8d 3563 if (dump_file)
464d0118 3564 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3629ff8a 3565 origin->caller->dump_name (), symbol->dump_name ());
568cda29 3566 return true;
4502fe8d
MJ
3567}
3568
3569/* If JFUNC has a reference description with refcount different from
3570 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3571 NULL. JFUNC must be a constant jump function. */
3572
3573static struct ipa_cst_ref_desc *
3574jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3575{
3576 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3577 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3578 return rdesc;
3579 else
3580 return NULL;
3581}
3582
568cda29
MJ
3583/* If the value of constant jump function JFUNC is an address of a function
3584 declaration, return the associated call graph node. Otherwise return
3585 NULL. */
3586
3587static cgraph_node *
3588cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3589{
3590 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3591 tree cst = ipa_get_jf_constant (jfunc);
3592 if (TREE_CODE (cst) != ADDR_EXPR
3593 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3594 return NULL;
3595
d52f5295 3596 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3597}
3598
3599
3600/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3601 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3602 the edge specified in the rdesc. Return false if either the symbol or the
3603 reference could not be found, otherwise return true. */
3604
3605static bool
3606try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3607{
3608 struct ipa_cst_ref_desc *rdesc;
3609 if (jfunc->type == IPA_JF_CONST
3610 && (rdesc = jfunc_rdesc_usable (jfunc))
3611 && --rdesc->refcount == 0)
3612 {
5e20cdc9 3613 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3614 if (!symbol)
3615 return false;
3616
3617 return remove_described_reference (symbol, rdesc);
3618 }
3619 return true;
3620}
3621
b258210c
MJ
3622/* Try to find a destination for indirect edge IE that corresponds to a simple
3623 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3624 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3625 the type of the parameter to which the result of JFUNC is passed. If it can
3626 be determined, return the newly direct edge, otherwise return NULL.
eb270950
FX
3627 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3628 relative to. */
be95e2b9 3629
b258210c
MJ
3630static struct cgraph_edge *
3631try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3632 struct ipa_jump_func *jfunc, tree target_type,
eb270950 3633 struct cgraph_node *new_root,
99b1c316 3634 class ipa_node_params *new_root_info)
b258210c 3635{
4502fe8d 3636 struct cgraph_edge *cs;
b258210c 3637 tree target;
042ae7d2 3638 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3639 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3640 if (agg_contents)
3641 {
3642 bool from_global_constant;
eb270950
FX
3643 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3644 new_root,
3645 &jfunc->agg);
3646 target = ipa_find_agg_cst_for_param (&agg, scalar,
91bb9f80
MJ
3647 ie->indirect_info->offset,
3648 ie->indirect_info->by_ref,
3649 &from_global_constant);
eb270950 3650 agg.release ();
91bb9f80
MJ
3651 if (target
3652 && !from_global_constant
3653 && !ie->indirect_info->guaranteed_unmodified)
3654 return NULL;
3655 }
b258210c 3656 else
91bb9f80 3657 target = scalar;
d250540a
MJ
3658 if (!target)
3659 return NULL;
4502fe8d
MJ
3660 cs = ipa_make_edge_direct_to_target (ie, target);
3661
a12cd2db 3662 if (cs && !agg_contents)
568cda29
MJ
3663 {
3664 bool ok;
3665 gcc_checking_assert (cs->callee
ae6d0907
MJ
3666 && (cs != ie
3667 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3668 || !cgraph_node_for_jfunc (jfunc)
3669 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3670 ok = try_decrement_rdesc_refcount (jfunc);
3671 gcc_checking_assert (ok);
3672 }
4502fe8d
MJ
3673
3674 return cs;
b258210c
MJ
3675}
3676
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.
   The returned decl is __builtin_unreachable, registered in the call graph so
   that later passes can treat the call as never executed.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  /* Make sure the builtin has a call graph node before it is used as a call
     target.  */
  cgraph_node::get_create (new_target);
  return new_target;
}

/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* Only speculate if the vptr may have changed since the
		     value we used for the lookup was taken.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second chance: consult the polymorphic context analysis.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* The target list is complete; the answer is definitive, not
	 speculative.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}

/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = IPA_NODE_REF (new_root);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* Fetch the successor first; IE may be turned into a direct edge
	 below.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember existing speculative targets so we can recognize when a
	 newly discovered direct target merely confirms a speculation.  */
      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Summaries may have been reallocated; refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stays indirect (or speculative); retarget its parameter
	 index through the jump function so it refers to NEW_ROOT's
	 parameters.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}

/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into inlined callees; edges leaving the inline tree only need
     their jump functions updated.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

3986
4502fe8d
MJ
3987/* Combine two controlled uses counts as done during inlining. */
3988
3989static int
3990combine_controlled_uses_counters (int c, int d)
3991{
3992 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3993 return IPA_UNDESCRIBED_USE;
3994 else
3995 return c + d - 1;
3996}
/* Propagate number of controlled users from CS->caleee to the new root of the
   tree of inlined nodes.  When a counter drops to zero, remove the
   cloning-created references that are no longer needed.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  /* Only walk arguments that correspond to formal parameters of the old
     root.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* No uses left; if the known constant is a function address,
		 drop the reference the clone held to it.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of IPA-CP clones and drop their
		     cloning-created references as well.  */
		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Extra arguments (beyond the formal parameter count) cannot be tracked;
     mark anything they describe as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}

/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  /* Keep the edge summary alive while any constant jump function
	     still carries a live reference descriptor.  */
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}

/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables is they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  /* The shared hash tables for bits and value-range information are created
     lazily here as well.  */
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}

4180
771578a0 4181/* Free all ipa_edge structures. */
be95e2b9 4182
518dc859 4183void
771578a0 4184ipa_free_all_edge_args (void)
518dc859 4185{
6fe906a3 4186 if (!ipa_edge_args_sum)
9771b263
DN
4187 return;
4188
ddf628e4 4189 ggc_delete (ipa_edge_args_sum);
6fe906a3 4190 ipa_edge_args_sum = NULL;
518dc859
RL
4191}
4192
771578a0 4193/* Free all ipa_node_params structures. */
be95e2b9 4194
518dc859 4195void
771578a0 4196ipa_free_all_node_params (void)
518dc859 4197{
bc2fcccd
JH
4198 if (ipa_node_params_sum)
4199 ggc_delete (ipa_node_params_sum);
dd912cb8 4200 ipa_node_params_sum = NULL;
771578a0
MJ
4201}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      /* New nodes should not get a transformation summary automatically.  */
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}

/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  /* The summary is GGC-allocated; run the destructor explicitly before
     freeing the storage.  */
  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* Make sure the transformation summary exists before storing into it.  */
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}

/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* Clear dangling back-pointers from reference descriptors that
	     still point at the edge being removed.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}

/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data strucutres accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* Aggregate items were shallow-copied above; deep-copy them here.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. splitting a
		 speculative edge): clone the IPA reference and give the new
		 edge its own descriptor.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* The descriptor belongs to the edge being duplicated; chain a
		 new descriptor onto its duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A duplicated pass-through means one more controlled use of the
	     corresponding parameter of the inline root.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}

4359
dd912cb8 4360/* Analyze newly added function into callgraph. */
be95e2b9 4361
771578a0 4362static void
dd912cb8 4363ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 4364{
dd912cb8
ML
4365 if (node->has_gimple_body_p ())
4366 ipa_analyze_node (node);
4367}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are propagation-time scratch data; the clone starts fresh.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  /* Copy the chain of aggregate replacement values, if any.  Note the copy
     built below is in reverse order of the original chain.  */
  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}

/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  /* Deep-copy the linked list of aggregate replacement values, preserving
     its order.  */
  ipa_agg_replacement_value *agg = src_trans->agg_values,
    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  /* Summaries must exist before the insertion hook can analyze new nodes.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  /* The hook may already be gone; only remove it when present.  */
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  /* When optimizing, the structures are still needed by the inliner; only
     release everything when not optimizing and not in LTO mode.  */
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* Release all object pools used by IPA-CP as well.  */
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      /* Print the various usage flags tracked for the parameter.  */
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      /* Each entry is printed as INDEX[OFFSET]=VALUE.  */
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}

4557
fb3f88cc
JH
4558/* Stream out jump function JUMP_FUNC to OB. */
4559
4560static void
4561ipa_write_jump_function (struct output_block *ob,
4562 struct ipa_jump_func *jump_func)
4563{
8b7773a4
MJ
4564 struct ipa_agg_jf_item *item;
4565 struct bitpack_d bp;
4566 int i, count;
f714ecf5 4567 int flag = 0;
fb3f88cc 4568
f714ecf5
JH
4569 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4570 as well as WPA memory by handling them specially. */
4571 if (jump_func->type == IPA_JF_CONST
4572 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4573 flag = 1;
4574
4575 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
fb3f88cc
JH
4576 switch (jump_func->type)
4577 {
4578 case IPA_JF_UNKNOWN:
4579 break;
4580 case IPA_JF_CONST:
5368224f 4581 gcc_assert (
4502fe8d 4582 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
f714ecf5
JH
4583 stream_write_tree (ob,
4584 flag
4585 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4586 : jump_func->value.constant.value, true);
fb3f88cc
JH
4587 break;
4588 case IPA_JF_PASS_THROUGH:
412288f1 4589 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4590 if (jump_func->value.pass_through.operation == NOP_EXPR)
4591 {
4592 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4593 bp = bitpack_create (ob->main_stream);
4594 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4595 streamer_write_bitpack (&bp);
4596 }
a2b4c188
KV
4597 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4598 == tcc_unary)
4599 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4600 else
4601 {
4602 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4603 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4604 }
fb3f88cc
JH
4605 break;
4606 case IPA_JF_ANCESTOR:
412288f1 4607 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4608 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4609 bp = bitpack_create (ob->main_stream);
4610 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4611 streamer_write_bitpack (&bp);
fb3f88cc 4612 break;
eb270950
FX
4613 default:
4614 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4615 }
4616
9771b263 4617 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4618 streamer_write_uhwi (ob, count);
4619 if (count)
4620 {
4621 bp = bitpack_create (ob->main_stream);
4622 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4623 streamer_write_bitpack (&bp);
4624 }
4625
9771b263 4626 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4 4627 {
eb270950 4628 stream_write_tree (ob, item->type, true);
8b7773a4 4629 streamer_write_uhwi (ob, item->offset);
eb270950
FX
4630 streamer_write_uhwi (ob, item->jftype);
4631 switch (item->jftype)
4632 {
4633 case IPA_JF_UNKNOWN:
4634 break;
4635 case IPA_JF_CONST:
4636 stream_write_tree (ob, item->value.constant, true);
4637 break;
4638 case IPA_JF_PASS_THROUGH:
4639 case IPA_JF_LOAD_AGG:
4640 streamer_write_uhwi (ob, item->value.pass_through.operation);
4641 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4642 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4643 != tcc_unary)
4644 stream_write_tree (ob, item->value.pass_through.operand, true);
4645 if (item->jftype == IPA_JF_LOAD_AGG)
4646 {
4647 stream_write_tree (ob, item->value.load_agg.type, true);
4648 streamer_write_uhwi (ob, item->value.load_agg.offset);
4649 bp = bitpack_create (ob->main_stream);
4650 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4651 streamer_write_bitpack (&bp);
4652 }
4653 break;
4654 default:
4655 fatal_error (UNKNOWN_LOCATION,
4656 "invalid jump function in LTO stream");
4657 }
fb3f88cc 4658 }
04be694e 4659
209ca542 4660 bp = bitpack_create (ob->main_stream);
86cd0334 4661 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4662 streamer_write_bitpack (&bp);
86cd0334 4663 if (jump_func->bits)
209ca542 4664 {
86cd0334
MJ
4665 streamer_write_widest_int (ob, jump_func->bits->value);
4666 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4667 }
86cd0334 4668 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4669 streamer_write_bitpack (&bp);
86cd0334 4670 if (jump_func->m_vr)
8bc5448f
KV
4671 {
4672 streamer_write_enum (ob->main_stream, value_rang_type,
54994253
AH
4673 VR_LAST, jump_func->m_vr->kind ());
4674 stream_write_tree (ob, jump_func->m_vr->min (), true);
4675 stream_write_tree (ob, jump_func->m_vr->max (), true);
8bc5448f 4676 }
fb3f88cc
JH
4677}
4678
/* Read in jump function JUMP_FUNC from IB.

   This is the exact mirror of ipa_write_jump_function; the read order must
   match the write order bit for bit.  CS is the edge the jump function
   belongs to and DATA_IN supplies tree streaming context.  When PREVAILS is
   false the data is still consumed (to keep the stream in sync) but results
   are not stored.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  /* Low bit of the tag flags the ADDR_EXPR shortcut used by the writer.  */
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	/* Rebuild the ADDR_EXPR the writer stripped.  */
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate jump function items.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  /* The writer streams an operand only for non-unary operations.  */
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  /* Optional known-bits information.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Optional value-range information.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4817
e33c6cd6
MJ
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* Pack all the flags into a single word.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and only streamed) for aggregate or
     polymorphic calls; otherwise it must be zero.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4849
e33c6cd6
MJ
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirror of
   ipa_write_indirect_edge_info.  INFO, when non-NULL, is the prevailing
   caller's ipa_node_params in which parameter-use flags are updated.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* The writer only streamed an offset for aggregate/polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  /* Record that the parameter feeding this call is used, so later
     analyses see it even without re-running local analysis.  */
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index , true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
4889
fb3f88cc
JH
/* Stream out NODE info to OB.  The record consists of: node reference,
   parameter count, per-parameter move costs, a bitpack of used flags,
   per-parameter controlled-uses counts and types, then jump functions of
   all direct and indirect call edges.  Mirror of ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only analyzed, non-clone nodes are expected here.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Edges without summaries are streamed as a zero count.  */
      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      /* The count is doubled with the low bit encoding whether
	 polymorphic call contexts are present.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      /* Indirect edges additionally carry indirect-call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4962
/* Stream in edge E from IB.  When the caller prevails and the call can be in
   this translation unit, the jump functions are stored on the edge summary;
   otherwise the same data is read into throwaway objects just to keep the
   stream position in sync.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  /* Low bit of the count flags presence of polymorphic call contexts.  */
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      /* Discard path: consume the data without storing it.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
5006
/* Stream in NODE info from IB.  Mirror of ipa_write_node_info; when NODE is
   not the prevailing definition the stream is still consumed but nothing is
   recorded.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails
				? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Non-prevailing node: skip over the move costs.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* Per-parameter "used" flags, packed one bit each.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  /* Per-parameter controlled-uses count and type.  */
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
5061
5062/* Write jump functions for nodes in SET. */
5063
5064void
f27c1867 5065ipa_prop_write_jump_functions (void)
fb3f88cc
JH
5066{
5067 struct cgraph_node *node;
93536c97 5068 struct output_block *ob;
fb3f88cc 5069 unsigned int count = 0;
f27c1867
JH
5070 lto_symtab_encoder_iterator lsei;
5071 lto_symtab_encoder_t encoder;
5072
6fe906a3 5073 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 5074 return;
fb3f88cc 5075
93536c97 5076 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5077 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5078 ob->symbol = NULL;
f27c1867
JH
5079 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5080 lsei_next_function_in_partition (&lsei))
fb3f88cc 5081 {
f27c1867 5082 node = lsei_cgraph_node (lsei);
d52f5295 5083 if (node->has_gimple_body_p ()
c47d0034 5084 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5085 count++;
5086 }
5087
412288f1 5088 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5089
5090 /* Process all of the functions. */
f27c1867
JH
5091 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5092 lsei_next_function_in_partition (&lsei))
fb3f88cc 5093 {
f27c1867 5094 node = lsei_cgraph_node (lsei);
d52f5295 5095 if (node->has_gimple_body_p ()
c47d0034 5096 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5097 ipa_write_node_info (ob, node);
5098 }
412288f1 5099 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5100 produce_asm (ob, NULL);
5101 destroy_output_block (ob);
5102}
5103
/* Read section in file FILE_DATA of length LEN with data DATA.  The section
   contains a count followed by that many ipa_write_node_info records.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section body is laid out as header | cfg | main | strings.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5144
5145/* Read ipcp jump functions. */
5146
5147void
5148ipa_prop_read_jump_functions (void)
5149{
5150 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5151 struct lto_file_decl_data *file_data;
5152 unsigned int j = 0;
5153
5154 ipa_check_create_node_params ();
5155 ipa_check_create_edge_args ();
5156 ipa_register_cgraph_hooks ();
5157
5158 while ((file_data = file_data_vec[j++]))
5159 {
5160 size_t len;
3c56d8d8
ML
5161 const char *data
5162 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5163 &len);
fb3f88cc
JH
5164 if (data)
5165 ipa_prop_read_section (file_data, data, len);
5166 }
5167}
5168
/* Stream out the IPA-CP transformation summary of NODE to OB: the chain of
   aggregate replacement values, then (when present) per-parameter value
   ranges and known-bits info.  Mirror of read_ipcp_transformation_info.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Count the replacement chain first; the reader needs the length.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  /* Value ranges; a zero count is written when there are none.  */
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Known-bits info; again a zero count means absent.  */
  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  /* Entries may be NULL; stream a presence bit per entry.  */
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
5244
/* Stream in the IPA-CP transformation summary for NODE from IB: the
   aggregate value replacement chain, then optional per-parameter value
   ranges and known-bits info.  Mirror of write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepend, so the chain comes out in reverse stream order.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Value ranges; a zero count means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known-bits info; entries with a clear presence bit stay NULL (the
     vector was grown cleared).  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
5315
5316/* Write all aggregate replacement for nodes in set. */
5317
5318void
04be694e 5319ipcp_write_transformation_summaries (void)
2c9561b5
MJ
5320{
5321 struct cgraph_node *node;
5322 struct output_block *ob;
5323 unsigned int count = 0;
5324 lto_symtab_encoder_iterator lsei;
5325 lto_symtab_encoder_t encoder;
5326
2c9561b5
MJ
5327 ob = create_output_block (LTO_section_ipcp_transform);
5328 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5329 ob->symbol = NULL;
2c9561b5
MJ
5330 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5331 lsei_next_function_in_partition (&lsei))
5332 {
5333 node = lsei_cgraph_node (lsei);
04be694e 5334 if (node->has_gimple_body_p ())
2c9561b5
MJ
5335 count++;
5336 }
5337
5338 streamer_write_uhwi (ob, count);
5339
5340 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5341 lsei_next_function_in_partition (&lsei))
5342 {
5343 node = lsei_cgraph_node (lsei);
04be694e
MJ
5344 if (node->has_gimple_body_p ())
5345 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
5346 }
5347 streamer_write_char_stream (ob->main_stream, 0);
5348 produce_asm (ob, NULL);
5349 destroy_output_block (ob);
5350}
5351
5352/* Read replacements section in file FILE_DATA of length LEN with data
5353 DATA. */
5354
5355static void
5356read_replacements_section (struct lto_file_decl_data *file_data,
5357 const char *data,
5358 size_t len)
5359{
5360 const struct lto_function_header *header =
5361 (const struct lto_function_header *) data;
5362 const int cfg_offset = sizeof (struct lto_function_header);
5363 const int main_offset = cfg_offset + header->cfg_size;
5364 const int string_offset = main_offset + header->main_size;
99b1c316 5365 class data_in *data_in;
2c9561b5
MJ
5366 unsigned int i;
5367 unsigned int count;
5368
207c68cd 5369 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5370 header->main_size, file_data->mode_table);
2c9561b5
MJ
5371
5372 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5373 header->string_size, vNULL);
2c9561b5
MJ
5374 count = streamer_read_uhwi (&ib_main);
5375
5376 for (i = 0; i < count; i++)
5377 {
5378 unsigned int index;
5379 struct cgraph_node *node;
5380 lto_symtab_encoder_t encoder;
5381
5382 index = streamer_read_uhwi (&ib_main);
5383 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5384 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5385 index));
67348ccc 5386 gcc_assert (node->definition);
04be694e 5387 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5388 }
5389 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5390 len);
5391 lto_data_in_delete (data_in);
5392}
5393
5394/* Read IPA-CP aggregate replacements. */
5395
5396void
04be694e 5397ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5398{
5399 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5400 struct lto_file_decl_data *file_data;
5401 unsigned int j = 0;
5402
5403 while ((file_data = file_data_vec[j++]))
5404 {
5405 size_t len;
3c56d8d8
ML
5406 const char *data
5407 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5408 &len);
2c9561b5
MJ
5409 if (data)
5410 read_replacements_section (file_data, data, len);
5411 }
5412}
5413
5414/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5415 NODE. */
5416
5417static void
5418adjust_agg_replacement_values (struct cgraph_node *node,
5419 struct ipa_agg_replacement_value *aggval)
5420{
5421 struct ipa_agg_replacement_value *v;
2c9561b5 5422
ff6686d2 5423 if (!node->clone.param_adjustments)
2c9561b5
MJ
5424 return;
5425
ff6686d2
MJ
5426 auto_vec<int, 16> new_indices;
5427 node->clone.param_adjustments->get_updated_indices (&new_indices);
2c9561b5
MJ
5428 for (v = aggval; v; v = v->next)
5429 {
ff6686d2 5430 gcc_checking_assert (v->index >= 0);
2c9561b5 5431
ff6686d2
MJ
5432 if ((unsigned) v->index < new_indices.length ())
5433 v->index = new_indices[v->index];
5434 else
5435 /* This can happen if we know about a constant passed by reference by
5436 an argument which is never actually used for anything, let alone
5437 loading that constant. */
5438 v->index = -1;
5439 }
2c9561b5
MJ
5440}
5441
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase: it rewrites loads
   from aggregates whose contents are known constants (see
   before_dom_children).  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body analysis info of the function being modified.  */
  struct ipa_func_body_info *m_fbi;
  /* Parameter descriptors of that function.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Known aggregate replacement values to substitute into loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags reported back to the caller: whether any statement was
     changed and whether the CFG was changed (dead EH edges purged).  */
  bool *m_something_changed, *m_cfg_changed;
};
5462
/* Scan every statement of BB and replace loads from aggregate parameters for
   which a constant replacement value has been recorded (in m_aggval) with that
   constant.  Sets *m_something_changed when any statement is rewritten and
   *m_cfg_changed when removing dead EH edges alters the CFG.  Always returns
   NULL (the walk is not redirected).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only loads producing a register-type value are candidates.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      /* Reject references containing a VIEW_CONVERT_EXPR anywhere in the
	 component chain.  */
      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Determine which parameter and which offset within it is loaded.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a recorded replacement at that index and offset.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      /* The replacement must also agree on by-reference-ness and size,
	 otherwise it describes a different piece of data.  */
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  /* Prefer a value conversion; fall back to a same-size
	     reinterpretation; otherwise give up on this load.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a potentially-trapping load with a constant can make EH
	 edges dead; purging them changes the CFG.  */
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5558
c7ac9a0c
JH
5559/* Return true if we have recorded VALUE and MASK about PARM.
5560 Set VALUE and MASK accordingly. */
5561
5562bool
5563ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5564{
5565 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5566 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5567 if (!ts || vec_safe_length (ts->bits) == 0)
5568 return false;
5569
5570 int i = 0;
5571 for (tree p = DECL_ARGUMENTS (current_function_decl);
5572 p != parm; p = DECL_CHAIN (p))
5573 {
5574 i++;
5575 /* Ignore static chain. */
5576 if (!p)
5577 return false;
5578 }
5579
5580 if (cnode->clone.param_adjustments)
5581 {
5582 i = cnode->clone.param_adjustments->get_original_index (i);
5583 if (i < 0)
5584 return false;
5585 }
5586
5587 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5588 if (!bits[i])
5589 return false;
5590 *mask = bits[i]->mask;
5591 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5592 return true;
5593}
5594
5595
209ca542 5596/* Update bits info of formal parameters as described in
9d3e0adc 5597 ipcp_transformation. */
209ca542
PK
5598
static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* If NODE is a clone with removed or reordered parameters, build a map
     from the original indices (under which the bits were recorded) to the
     clone's current argument positions.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  /* Skip parameters that no longer exist in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Only integral and pointer parameters in SSA form can carry the
	 recorded information.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      /* The information is attached to the parameter's default-def SSA
	 name.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in mask are unknown, bits set in value are known ones;
	     their union bounds which bits may possibly be nonzero.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* For pointers, derive alignment from the lowest unknown bit and
	     misalignment from the known low bits of the value.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment information that is already
		 stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5704
523fe5b6
AH
5705bool
5706ipa_vr::nonzero_p (tree expr_type) const
5707{
5708 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5709 return true;
5710
5711 unsigned prec = TYPE_PRECISION (expr_type);
5712 return (type == VR_RANGE
19849d15 5713 && TYPE_UNSIGNED (expr_type)
523fe5b6
AH
5714 && wi::eq_p (min, wi::one (prec))
5715 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5716}
5717
8bc5448f 5718/* Update value range of formal parameters as described in
9d3e0adc 5719 ipcp_transformation. */
8bc5448f
KV
5720
static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* If NODE is a clone with removed or reordered parameters, build a map
     from the original indices (under which the ranges were recorded) to
     the clone's current argument positions.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  /* Skip parameters that no longer exist in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      /* Ranges are attached to the parameter's default-def SSA name;
	 parameters not in SSA form cannot carry them.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      /* For pointers the only representable fact is
		 non-NULL-ness.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5799
8aab5218 5800/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5801
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* First apply known-bits and value-range information to the parameters'
     default-def SSA names.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  /* The remainder substitutes known aggregate constants; bail out early if
     there are none recorded for this node.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
      return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function-body analysis context used by the dominator walker
     below.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Walk the whole function body, replacing loads from aggregate parameters
     with the recorded constants where possible.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The transformation summaries have been applied; clear them so that they
     are not applied a second time.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  /* Purging dead EH edges may have left unreachable blocks behind.  */
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
86cd0334 5865
ac6f2e59 5866
eb270950 5867/* Return true if OTHER describes same agg value. */
ac6f2e59 5868bool
eb270950 5869ipa_agg_value::equal_to (const ipa_agg_value &other)
ac6f2e59
JH
5870{
5871 return offset == other.offset
5872 && operand_equal_p (value, other.value, 0);
5873}
9d5af1db
MJ
5874
/* Destructor also removing individual aggregate values. */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  /* NOTE(review): the second argument presumably tells
     ipa_release_agg_values not to release the vector itself (it is a
     member of this object) — confirm against that function's definition.  */
  ipa_release_agg_values (m_known_aggs, false);
}
5881
5882
5883
86cd0334 5884#include "gt-ipa-prop.h"