]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
i386: Prefer TARGET_AVX over TARGET_SSE_TYPELESS_STORES
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
8d9254fc 2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
771578a0 55
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.
   Lazily created; NULL until the IPA analysis phase allocates it.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Summary of per-node IPA-CP transformation data, filled in by the
   propagation phase and consumed when function bodies are modified.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 63
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  /* Hash on both the known value and the mask of unknown bits.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  /* Two entries are interchangeable only if value and mask both match.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }

  /* Empty slots are represented by NULL, so zero-initialized storage is
     already "empty".  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  /* Deleted slots use the otherwise invalid pointer value 1.  */
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  /* Hash combines the range kind with both range bound expressions.  */
  static hashval_t
  hash (const value_range *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }

  static bool
  equal (const value_range *a, const value_range *b)
    {
      return a->equal_p (*b);
    }

  /* Empty slots are represented by NULL, so zero-initialized storage is
     already "empty".  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  /* Deleted slots use the otherwise invalid pointer value 1.  */
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
152
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 172
5fe8e757
MJ
173/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
174 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
175
176static bool
177ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
178{
67348ccc 179 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
180
181 if (!fs_opts)
182 return false;
2bf86c84 183 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
184}
185
be95e2b9
MJ
186/* Return index of the formal whose tree is PTREE in function which corresponds
187 to INFO. */
188
d044dd17 189static int
f65f1ae3
MJ
190ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
191 tree ptree)
518dc859
RL
192{
193 int i, count;
194
f65f1ae3 195 count = vec_safe_length (descriptors);
518dc859 196 for (i = 0; i < count; i++)
f65f1ae3 197 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
198 return i;
199
200 return -1;
201}
202
d044dd17
MJ
203/* Return index of the formal whose tree is PTREE in function which corresponds
204 to INFO. */
205
206int
99b1c316 207ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
d044dd17
MJ
208{
209 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
210}
211
212/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
213 NODE. */
be95e2b9 214
f8e2a1ed
MJ
215static void
216ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 217 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
218{
219 tree fndecl;
220 tree fnargs;
221 tree parm;
222 int param_num;
3e293154 223
67348ccc 224 fndecl = node->decl;
0e8853ee 225 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
226 fnargs = DECL_ARGUMENTS (fndecl);
227 param_num = 0;
910ad8de 228 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 229 {
209ca542 230 descriptors[param_num].decl_or_type = parm;
40a777e8
JH
231 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
232 descriptors[param_num].move_cost = cost;
233 /* Watch overflow, move_cost is a bitfield. */
234 gcc_checking_assert (cost == descriptors[param_num].move_cost);
518dc859
RL
235 param_num++;
236 }
237}
238
3f84bf08
MJ
239/* Return how many formal parameters FNDECL has. */
240
fd29c024 241int
310bc633 242count_formal_params (tree fndecl)
3f84bf08
MJ
243{
244 tree parm;
245 int count = 0;
0e8853ee 246 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 247
910ad8de 248 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
249 count++;
250
251 return count;
252}
253
0e8853ee
JH
254/* Return the declaration of Ith formal parameter of the function corresponding
255 to INFO. Note there is no setter function as this array is built just once
256 using ipa_initialize_node_params. */
257
258void
99b1c316 259ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
0e8853ee
JH
260{
261 fprintf (file, "param #%i", i);
f65f1ae3 262 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
263 {
264 fprintf (file, " ");
ef6cb4c7 265 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
266 }
267}
268
159f01f8
MJ
269/* If necessary, allocate vector of parameter descriptors in info of NODE.
270 Return true if they were allocated, false if not. */
0e8853ee 271
159f01f8 272static bool
0e8853ee
JH
273ipa_alloc_node_params (struct cgraph_node *node, int param_count)
274{
6cf67b62 275 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
0e8853ee 276
f65f1ae3 277 if (!info->descriptors && param_count)
159f01f8
MJ
278 {
279 vec_safe_grow_cleared (info->descriptors, param_count);
280 return true;
281 }
282 else
283 return false;
0e8853ee
JH
284}
285
f8e2a1ed
MJ
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Idempotent: does nothing if descriptors already exist.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  /* The short-circuit matters: ipa_alloc_node_params is only called when
     descriptors have not been created yet, and population only happens
     when the allocation actually took place.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
299
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.
   For every actual argument this dumps the main jump function kind, any
   aggregate jump functions, the polymorphic context, known bits and the
   value range, each on its own line.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  /* For addresses of CONST_DECLs also show the constant itself.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* A non-NOP operation carries an extra operand worth printing.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Aggregate jump functions describe data passed in memory.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      /* Both PASS_THROUGH and LOAD_AGG items share the
		 pass_through operation/operand fields.  */
	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  /* NOTE(review): dumps to dump_file rather than F — looks
	     intentional in upstream but worth confirming.  */
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
441
442
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are printed first, then indirect calls.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      /* Calls loaded from aggregates (including member pointers) carry
	 extra location information.  */
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
496
497/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 498
3e293154
MJ
499void
500ipa_print_all_jump_functions (FILE *f)
501{
502 struct cgraph_node *node;
503
ca30a539 504 fprintf (f, "\nJump functions:\n");
65c70e6b 505 FOR_EACH_FUNCTION (node)
3e293154
MJ
506 {
507 ipa_print_node_jump_functions (f, node);
508 }
509}
510
04be694e
MJ
/* Set JFUNC to be a jump function that conveys no information at all.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
518
b8f6e610
MJ
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Shallow copy: rdesc (inside value.constant) is deliberately shared,
     not duplicated, with SRC.  */
  dst->value.constant = src->value.constant;
}
531
7b872d9e
MJ
/* Set JFUNC to be a constant jmp function with value CONSTANT.  CS is the
   edge whose call statement took the reference; it is recorded in a newly
   allocated reference description when CONSTANT is the address of a
   function, so that reference counts can be maintained when edges are
   cloned or removed.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  /* Only addresses of functions need a reference descriptor; other
     constants do not create call graph references.  */
  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
555
556/* Set JFUNC to be a simple pass-through jump function. */
557static void
8b7773a4 558ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 559 bool agg_preserved)
7b872d9e
MJ
560{
561 jfunc->type = IPA_JF_PASS_THROUGH;
562 jfunc->value.pass_through.operand = NULL_TREE;
563 jfunc->value.pass_through.formal_id = formal_id;
564 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 565 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
566}
567
a2b4c188
KV
568/* Set JFUNC to be an unary pass through jump function. */
569
570static void
571ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
572 enum tree_code operation)
573{
574 jfunc->type = IPA_JF_PASS_THROUGH;
575 jfunc->value.pass_through.operand = NULL_TREE;
576 jfunc->value.pass_through.formal_id = formal_id;
577 jfunc->value.pass_through.operation = operation;
578 jfunc->value.pass_through.agg_preserved = false;
579}
7b872d9e
MJ
580/* Set JFUNC to be an arithmetic pass through jump function. */
581
582static void
583ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
584 tree operand, enum tree_code operation)
585{
586 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 587 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
588 jfunc->value.pass_through.formal_id = formal_id;
589 jfunc->value.pass_through.operation = operation;
8b7773a4 590 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
591}
592
593/* Set JFUNC to be an ancestor jump function. */
594
595static void
596ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 597 int formal_id, bool agg_preserved)
7b872d9e
MJ
598{
599 jfunc->type = IPA_JF_ANCESTOR;
600 jfunc->value.ancestor.formal_id = formal_id;
601 jfunc->value.ancestor.offset = offset;
8b7773a4 602 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
603}
604
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
614
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
629
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* See the comment above: calls can be ignored during the backward walk.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers mark end of scope and never store a VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing, a non-pointer scalar store cannot alias
	     the VMT pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot be a VMT store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume everything else may store a VMT pointer.  */
  return true;
}
694
3b97a5c7
MJ
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  Returning true stops the walk.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      /* One possible VMT store is enough to give up; terminate the walk.  */
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
714
058d0a90
JH
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 768
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Restrict the alias query to the pointer-sized slot holding the VMT
     pointer at OFFSET within BASE.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL; a negative result means
     the AA walk budget was exhausted and we must assume a change.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
821
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* If the object comes in through a parameter whose type provably cannot
     change before CALL, skip the expensive memory-write scan.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
843
f65cf2b7
MJ
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build the dereference *ARG at offset 0 so the generic helper can treat
     it like any other memory reference.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
866
fdb0e1b4
MJ
867/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
868 boolean variable pointed to by DATA. */
869
870static bool
871mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
872 void *data)
873{
874 bool *b = (bool *) data;
875 *b = true;
876 return true;
877}
878
8aab5218
MJ
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  Returns NULL when no dominator carries a valid status.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  /* Climb the dominator tree until a block with a valid status is found
     or the tree root is passed (get_immediate_dominator returns NULL).  */
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
897
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector on first use.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared entry must not carry stale modification flags.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating status if one exists; otherwise
	 the cleared (all-unmodified) state is itself valid.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
926
688010ba 927/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 928 a value known not to be modified in this function before reaching the
8aab5218
MJ
929 statement STMT. FBI holds information about the function we have so far
930 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
931
932static bool
56b40062 933parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 934 gimple *stmt, tree parm_load)
fdb0e1b4 935{
56b40062 936 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
937 bool modified = false;
938 ao_ref refd;
939
776e4fe2
MJ
940 tree base = get_base_address (parm_load);
941 gcc_assert (TREE_CODE (base) == PARM_DECL);
942 if (TREE_READONLY (base))
943 return true;
944
c628d1c3
MJ
945 gcc_checking_assert (fbi);
946 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
947 if (paa->parm_modified)
948 return false;
fdb0e1b4
MJ
949
950 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 951 ao_ref_init (&refd, parm_load);
8aab5218 952 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
953 &modified, NULL, NULL,
954 fbi->aa_walk_budget + 1);
955 if (walked < 0)
956 {
957 modified = true;
958 if (fbi)
959 fbi->aa_walk_budget = 0;
960 }
961 else if (fbi)
962 fbi->aa_walk_budget -= walked;
8aab5218
MJ
963 if (paa && modified)
964 paa->parm_modified = true;
8b7773a4 965 return !modified;
fdb0e1b4
MJ
966}
967
a2b4c188
KV
968/* If STMT is an assignment that loads a value from an parameter declaration,
969 return the index of the parameter in ipa_node_params which has not been
970 modified. Otherwise return -1. */
971
972static int
973load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 974 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
975 gimple *stmt)
976{
bda2bc48
MJ
977 int index;
978 tree op1;
979
a2b4c188
KV
980 if (!gimple_assign_single_p (stmt))
981 return -1;
982
bda2bc48
MJ
983 op1 = gimple_assign_rhs1 (stmt);
984 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
985 return -1;
986
bda2bc48
MJ
987 index = ipa_get_param_decl_index_1 (descriptors, op1);
988 if (index < 0
989 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
990 return -1;
991
bda2bc48 992 return index;
a2b4c188
KV
993}
994
8aab5218
MJ
995/* Return true if memory reference REF (which must be a load through parameter
996 with INDEX) loads data that are known to be unmodified in this function
997 before reaching statement STMT. */
8b7773a4
MJ
998
999static bool
56b40062 1000parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 1001 int index, gimple *stmt, tree ref)
8b7773a4 1002{
56b40062 1003 struct ipa_param_aa_status *paa;
8b7773a4
MJ
1004 bool modified = false;
1005 ao_ref refd;
1006
c628d1c3
MJ
1007 gcc_checking_assert (fbi);
1008 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1009 if (paa->ref_modified)
1010 return false;
8b7773a4 1011
8aab5218 1012 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1013 ao_ref_init (&refd, ref);
8aab5218 1014 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
1015 &modified, NULL, NULL,
1016 fbi->aa_walk_budget + 1);
1017 if (walked < 0)
1018 {
1019 modified = true;
1020 fbi->aa_walk_budget = 0;
1021 }
1022 else
1023 fbi->aa_walk_budget -= walked;
1024 if (modified)
8aab5218 1025 paa->ref_modified = true;
8b7773a4
MJ
1026 return !modified;
1027}
1028
8aab5218
MJ
1029/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1030 is known to be unmodified in this function before reaching call statement
1031 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1032
1033static bool
56b40062 1034parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1035 gimple *call, tree parm)
8b7773a4
MJ
1036{
1037 bool modified = false;
1038 ao_ref refd;
1039
1040 /* It's unnecessary to calculate anything about memory contnets for a const
1041 function because it is not goin to use it. But do not cache the result
1042 either. Also, no such calculations for non-pointers. */
1043 if (!gimple_vuse (call)
c628d1c3 1044 || !POINTER_TYPE_P (TREE_TYPE (parm)))
8b7773a4
MJ
1045 return false;
1046
56b40062
MJ
1047 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1048 gimple_bb (call),
1049 index);
8aab5218 1050 if (paa->pt_modified)
8b7773a4
MJ
1051 return false;
1052
1053 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218 1054 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
c628d1c3
MJ
1055 &modified, NULL, NULL,
1056 fbi->aa_walk_budget + 1);
1057 if (walked < 0)
1058 {
1059 fbi->aa_walk_budget = 0;
1060 modified = true;
1061 }
1062 else
1063 fbi->aa_walk_budget -= walked;
8b7773a4 1064 if (modified)
8aab5218 1065 paa->pt_modified = true;
8b7773a4
MJ
1066 return !modified;
1067}
1068
91bb9f80
MJ
1069/* Return true if we can prove that OP is a memory reference loading
1070 data from an aggregate passed as a parameter.
1071
1072 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1073 false if it cannot prove that the value has not been modified before the
1074 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1075 if it cannot prove the value has not been modified, in that case it will
1076 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1077
8b7773a4
MJ
1078 INFO and PARMS_AINFO describe parameters of the current function (but the
1079 latter can be NULL), STMT is the load statement. If function returns true,
1080 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1081 within the aggregate and whether it is a load from a value passed by
1082 reference respectively. */
1083
ff302741 1084bool
56b40062 1085ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1086 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1087 gimple *stmt, tree op, int *index_p,
86003645 1088 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
91bb9f80 1089 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1090{
1091 int index;
588db50c 1092 HOST_WIDE_INT size;
ee45a32d 1093 bool reverse;
588db50c 1094 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1095
588db50c 1096 if (!base)
8b7773a4
MJ
1097 return false;
1098
1099 if (DECL_P (base))
1100 {
d044dd17 1101 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1102 if (index >= 0
8aab5218 1103 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1104 {
1105 *index_p = index;
1106 *by_ref_p = false;
3ff2ca23
JJ
1107 if (size_p)
1108 *size_p = size;
91bb9f80
MJ
1109 if (guaranteed_unmodified)
1110 *guaranteed_unmodified = true;
8b7773a4
MJ
1111 return true;
1112 }
1113 return false;
1114 }
1115
1116 if (TREE_CODE (base) != MEM_REF
1117 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1118 || !integer_zerop (TREE_OPERAND (base, 1)))
1119 return false;
1120
1121 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1122 {
1123 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1124 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1125 }
1126 else
1127 {
1128 /* This branch catches situations where a pointer parameter is not a
1129 gimple register, for example:
1130
1131 void hip7(S*) (struct S * p)
1132 {
1133 void (*<T2e4>) (struct S *) D.1867;
1134 struct S * p.1;
1135
1136 <bb 2>:
1137 p.1_1 = p;
1138 D.1867_2 = p.1_1->f;
1139 D.1867_2 ();
1140 gdp = &p;
1141 */
1142
355fe088 1143 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1144 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1145 }
1146
91bb9f80 1147 if (index >= 0)
8b7773a4 1148 {
91bb9f80
MJ
1149 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1150 if (!data_preserved && !guaranteed_unmodified)
1151 return false;
1152
8b7773a4
MJ
1153 *index_p = index;
1154 *by_ref_p = true;
3ff2ca23
JJ
1155 if (size_p)
1156 *size_p = size;
91bb9f80
MJ
1157 if (guaranteed_unmodified)
1158 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1159 return true;
1160 }
1161 return false;
1162}
1163
eb270950
FX
1164/* If STMT is an assignment that loads a value from a parameter declaration,
1165 or from an aggregate passed as the parameter either by value or reference,
1166 return the index of the parameter in ipa_node_params. Otherwise return -1.
1167
1168 FBI holds gathered information about the function. INFO describes
1169 parameters of the function, STMT is the assignment statement. If it is a
1170 memory load from an aggregate, *OFFSET_P is filled with offset within the
1171 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1172 reference. */
1173
1174static int
1175load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1176 class ipa_node_params *info,
1177 gimple *stmt,
1178 HOST_WIDE_INT *offset_p,
1179 bool *by_ref_p)
1180{
1181 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1182 poly_int64 size;
1183
1184 /* Load value from a parameter declaration. */
1185 if (index >= 0)
1186 {
1187 *offset_p = -1;
1188 return index;
1189 }
1190
1191 if (!gimple_assign_load_p (stmt))
1192 return -1;
1193
1194 tree rhs = gimple_assign_rhs1 (stmt);
1195
1196 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1197 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1198 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1199 return -1;
1200
1201 /* Skip memory reference containing bit-field. */
1202 if (TREE_CODE (rhs) == BIT_FIELD_REF
1203 || contains_bitfld_component_ref_p (rhs))
1204 return -1;
1205
1206 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1207 offset_p, &size, by_ref_p))
1208 return -1;
1209
1210 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1211 size));
1212 if (!*by_ref_p)
1213 {
1214 tree param_type = ipa_get_type (info, index);
1215
1216 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1217 return -1;
1218 }
1219 else if (TREE_THIS_VOLATILE (rhs))
1220 return -1;
1221
1222 return index;
1223}
1224
b258210c 1225/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1226 of an assignment statement STMT, try to determine whether we are actually
1227 handling any of the following cases and construct an appropriate jump
1228 function into JFUNC if so:
1229
1230 1) The passed value is loaded from a formal parameter which is not a gimple
1231 register (most probably because it is addressable, the value has to be
1232 scalar) and we can guarantee the value has not changed. This case can
1233 therefore be described by a simple pass-through jump function. For example:
1234
1235 foo (int a)
1236 {
1237 int a.0;
1238
1239 a.0_2 = a;
1240 bar (a.0_2);
1241
1242 2) The passed value can be described by a simple arithmetic pass-through
1243 jump function. E.g.
1244
1245 foo (int a)
1246 {
1247 int D.2064;
1248
1249 D.2064_4 = a.1(D) + 4;
1250 bar (D.2064_4);
1251
1252 This case can also occur in combination of the previous one, e.g.:
1253
1254 foo (int a, int z)
1255 {
1256 int a.0;
1257 int D.2064;
1258
1259 a.0_3 = a;
1260 D.2064_4 = a.0_3 + 4;
1261 foo (D.2064_4);
1262
1263 3) The passed value is an address of an object within another one (which
1264 also passed by reference). Such situations are described by an ancestor
1265 jump function and describe situations such as:
1266
1267 B::foo() (struct B * const this)
1268 {
1269 struct A * D.1845;
1270
1271 D.1845_2 = &this_1(D)->D.1748;
1272 A::bar (D.1845_2);
1273
1274 INFO is the structure describing individual parameters access different
1275 stages of IPA optimizations. PARMS_AINFO contains the information that is
1276 only needed for intraprocedural analysis. */
685b0d13
MJ
1277
1278static void
56b40062 1279compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1280 class ipa_node_params *info,
b258210c 1281 struct ipa_jump_func *jfunc,
355fe088 1282 gcall *call, gimple *stmt, tree name,
06d65050 1283 tree param_type)
685b0d13 1284{
588db50c 1285 HOST_WIDE_INT offset, size;
fdb0e1b4 1286 tree op1, tc_ssa, base, ssa;
ee45a32d 1287 bool reverse;
685b0d13 1288 int index;
685b0d13 1289
685b0d13 1290 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1291
fdb0e1b4 1292 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1293 {
fdb0e1b4
MJ
1294 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1295 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1296 else
bda2bc48
MJ
1297 index = load_from_unmodified_param (fbi, info->descriptors,
1298 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1299 tc_ssa = op1;
1300 }
1301 else
1302 {
bda2bc48 1303 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1304 tc_ssa = gimple_assign_lhs (stmt);
1305 }
1306
1307 if (index >= 0)
1308 {
a77af182 1309 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1310 {
a77af182
RB
1311 case GIMPLE_BINARY_RHS:
1312 {
1313 tree op2 = gimple_assign_rhs2 (stmt);
1314 if (!is_gimple_ip_invariant (op2)
1315 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1316 != tcc_comparison)
1317 && !useless_type_conversion_p (TREE_TYPE (name),
1318 TREE_TYPE (op1))))
1319 return;
1320
1321 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1322 gimple_assign_rhs_code (stmt));
1323 break;
1324 }
1325 case GIMPLE_SINGLE_RHS:
1326 {
1327 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1328 tc_ssa);
1329 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1330 break;
1331 }
1332 case GIMPLE_UNARY_RHS:
b66113e9 1333 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1334 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1335 gimple_assign_rhs_code (stmt));
a77af182 1336 default:;
8b7773a4 1337 }
685b0d13
MJ
1338 return;
1339 }
1340
1341 if (TREE_CODE (op1) != ADDR_EXPR)
1342 return;
1343 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1344 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1345 return;
588db50c 1346 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1347 offset_int mem_offset;
1348 if (!base
1349 || TREE_CODE (base) != MEM_REF
1350 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1351 return;
aca52e6f 1352 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1353 ssa = TREE_OPERAND (base, 0);
1354 if (TREE_CODE (ssa) != SSA_NAME
1355 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1356 || offset < 0)
685b0d13
MJ
1357 return;
1358
b8f6e610 1359 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1360 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1361 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1362 ipa_set_ancestor_jf (jfunc, offset, index,
1363 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1364}
1365
40591473
MJ
1366/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1367 it looks like:
1368
1369 iftmp.1_3 = &obj_2(D)->D.1762;
1370
1371 The base of the MEM_REF must be a default definition SSA NAME of a
1372 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1373 whole MEM_REF expression is returned and the offset calculated from any
1374 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1375 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1376
1377static tree
355fe088 1378get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1379{
588db50c 1380 HOST_WIDE_INT size;
40591473 1381 tree expr, parm, obj;
ee45a32d 1382 bool reverse;
40591473
MJ
1383
1384 if (!gimple_assign_single_p (assign))
1385 return NULL_TREE;
1386 expr = gimple_assign_rhs1 (assign);
1387
1388 if (TREE_CODE (expr) != ADDR_EXPR)
1389 return NULL_TREE;
1390 expr = TREE_OPERAND (expr, 0);
1391 obj = expr;
588db50c 1392 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1393
aca52e6f
RS
1394 offset_int mem_offset;
1395 if (!expr
1396 || TREE_CODE (expr) != MEM_REF
1397 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1398 return NULL_TREE;
1399 parm = TREE_OPERAND (expr, 0);
1400 if (TREE_CODE (parm) != SSA_NAME
1401 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1402 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1403 return NULL_TREE;
1404
aca52e6f 1405 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1406 *obj_p = obj;
1407 return expr;
1408}
1409
685b0d13 1410
b258210c
MJ
1411/* Given that an actual argument is an SSA_NAME that is a result of a phi
1412 statement PHI, try to find out whether NAME is in fact a
1413 multiple-inheritance typecast from a descendant into an ancestor of a formal
1414 parameter and thus can be described by an ancestor jump function and if so,
1415 write the appropriate function into JFUNC.
1416
1417 Essentially we want to match the following pattern:
1418
1419 if (obj_2(D) != 0B)
1420 goto <bb 3>;
1421 else
1422 goto <bb 4>;
1423
1424 <bb 3>:
1425 iftmp.1_3 = &obj_2(D)->D.1762;
1426
1427 <bb 4>:
1428 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1429 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1430 return D.1879_6; */
1431
1432static void
56b40062 1433compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
99b1c316 1434 class ipa_node_params *info,
b258210c 1435 struct ipa_jump_func *jfunc,
538dd0b7 1436 gcall *call, gphi *phi)
b258210c 1437{
40591473 1438 HOST_WIDE_INT offset;
355fe088 1439 gimple *assign, *cond;
b258210c 1440 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1441 tree tmp, parm, expr, obj;
b258210c
MJ
1442 int index, i;
1443
54e348cb 1444 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1445 return;
1446
54e348cb
MJ
1447 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1448 tmp = PHI_ARG_DEF (phi, 0);
1449 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1450 tmp = PHI_ARG_DEF (phi, 1);
1451 else
1452 return;
b258210c
MJ
1453 if (TREE_CODE (tmp) != SSA_NAME
1454 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1455 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1456 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1457 return;
1458
1459 assign = SSA_NAME_DEF_STMT (tmp);
1460 assign_bb = gimple_bb (assign);
40591473 1461 if (!single_pred_p (assign_bb))
b258210c 1462 return;
40591473
MJ
1463 expr = get_ancestor_addr_info (assign, &obj, &offset);
1464 if (!expr)
b258210c
MJ
1465 return;
1466 parm = TREE_OPERAND (expr, 0);
b258210c 1467 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1468 if (index < 0)
1469 return;
b258210c
MJ
1470
1471 cond_bb = single_pred (assign_bb);
1472 cond = last_stmt (cond_bb);
69610617
SB
1473 if (!cond
1474 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1475 || gimple_cond_code (cond) != NE_EXPR
1476 || gimple_cond_lhs (cond) != parm
1477 || !integer_zerop (gimple_cond_rhs (cond)))
1478 return;
1479
b258210c
MJ
1480 phi_bb = gimple_bb (phi);
1481 for (i = 0; i < 2; i++)
1482 {
1483 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1484 if (pred != assign_bb && pred != cond_bb)
1485 return;
1486 }
1487
3b97a5c7
MJ
1488 ipa_set_ancestor_jf (jfunc, offset, index,
1489 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1490}
1491
be95e2b9
MJ
1492/* Inspect the given TYPE and return true iff it has the same structure (the
1493 same number of fields of the same types) as a C++ member pointer. If
1494 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1495 corresponding fields there. */
1496
3e293154
MJ
1497static bool
1498type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1499{
1500 tree fld;
1501
1502 if (TREE_CODE (type) != RECORD_TYPE)
1503 return false;
1504
1505 fld = TYPE_FIELDS (type);
1506 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1507 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1508 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1509 return false;
1510
1511 if (method_ptr)
1512 *method_ptr = fld;
1513
910ad8de 1514 fld = DECL_CHAIN (fld);
8b7773a4 1515 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1516 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1517 return false;
1518 if (delta)
1519 *delta = fld;
1520
910ad8de 1521 if (DECL_CHAIN (fld))
3e293154
MJ
1522 return false;
1523
1524 return true;
1525}
1526
61502ca8 1527/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
eb270950
FX
1528 return the rhs of its defining statement, and this statement is stored in
1529 *RHS_STMT. Otherwise return RHS as it is. */
7ec49257
MJ
1530
1531static inline tree
eb270950 1532get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
7ec49257
MJ
1533{
1534 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1535 {
355fe088 1536 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1537
1538 if (gimple_assign_single_p (def_stmt))
1539 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1540 else
1541 break;
eb270950 1542 *rhs_stmt = def_stmt;
7ec49257
MJ
1543 }
1544 return rhs;
1545}
1546
eb270950 1547/* Simple linked list, describing contents of an aggregate before call. */
8b7773a4
MJ
1548
1549struct ipa_known_agg_contents_list
1550{
1551 /* Offset and size of the described part of the aggregate. */
1552 HOST_WIDE_INT offset, size;
eb270950
FX
1553
1554 /* Type of the described part of the aggregate. */
1555 tree type;
1556
1557 /* Known constant value or jump function data describing contents. */
1558 struct ipa_load_agg_data value;
1559
8b7773a4
MJ
1560 /* Pointer to the next structure in the list. */
1561 struct ipa_known_agg_contents_list *next;
1562};
3e293154 1563
eb270950
FX
1564/* Add an aggregate content item into a linked list of
1565 ipa_known_agg_contents_list structure, in which all elements
1566 are sorted ascendingly by offset. */
0d48ee34 1567
46771da5
FX
1568static inline void
1569add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1570 struct ipa_known_agg_contents_list *item)
0d48ee34 1571{
46771da5
FX
1572 struct ipa_known_agg_contents_list *list = *plist;
1573
1574 for (; list; list = list->next)
0d48ee34 1575 {
46771da5
FX
1576 if (list->offset >= item->offset)
1577 break;
1578
1579 plist = &list->next;
0d48ee34
MJ
1580 }
1581
46771da5
FX
1582 item->next = list;
1583 *plist = item;
1584}
1585
eb270950 1586/* Check whether a given aggregate content is clobbered by certain element in
46771da5
FX
1587 a linked list of ipa_known_agg_contents_list. */
1588
1589static inline bool
1590clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1591 struct ipa_known_agg_contents_list *item)
1592{
1593 for (; list; list = list->next)
0d48ee34 1594 {
46771da5
FX
1595 if (list->offset >= item->offset)
1596 return list->offset < item->offset + item->size;
1597
1598 if (list->offset + list->size > item->offset)
1599 return true;
0d48ee34 1600 }
46771da5
FX
1601
1602 return false;
0d48ee34
MJ
1603}
1604
1605/* Build aggregate jump function from LIST, assuming there are exactly
eb270950 1606 VALUE_COUNT entries there and that offset of the passed argument
0d48ee34
MJ
1607 is ARG_OFFSET and store it into JFUNC. */
1608
1609static void
1610build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
eb270950 1611 int value_count, HOST_WIDE_INT arg_offset,
0d48ee34
MJ
1612 struct ipa_jump_func *jfunc)
1613{
eb270950
FX
1614 vec_alloc (jfunc->agg.items, value_count);
1615 for (; list; list = list->next)
1616 {
1617 struct ipa_agg_jf_item item;
1618 tree operand = list->value.pass_through.operand;
1619
1620 if (list->value.pass_through.formal_id >= 0)
1621 {
1622 /* Content value is derived from some formal paramerter. */
1623 if (list->value.offset >= 0)
1624 item.jftype = IPA_JF_LOAD_AGG;
1625 else
1626 item.jftype = IPA_JF_PASS_THROUGH;
1627
1628 item.value.load_agg = list->value;
1629 if (operand)
1630 item.value.pass_through.operand
1631 = unshare_expr_without_location (operand);
1632 }
1633 else if (operand)
1634 {
1635 /* Content value is known constant. */
1636 item.jftype = IPA_JF_CONST;
1637 item.value.constant = unshare_expr_without_location (operand);
1638 }
1639 else
1640 continue;
1641
1642 item.type = list->type;
1643 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1644
1645 item.offset = list->offset - arg_offset;
1646 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1647
1648 jfunc->agg.items->quick_push (item);
1649 }
1650}
1651
1652/* Given an assignment statement STMT, try to collect information into
1653 AGG_VALUE that will be used to construct jump function for RHS of the
1654 assignment, from which content value of an aggregate part comes.
1655
1656 Besides constant and simple pass-through jump functions, also try to
1657 identify whether it matches the following pattern that can be described by
1658 a load-value-from-aggregate jump function, which is a derivative of simple
1659 pass-through jump function.
1660
1661 foo (int *p)
1662 {
1663 ...
1664
1665 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1666 bar (q_5);
1667 }
1668
1669 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1670 constant, simple pass-through and load-vale-from-aggregate. If value
1671 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1672 set to -1. For simple pass-through and load-value-from-aggregate, field
1673 FORMAL_ID specifies the related formal parameter index, and field
1674 OFFSET can be used to distinguish them, -1 means simple pass-through,
1675 otherwise means load-value-from-aggregate. */
1676
1677static void
1678analyze_agg_content_value (struct ipa_func_body_info *fbi,
1679 struct ipa_load_agg_data *agg_value,
1680 gimple *stmt)
1681{
1682 tree lhs = gimple_assign_lhs (stmt);
1683 tree rhs1 = gimple_assign_rhs1 (stmt);
1684 enum tree_code code;
1685 int index = -1;
1686
1687 /* Initialize jump function data for the aggregate part. */
1688 memset (agg_value, 0, sizeof (*agg_value));
1689 agg_value->pass_through.operation = NOP_EXPR;
1690 agg_value->pass_through.formal_id = -1;
1691 agg_value->offset = -1;
1692
1693 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1694 || TREE_THIS_VOLATILE (lhs)
1695 || TREE_CODE (lhs) == BIT_FIELD_REF
1696 || contains_bitfld_component_ref_p (lhs))
1697 return;
1698
1699 /* Skip SSA copies. */
1700 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1701 {
1702 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1703 break;
1704
1705 stmt = SSA_NAME_DEF_STMT (rhs1);
1706 if (!is_gimple_assign (stmt))
1707 return;
1708
1709 rhs1 = gimple_assign_rhs1 (stmt);
1710 }
1711
1712 code = gimple_assign_rhs_code (stmt);
1713 switch (gimple_assign_rhs_class (stmt))
0d48ee34 1714 {
eb270950
FX
1715 case GIMPLE_SINGLE_RHS:
1716 if (is_gimple_ip_invariant (rhs1))
0d48ee34 1717 {
eb270950
FX
1718 agg_value->pass_through.operand = rhs1;
1719 return;
0d48ee34 1720 }
eb270950
FX
1721 code = NOP_EXPR;
1722 break;
1723
1724 case GIMPLE_UNARY_RHS:
1725 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1726 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1727 tcc_binary, this subtleness is somewhat misleading.
1728
1729 Since tcc_unary is widely used in IPA-CP code to check an operation
1730 with one operand, here we only allow tc_unary operation to avoid
1731 possible problem. Then we can use (opclass == tc_unary) or not to
1732 distinguish unary and binary. */
1733 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1734 return;
1735
1736 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1737 break;
1738
1739 case GIMPLE_BINARY_RHS:
1740 {
1741 gimple *rhs1_stmt = stmt;
1742 gimple *rhs2_stmt = stmt;
1743 tree rhs2 = gimple_assign_rhs2 (stmt);
1744
1745 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1746 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1747
1748 if (is_gimple_ip_invariant (rhs2))
1749 {
1750 agg_value->pass_through.operand = rhs2;
1751 stmt = rhs1_stmt;
1752 }
1753 else if (is_gimple_ip_invariant (rhs1))
1754 {
1755 if (TREE_CODE_CLASS (code) == tcc_comparison)
1756 code = swap_tree_comparison (code);
1757 else if (!commutative_tree_code (code))
1758 return;
1759
1760 agg_value->pass_through.operand = rhs1;
1761 stmt = rhs2_stmt;
1762 rhs1 = rhs2;
1763 }
1764 else
1765 return;
1766
1767 if (TREE_CODE_CLASS (code) != tcc_comparison
1768 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
1769 return;
1770 }
1771 break;
1772
1773 default:
1774 return;
1775 }
1776
1777 if (TREE_CODE (rhs1) != SSA_NAME)
1778 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1779 &agg_value->offset,
1780 &agg_value->by_ref);
1781 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1782 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1783
1784 if (index >= 0)
1785 {
1786 if (agg_value->offset >= 0)
1787 agg_value->type = TREE_TYPE (rhs1);
1788 agg_value->pass_through.formal_id = index;
1789 agg_value->pass_through.operation = code;
0d48ee34 1790 }
eb270950
FX
1791 else
1792 agg_value->pass_through.operand = NULL_TREE;
0d48ee34
MJ
1793}
1794
46771da5
FX
1795/* If STMT is a memory store to the object whose address is BASE, extract
1796 information (offset, size, and value) into CONTENT, and return true,
1797 otherwise we conservatively assume the whole object is modified with
1798 unknown content, and return false. CHECK_REF means that access to object
1799 is expected to be in form of MEM_REF expression. */
1800
1801static bool
eb270950
FX
1802extract_mem_content (struct ipa_func_body_info *fbi,
1803 gimple *stmt, tree base, bool check_ref,
46771da5
FX
1804 struct ipa_known_agg_contents_list *content)
1805{
1806 HOST_WIDE_INT lhs_offset, lhs_size;
46771da5
FX
1807 bool reverse;
1808
eb270950 1809 if (!is_gimple_assign (stmt))
46771da5
FX
1810 return false;
1811
eb270950
FX
1812 tree lhs = gimple_assign_lhs (stmt);
1813 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1814 &reverse);
46771da5
FX
1815 if (!lhs_base)
1816 return false;
1817
1818 if (check_ref)
1819 {
1820 if (TREE_CODE (lhs_base) != MEM_REF
1821 || TREE_OPERAND (lhs_base, 0) != base
1822 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1823 return false;
1824 }
1825 else if (lhs_base != base)
1826 return false;
1827
46771da5 1828 content->offset = lhs_offset;
eb270950
FX
1829 content->size = lhs_size;
1830 content->type = TREE_TYPE (lhs);
46771da5
FX
1831 content->next = NULL;
1832
eb270950 1833 analyze_agg_content_value (fbi, &content->value, stmt);
46771da5
FX
1834 return true;
1835}
1836
8b7773a4 1837/* Traverse statements from CALL backwards, scanning whether an aggregate given
eb270950
FX
1838 in ARG is filled in constants or values that are derived from caller's
1839 formal parameter in the way described by some kinds of jump functions. FBI
1840 is the context of the caller function for interprocedural analysis. ARG can
1841 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1842 the type of the aggregate, JFUNC is the jump function for the aggregate. */
be95e2b9 1843
static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  /* LIST holds only entries that can describe the aggregate at the call;
     ALL_LIST additionally keeps every dominating store seen, so later
     stores can be checked for clobbering earlier-found values.  */
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip over PHIs whose incoming values all leave R untouched;
	     get_continuation_for_phi also honors the AA walk budget.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot decode ends the walk.  */
	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  /* ITEM_COUNT caps total work at twice the number of items we may
	     emit, so a long chain of irrelevant stores cannot blow up
	     analysis time.  */
	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
1983
46771da5 1984
5d5f1e95
KV
1985/* Return the Ith param type of callee associated with call graph
1986 edge E. */
1987
1988tree
06d65050
JH
1989ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1990{
1991 int n;
1992 tree type = (e->callee
67348ccc 1993 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1994 : gimple_call_fntype (e->call_stmt));
1995 tree t = TYPE_ARG_TYPES (type);
1996
1997 for (n = 0; n < i; n++)
1998 {
1999 if (!t)
2000 break;
2001 t = TREE_CHAIN (t);
2002 }
2003 if (t)
2004 return TREE_VALUE (t);
2005 if (!e->callee)
2006 return NULL;
67348ccc 2007 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
2008 for (n = 0; n < i; n++)
2009 {
2010 if (!t)
2011 return NULL;
2012 t = TREE_CHAIN (t);
2013 }
2014 if (t)
2015 return TREE_TYPE (t);
2016 return NULL;
2017}
2018
86cd0334
MJ
2019/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2020 allocated structure or a previously existing one shared with other jump
2021 functions and/or transformation summaries. */
2022
2023ipa_bits *
2024ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2025{
2026 ipa_bits tmp;
2027 tmp.value = value;
2028 tmp.mask = mask;
2029
2030 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2031 if (*slot)
2032 return *slot;
2033
2034 ipa_bits *res = ggc_alloc<ipa_bits> ();
2035 res->value = value;
2036 res->mask = mask;
2037 *slot = res;
2038
2039 return res;
2040}
2041
2042/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2043 table in order to avoid creating multiple same ipa_bits structures. */
2044
/* Store the hash-consed ipa_bits for VALUE and MASK into JF.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
2051
2052/* Return a pointer to a value_range just like *TMP, but either find it in
2053 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2054
028d81b1
AH
2055static value_range *
2056ipa_get_value_range (value_range *tmp)
86cd0334 2057{
028d81b1 2058 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
2059 if (*slot)
2060 return *slot;
2061
028d81b1 2062 value_range *vr = ggc_alloc<value_range> ();
86cd0334
MJ
2063 *vr = *tmp;
2064 *slot = vr;
2065
2066 return vr;
2067}
2068
2069/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2070 equiv set. Use hash table in order to avoid creating multiple same copies of
2071 value_ranges. */
2072
028d81b1 2073static value_range *
5d462877 2074ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
86cd0334 2075{
5d462877 2076 value_range tmp (min, max, kind);
86cd0334
MJ
2077 return ipa_get_value_range (&tmp);
2078}
2079
2080/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2081 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2082 same value_range structures. */
2083
static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  /* Store a shared (hash-consed) value_range into JF.  */
  jf->m_vr = ipa_get_value_range (type, min, max);
}
2090
46771da5 2091/* Assign to JF a pointer to a value_range just like TMP but either fetch a
86cd0334
MJ
2092 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2093
static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  /* Store a shared (hash-consed) copy of *TMP into JF.  */
  jf->m_vr = ipa_get_value_range (tmp);
}
2099
3e293154
MJ
2100/* Compute jump function for all arguments of callsite CS and insert the
2101 information in the jump_functions array in the ipa_edge_args corresponding
2102 to this callsite. */
be95e2b9 2103
static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if jump functions were
     already computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* Polymorphic call context for pointer arguments, used by
	 devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Value range information: pointers get a non-NULL anti-range when
	 provably non-zero, integers get their recorded range converted to
	 the formal parameter's type.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Known-bits information: non-zero bits for integers, alignment for
	 pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Now classify the argument itself into one of the jump function
	 kinds.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2277
749aa96d 2278/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2279 from BB. */
749aa96d 2280
static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  /* Analyze the target the call would really reach, looking
	     through aliases.  */
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
2303
8b7773a4
MJ
2304/* If STMT looks like a statement loading a value from a member pointer formal
2305 parameter, return that parameter and store the offset of the field to
2306 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2307 might be clobbered). If USE_DELTA, then we look for a use of the delta
2308 field rather than the pfn. */
be95e2b9 2309
static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load may be expressed either as a COMPONENT_REF of the member
     pointer struct or as a MEM_REF at the field's byte offset.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member function
     pointer record (a __pfn and a __delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we look for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* MEM_REF form: the offset must match the field's byte position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2355
2356/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2357
3e293154
MJ
2358static bool
2359ipa_is_ssa_with_stmt_def (tree t)
2360{
2361 if (TREE_CODE (t) == SSA_NAME
2362 && !SSA_NAME_IS_DEFAULT_DEF (t))
2363 return true;
2364 else
2365 return false;
2366}
2367
40591473
MJ
2368/* Find the indirect call graph edge corresponding to STMT and mark it as a
2369 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
40a777e8
JH
2370 indirect call graph edge.
2371 If POLYMORPHIC is true record is as a destination of polymorphic call. */
be95e2b9 2372
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the flags; callers set the ones that apply after we return.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
				       param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call
	 (IPA_NODE_REF (node), param_index, true);
  return cs;
}
2391
e33c6cd6 2392/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 2393 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
2394 intermediate information about each formal parameter. Currently it checks
2395 whether the call calls a pointer that is a formal parameter and if so, the
2396 parameter is marked with the called flag and an indirect call graph edge
2397 describing the call is created. This is very simple for ordinary pointers
2398 represented in SSA but not-so-nice when it comes to member pointers. The
2399 ugly part of this function does nothing more than trying to match the
2400 pattern of such a call. An example of such a pattern is the gimple dump
2401 below, the call is on the last line:
3e293154 2402
ae788515
EB
2403 <bb 2>:
2404 f$__delta_5 = f.__delta;
2405 f$__pfn_24 = f.__pfn;
2406
2407 or
3e293154 2408 <bb 2>:
d242d063
MJ
2409 f$__delta_5 = MEM[(struct *)&f];
2410 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 2411
ae788515 2412 and a few lines below:
8aa29647
MJ
2413
2414 <bb 5>
3e293154
MJ
2415 D.2496_3 = (int) f$__pfn_24;
2416 D.2497_4 = D.2496_3 & 1;
2417 if (D.2497_4 != 0)
2418 goto <bb 3>;
2419 else
2420 goto <bb 4>;
2421
8aa29647 2422 <bb 6>:
3e293154
MJ
2423 D.2500_7 = (unsigned int) f$__delta_5;
2424 D.2501_8 = &S + D.2500_7;
2425 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2426 D.2503_10 = *D.2502_9;
2427 D.2504_12 = f$__pfn_24 + -1;
2428 D.2505_13 = (unsigned int) D.2504_12;
2429 D.2506_14 = D.2503_10 + D.2505_13;
2430 D.2507_15 = *D.2506_14;
2431 iftmp.11_16 = (String:: *) D.2507_15;
2432
8aa29647 2433 <bb 7>:
3e293154
MJ
2434 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2435 D.2500_19 = (unsigned int) f$__delta_5;
2436 D.2508_20 = &S + D.2500_19;
2437 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2438
2439 Such patterns are results of simple calls to a member pointer:
2440
2441 int doprinting (int (MyString::* f)(int) const)
2442 {
2443 MyString S ("somestring");
2444
2445 return (S.*f)(4);
2446 }
8b7773a4
MJ
2447
2448 Moreover, the function also looks for called pointers loaded from aggregates
2449 passed by value or reference. */
3e293154
MJ
2450
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called SSA name is directly the default definition of
     a formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* The called pointer may be loaded from an aggregate passed by value or
     reference in a parameter.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The callee must be a two-argument PHI joining the virtual
     and non-virtual lookup paths (see the big comment above).  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be the direct __pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be "pfn & 1" (possibly behind a conversion).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     as the called pfn; which field carries the virtual bit is a target
     property.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2588
b258210c
MJ
2589/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2590 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2591 FBI->node (described by FBI->info), create a call note for the
2592 statement. */
b258210c
MJ
2593
static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the incoming value of a formal parameter.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed between function
	 entry and the call.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      /* The object may be an ancestor (base at some offset) of a formal
	 parameter.  */
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2645
2646/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2647 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2648 containing intermediate information about each formal parameter. */
b258210c
MJ
2649
static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;
  /* NOTE(review): CS is dereferenced unconditionally below, so an edge is
     presumably guaranteed to exist for every analyzed call statement here;
     the NULL test above only guards the direct-call early return.  */

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      /* Remember whether the vtable pointer may change before the call;
	 if the dynamic type could be determined, it cannot.  */
      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2691
2692
e33c6cd6 2693/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2694 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2695 formal parameters are called. */
be95e2b9 2696
3e293154 2697static void
355fe088 2698ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2699{
726a989a 2700 if (is_gimple_call (stmt))
538dd0b7 2701 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2702}
2703
2704/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2705 If OP is a parameter declaration, mark it as used in the info structure
2706 passed in DATA. */
2707
2708static bool
355fe088 2709visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2710{
99b1c316 2711 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2712
2713 op = get_base_address (op);
2714 if (op
2715 && TREE_CODE (op) == PARM_DECL)
2716 {
2717 int index = ipa_get_param_decl_index (info, op);
2718 gcc_assert (index >= 0);
310bc633 2719 ipa_set_param_used (info, index, true);
062c604f
MJ
2720 }
2721
2722 return false;
3e293154
MJ
2723}
2724
8aab5218
MJ
2725/* Scan the statements in BB and inspect the uses of formal parameters. Store
2726 the findings in various structures of the associated ipa_node_params
2727 structure, such as parameter flags, notes etc. FBI holds various data about
2728 the function being analyzed. */
be95e2b9 2729
static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      /* Debug statements must not influence the analysis results.  */
      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      /* Mark parameters referenced by any load, store or address
	 operand.  */
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  /* PHI nodes can also reference parameters (e.g. their addresses).  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
2753
2754/* Calculate controlled uses of parameters of NODE. */
2755
2756static void
2757ipa_analyze_controlled_uses (struct cgraph_node *node)
2758{
99b1c316 2759 class ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2760
8aab5218 2761 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2762 {
2763 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2764 int controlled_uses = 0;
2765
062c604f
MJ
2766 /* For SSA regs see if parameter is used. For non-SSA we compute
2767 the flag during modification analysis. */
4502fe8d
MJ
2768 if (is_gimple_reg (parm))
2769 {
67348ccc 2770 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2771 parm);
2772 if (ddef && !has_zero_uses (ddef))
2773 {
2774 imm_use_iterator imm_iter;
2775 use_operand_p use_p;
2776
2777 ipa_set_param_used (info, i, true);
2778 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2779 if (!is_gimple_call (USE_STMT (use_p)))
2780 {
c6de6665
JJ
2781 if (!is_gimple_debug (USE_STMT (use_p)))
2782 {
2783 controlled_uses = IPA_UNDESCRIBED_USE;
2784 break;
2785 }
4502fe8d
MJ
2786 }
2787 else
2788 controlled_uses++;
2789 }
2790 else
2791 controlled_uses = 0;
2792 }
2793 else
2794 controlled_uses = IPA_UNDESCRIBED_USE;
2795 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2796 }
8aab5218 2797}
062c604f 2798
8aab5218 2799/* Free stuff in BI. */
062c604f 2800
8aab5218
MJ
2801static void
2802free_ipa_bb_info (struct ipa_bb_info *bi)
2803{
2804 bi->cg_edges.release ();
2805 bi->param_aa_statuses.release ();
3e293154
MJ
2806}
2807
8aab5218 2808/* Dominator walker driving the analysis. */
2c9561b5 2809
8aab5218 2810class analysis_dom_walker : public dom_walker
2c9561b5 2811{
8aab5218 2812public:
56b40062 2813 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2814 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2815
3daacdcd 2816 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2817
2818private:
56b40062 2819 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2820};
2821
3daacdcd 2822edge
8aab5218
MJ
2823analysis_dom_walker::before_dom_children (basic_block bb)
2824{
2825 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2826 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2827 return NULL;
2c9561b5
MJ
2828}
2829
c3431191
ML
2830/* Release body info FBI. */
2831
2832void
2833ipa_release_body_info (struct ipa_func_body_info *fbi)
2834{
2835 int i;
2836 struct ipa_bb_info *bi;
2837
2838 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2839 free_ipa_bb_info (bi);
2840 fbi->bb_infos.release ();
2841}
2842
026c3cfd 2843/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2844 of NODE, analyze their uses and compute jump functions associated
2845 with actual arguments of calls from within NODE. */
062c604f
MJ
2846
2847void
2848ipa_analyze_node (struct cgraph_node *node)
2849{
56b40062 2850 struct ipa_func_body_info fbi;
99b1c316 2851 class ipa_node_params *info;
062c604f 2852
57dbdc5a
MJ
2853 ipa_check_create_node_params ();
2854 ipa_check_create_edge_args ();
6cf67b62 2855 info = IPA_NODE_REF_GET_CREATE (node);
8aab5218
MJ
2856
2857 if (info->analysis_done)
2858 return;
2859 info->analysis_done = 1;
2860
2861 if (ipa_func_spec_opts_forbid_analysis_p (node))
2862 {
2863 for (int i = 0; i < ipa_get_param_count (info); i++)
2864 {
2865 ipa_set_param_used (info, i, true);
2866 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2867 }
2868 return;
2869 }
2870
2871 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2872 push_cfun (func);
2873 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2874 ipa_initialize_node_params (node);
8aab5218 2875 ipa_analyze_controlled_uses (node);
062c604f 2876
8aab5218
MJ
2877 fbi.node = node;
2878 fbi.info = IPA_NODE_REF (node);
2879 fbi.bb_infos = vNULL;
2880 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2881 fbi.param_count = ipa_get_param_count (info);
fdfd7f53 2882 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
062c604f 2883
8aab5218
MJ
2884 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2885 {
2886 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2887 bi->cg_edges.safe_push (cs);
2888 }
062c604f 2889
8aab5218
MJ
2890 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2891 {
2892 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2893 bi->cg_edges.safe_push (cs);
2894 }
2895
2896 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2897
c3431191 2898 ipa_release_body_info (&fbi);
8aab5218 2899 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2900 pop_cfun ();
062c604f 2901}
062c604f 2902
be95e2b9 2903/* Update the jump functions associated with call graph edge E when the call
3e293154 2904 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2905 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2906
3e293154
MJ
2907static void
2908update_jump_functions_after_inlining (struct cgraph_edge *cs,
2909 struct cgraph_edge *e)
2910{
99b1c316
MS
2911 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2912 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
2913 if (!args)
2914 return;
3e293154
MJ
2915 int count = ipa_get_cs_argument_count (args);
2916 int i;
2917
2918 for (i = 0; i < count; i++)
2919 {
b258210c 2920 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 2921 class ipa_polymorphic_call_context *dst_ctx
5ce97055 2922 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2923
eb270950
FX
2924 if (dst->agg.items)
2925 {
2926 struct ipa_agg_jf_item *item;
2927 int j;
2928
2929 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2930 {
2931 int dst_fid;
2932 struct ipa_jump_func *src;
2933
2934 if (item->jftype != IPA_JF_PASS_THROUGH
2935 && item->jftype != IPA_JF_LOAD_AGG)
2936 continue;
2937
2938 dst_fid = item->value.pass_through.formal_id;
1c3c3f45 2939 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
eb270950
FX
2940 {
2941 item->jftype = IPA_JF_UNKNOWN;
2942 continue;
2943 }
2944
2945 item->value.pass_through.formal_id = -1;
2946 src = ipa_get_ith_jump_func (top, dst_fid);
2947 if (src->type == IPA_JF_CONST)
2948 {
2949 if (item->jftype == IPA_JF_PASS_THROUGH
2950 && item->value.pass_through.operation == NOP_EXPR)
2951 {
2952 item->jftype = IPA_JF_CONST;
2953 item->value.constant = src->value.constant.value;
2954 continue;
2955 }
2956 }
2957 else if (src->type == IPA_JF_PASS_THROUGH
2958 && src->value.pass_through.operation == NOP_EXPR)
2959 {
2960 if (item->jftype == IPA_JF_PASS_THROUGH
2961 || !item->value.load_agg.by_ref
2962 || src->value.pass_through.agg_preserved)
2963 item->value.pass_through.formal_id
2964 = src->value.pass_through.formal_id;
2965 }
2966 else if (src->type == IPA_JF_ANCESTOR)
2967 {
2968 if (item->jftype == IPA_JF_PASS_THROUGH)
2969 {
2970 if (!src->value.ancestor.offset)
2971 item->value.pass_through.formal_id
2972 = src->value.ancestor.formal_id;
2973 }
2974 else if (src->value.ancestor.agg_preserved)
2975 {
2976 gcc_checking_assert (item->value.load_agg.by_ref);
2977
2978 item->value.pass_through.formal_id
2979 = src->value.ancestor.formal_id;
2980 item->value.load_agg.offset
2981 += src->value.ancestor.offset;
2982 }
2983 }
2984
2985 if (item->value.pass_through.formal_id < 0)
2986 item->jftype = IPA_JF_UNKNOWN;
2987 }
2988 }
2989
1c3c3f45
FX
2990 if (!top)
2991 {
2992 ipa_set_jf_unknown (dst);
2993 continue;
2994 }
2995
685b0d13
MJ
2996 if (dst->type == IPA_JF_ANCESTOR)
2997 {
b258210c 2998 struct ipa_jump_func *src;
8b7773a4 2999 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 3000 class ipa_polymorphic_call_context *src_ctx
5ce97055 3001 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 3002
b258210c
MJ
3003 /* Variable number of arguments can cause havoc if we try to access
3004 one that does not exist in the inlined edge. So make sure we
3005 don't. */
8b7773a4 3006 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 3007 {
04be694e 3008 ipa_set_jf_unknown (dst);
b258210c
MJ
3009 continue;
3010 }
3011
8b7773a4
MJ
3012 src = ipa_get_ith_jump_func (top, dst_fid);
3013
5ce97055
JH
3014 if (src_ctx && !src_ctx->useless_p ())
3015 {
99b1c316 3016 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3017
3018 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3019 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 3020 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3021 ctx.offset_by (dst->value.ancestor.offset);
3022 if (!ctx.useless_p ())
3023 {
a7d1f3fe
ML
3024 if (!dst_ctx)
3025 {
3026 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3027 count);
3028 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3029 }
3030
3031 dst_ctx->combine_with (ctx);
5ce97055
JH
3032 }
3033 }
3034
eb270950
FX
3035 /* Parameter and argument in ancestor jump function must be pointer
3036 type, which means access to aggregate must be by-reference. */
3037 gcc_assert (!src->agg.items || src->agg.by_ref);
3038
3039 if (src->agg.items && dst->value.ancestor.agg_preserved)
8b7773a4
MJ
3040 {
3041 struct ipa_agg_jf_item *item;
3042 int j;
3043
3044 /* Currently we do not produce clobber aggregate jump functions,
3045 replace with merging when we do. */
3046 gcc_assert (!dst->agg.items);
3047
9771b263 3048 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3049 dst->agg.by_ref = src->agg.by_ref;
9771b263 3050 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
3051 item->offset -= dst->value.ancestor.offset;
3052 }
3053
3b97a5c7
MJ
3054 if (src->type == IPA_JF_PASS_THROUGH
3055 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
3056 {
3057 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3058 dst->value.ancestor.agg_preserved &=
3059 src->value.pass_through.agg_preserved;
3060 }
b258210c
MJ
3061 else if (src->type == IPA_JF_ANCESTOR)
3062 {
3063 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3064 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
3065 dst->value.ancestor.agg_preserved &=
3066 src->value.ancestor.agg_preserved;
b258210c
MJ
3067 }
3068 else
04be694e 3069 ipa_set_jf_unknown (dst);
b258210c
MJ
3070 }
3071 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 3072 {
b258210c
MJ
3073 struct ipa_jump_func *src;
3074 /* We must check range due to calls with variable number of arguments
3075 and we cannot combine jump functions with operations. */
3076 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 3077 && (top && dst->value.pass_through.formal_id
b258210c
MJ
3078 < ipa_get_cs_argument_count (top)))
3079 {
8b7773a4
MJ
3080 int dst_fid = dst->value.pass_through.formal_id;
3081 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 3082 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 3083 class ipa_polymorphic_call_context *src_ctx
5ce97055 3084 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 3085
5ce97055
JH
3086 if (src_ctx && !src_ctx->useless_p ())
3087 {
99b1c316 3088 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
3089
3090 /* TODO: Make type preserved safe WRT contexts. */
44210a96 3091 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 3092 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
3093 if (!ctx.useless_p ())
3094 {
3095 if (!dst_ctx)
3096 {
3097 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3098 count);
3099 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3100 }
3101 dst_ctx->combine_with (ctx);
3102 }
3103 }
b8f6e610
MJ
3104 switch (src->type)
3105 {
3106 case IPA_JF_UNKNOWN:
04be694e 3107 ipa_set_jf_unknown (dst);
b8f6e610 3108 break;
b8f6e610
MJ
3109 case IPA_JF_CONST:
3110 ipa_set_jf_cst_copy (dst, src);
3111 break;
3112
3113 case IPA_JF_PASS_THROUGH:
3114 {
3115 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3116 enum tree_code operation;
3117 operation = ipa_get_jf_pass_through_operation (src);
3118
3119 if (operation == NOP_EXPR)
3120 {
3b97a5c7 3121 bool agg_p;
b8f6e610
MJ
3122 agg_p = dst_agg_p
3123 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 3124 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 3125 }
a2b4c188
KV
3126 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3127 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
3128 else
3129 {
3130 tree operand = ipa_get_jf_pass_through_operand (src);
3131 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3132 operation);
3133 }
3134 break;
3135 }
3136 case IPA_JF_ANCESTOR:
3137 {
3b97a5c7 3138 bool agg_p;
b8f6e610
MJ
3139 agg_p = dst_agg_p
3140 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
3141 ipa_set_ancestor_jf (dst,
3142 ipa_get_jf_ancestor_offset (src),
b8f6e610 3143 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 3144 agg_p);
b8f6e610
MJ
3145 break;
3146 }
3147 default:
3148 gcc_unreachable ();
3149 }
8b7773a4
MJ
3150
3151 if (src->agg.items
b8f6e610 3152 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
3153 {
3154 /* Currently we do not produce clobber aggregate jump
3155 functions, replace with merging when we do. */
3156 gcc_assert (!dst->agg.items);
3157
3158 dst->agg.by_ref = src->agg.by_ref;
9771b263 3159 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 3160 }
b258210c
MJ
3161 }
3162 else
04be694e 3163 ipa_set_jf_unknown (dst);
3e293154 3164 }
b258210c
MJ
3165 }
3166}
3167
5ce97055
JH
3168/* If TARGET is an addr_expr of a function declaration, make it the
3169 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3170 Otherwise, return NULL. */
b258210c 3171
3949c4a7 3172struct cgraph_edge *
5ce97055
JH
3173ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3174 bool speculative)
b258210c
MJ
3175{
3176 struct cgraph_node *callee;
48b1474e 3177 bool unreachable = false;
b258210c 3178
ceeffab0
MJ
3179 if (TREE_CODE (target) == ADDR_EXPR)
3180 target = TREE_OPERAND (target, 0);
b258210c 3181 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
3182 {
3183 target = canonicalize_constructor_val (target, NULL);
3184 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3185 {
db66bf68
JH
3186 /* Member pointer call that goes through a VMT lookup. */
3187 if (ie->indirect_info->member_ptr
3188 /* Or if target is not an invariant expression and we do not
3189 know if it will evaulate to function at runtime.
3190 This can happen when folding through &VAR, where &VAR
3191 is IP invariant, but VAR itself is not.
3192
3193 TODO: Revisit this when GCC 5 is branched. It seems that
3194 member_ptr check is not needed and that we may try to fold
3195 the expression and see if VAR is readonly. */
3196 || !is_gimple_ip_invariant (target))
3197 {
3198 if (dump_enabled_p ())
3199 {
4f5b9c80 3200 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
3201 "discovered direct call non-invariant %s\n",
3202 ie->caller->dump_name ());
db66bf68
JH
3203 }
3204 return NULL;
3205 }
3206
c13bc3d9 3207
2b5f0895
XDL
3208 if (dump_enabled_p ())
3209 {
4f5b9c80 3210 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 3211 "discovered direct call to non-function in %s, "
807b7d62 3212 "making it __builtin_unreachable\n",
464d0118 3213 ie->caller->dump_name ());
2b5f0895 3214 }
3c9e6fca 3215
48b1474e 3216 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3217 callee = cgraph_node::get_create (target);
48b1474e 3218 unreachable = true;
a0a7b611 3219 }
48b1474e 3220 else
d52f5295 3221 callee = cgraph_node::get (target);
a0a7b611 3222 }
48b1474e 3223 else
d52f5295 3224 callee = cgraph_node::get (target);
a0a7b611
JH
3225
3226 /* Because may-edges are not explicitely represented and vtable may be external,
3227 we may create the first reference to the object in the unit. */
a62bfab5 3228 if (!callee || callee->inlined_to)
a0a7b611 3229 {
a0a7b611
JH
3230
3231 /* We are better to ensure we can refer to it.
3232 In the case of static functions we are out of luck, since we already
3233 removed its body. In the case of public functions we may or may
3234 not introduce the reference. */
3235 if (!canonicalize_constructor_val (target, NULL)
3236 || !TREE_PUBLIC (target))
3237 {
3238 if (dump_file)
3239 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
67914693 3240 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
3241 ie->caller->dump_name (),
3242 ie->callee->dump_name ());
a0a7b611
JH
3243 return NULL;
3244 }
d52f5295 3245 callee = cgraph_node::get_create (target);
a0a7b611 3246 }
2b5f0895 3247
0127c169
JH
3248 /* If the edge is already speculated. */
3249 if (speculative && ie->speculative)
3250 {
3251 struct cgraph_edge *e2;
3252 struct ipa_ref *ref;
3253 ie->speculative_call_info (e2, ie, ref);
3254 if (e2->callee->ultimate_alias_target ()
3255 != callee->ultimate_alias_target ())
3256 {
3257 if (dump_file)
464d0118
ML
3258 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
3259 "target (%s -> %s) but the call is already "
3260 "speculated to %s. Giving up.\n",
3261 ie->caller->dump_name (), callee->dump_name (),
3262 e2->callee->dump_name ());
0127c169
JH
3263 }
3264 else
3265 {
3266 if (dump_file)
3267 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
464d0118
ML
3268 "(%s -> %s) this agree with previous speculation.\n",
3269 ie->caller->dump_name (), callee->dump_name ());
0127c169
JH
3270 }
3271 return NULL;
3272 }
3273
2b5f0895
XDL
3274 if (!dbg_cnt (devirt))
3275 return NULL;
3276
1dbee8c9 3277 ipa_check_create_node_params ();
ceeffab0 3278
67914693 3279 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 3280 the cgraph node too early. */
a62bfab5 3281 gcc_assert (!callee->inlined_to);
17afc0fe 3282
48b1474e 3283 if (dump_file && !unreachable)
b258210c 3284 {
5ce97055 3285 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 3286 "(%s -> %s), for stmt ",
b258210c 3287 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 3288 speculative ? "speculative" : "known",
464d0118
ML
3289 ie->caller->dump_name (),
3290 callee->dump_name ());
b258210c
MJ
3291 if (ie->call_stmt)
3292 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3293 else
3294 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 3295 }
2b5f0895
XDL
3296 if (dump_enabled_p ())
3297 {
4f5b9c80 3298 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62 3299 "converting indirect call in %s to direct call to %s\n",
3629ff8a 3300 ie->caller->dump_name (), callee->dump_name ());
2b5f0895 3301 }
5ce97055 3302 if (!speculative)
d8d5aef1
JH
3303 {
3304 struct cgraph_edge *orig = ie;
27c5a177 3305 ie = cgraph_edge::make_direct (ie, callee);
d8d5aef1
JH
3306 /* If we resolved speculative edge the cost is already up to date
3307 for direct call (adjusted by inline_edge_duplication_hook). */
3308 if (ie == orig)
3309 {
56f62793 3310 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
3311 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3312 - eni_size_weights.call_cost);
3313 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3314 - eni_time_weights.call_cost);
3315 }
3316 }
5ce97055
JH
3317 else
3318 {
3319 if (!callee->can_be_discarded_p ())
3320 {
3321 cgraph_node *alias;
3322 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3323 if (alias)
3324 callee = alias;
3325 }
d8d5aef1 3326 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3327 ie = ie->make_speculative
1bad9c18 3328 (callee, ie->count.apply_scale (8, 10));
5ce97055 3329 }
749aa96d 3330
b258210c 3331 return ie;
3e293154
MJ
3332}
3333
91bb9f80
MJ
3334/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3335 CONSTRUCTOR and return it. Return NULL if the search fails for some
3336 reason. */
3337
3338static tree
3339find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3340{
3341 tree type = TREE_TYPE (constructor);
3342 if (TREE_CODE (type) != ARRAY_TYPE
3343 && TREE_CODE (type) != RECORD_TYPE)
3344 return NULL;
3345
3346 unsigned ix;
3347 tree index, val;
3348 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3349 {
3350 HOST_WIDE_INT elt_offset;
3351 if (TREE_CODE (type) == ARRAY_TYPE)
3352 {
3353 offset_int off;
3354 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3355 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3356
3357 if (index)
3358 {
db9bbdec
RB
3359 if (TREE_CODE (index) == RANGE_EXPR)
3360 off = wi::to_offset (TREE_OPERAND (index, 0));
3361 else
3362 off = wi::to_offset (index);
91bb9f80
MJ
3363 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3364 {
3365 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3366 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3367 off = wi::sext (off - wi::to_offset (low_bound),
3368 TYPE_PRECISION (TREE_TYPE (index)));
3369 }
3370 off *= wi::to_offset (unit_size);
db9bbdec
RB
3371 /* ??? Handle more than just the first index of a
3372 RANGE_EXPR. */
91bb9f80
MJ
3373 }
3374 else
3375 off = wi::to_offset (unit_size) * ix;
3376
3377 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3378 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3379 continue;
3380 elt_offset = off.to_shwi ();
3381 }
3382 else if (TREE_CODE (type) == RECORD_TYPE)
3383 {
3384 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3385 if (DECL_BIT_FIELD (index))
3386 continue;
3387 elt_offset = int_bit_position (index);
3388 }
3389 else
3390 gcc_unreachable ();
3391
3392 if (elt_offset > req_offset)
3393 return NULL;
3394
3395 if (TREE_CODE (val) == CONSTRUCTOR)
3396 return find_constructor_constant_at_offset (val,
3397 req_offset - elt_offset);
3398
3399 if (elt_offset == req_offset
3400 && is_gimple_reg_type (TREE_TYPE (val))
3401 && is_gimple_ip_invariant (val))
3402 return val;
3403 }
3404 return NULL;
3405}
3406
3407/* Check whether SCALAR could be used to look up an aggregate interprocedural
3408 invariant from a static constructor and if so, return it. Otherwise return
3409 NULL. */
3410
3411static tree
3412ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3413{
3414 if (by_ref)
3415 {
3416 if (TREE_CODE (scalar) != ADDR_EXPR)
3417 return NULL;
3418 scalar = TREE_OPERAND (scalar, 0);
3419 }
3420
8813a647 3421 if (!VAR_P (scalar)
91bb9f80
MJ
3422 || !is_global_var (scalar)
3423 || !TREE_READONLY (scalar)
3424 || !DECL_INITIAL (scalar)
3425 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3426 return NULL;
3427
3428 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3429}
3430
eb270950
FX
3431/* Retrieve value from AGG, a set of known offset/value for an aggregate or
3432 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3433 return NULL if there is none. BY_REF specifies whether the value has to be
3434 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3435 the boolean it points to is set to true if the value comes from an
3436 initializer of a constant. */
8b7773a4
MJ
3437
3438tree
eb270950 3439ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
91bb9f80
MJ
3440 HOST_WIDE_INT offset, bool by_ref,
3441 bool *from_global_constant)
8b7773a4 3442{
eb270950 3443 struct ipa_agg_value *item;
8b7773a4
MJ
3444 int i;
3445
91bb9f80
MJ
3446 if (scalar)
3447 {
3448 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3449 if (res)
3450 {
3451 if (from_global_constant)
3452 *from_global_constant = true;
3453 return res;
3454 }
3455 }
3456
3457 if (!agg
3458 || by_ref != agg->by_ref)
8b7773a4
MJ
3459 return NULL;
3460
eb270950 3461 FOR_EACH_VEC_ELT (agg->items, i, item)
2c9561b5
MJ
3462 if (item->offset == offset)
3463 {
3464 /* Currently we do not have clobber values, return NULL for them once
3465 we do. */
3466 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3467 if (from_global_constant)
3468 *from_global_constant = false;
2c9561b5
MJ
3469 return item->value;
3470 }
8b7773a4
MJ
3471 return NULL;
3472}
3473
4502fe8d 3474/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3475 reference description RDESC. Return true if the reference has been
3476 successfully found and removed. */
4502fe8d 3477
568cda29 3478static bool
5e20cdc9 3479remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3480{
3481 struct ipa_ref *to_del;
3482 struct cgraph_edge *origin;
3483
3484 origin = rdesc->cs;
a854f856
MJ
3485 if (!origin)
3486 return false;
d122681a
ML
3487 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3488 origin->lto_stmt_uid);
568cda29
MJ
3489 if (!to_del)
3490 return false;
3491
d122681a 3492 to_del->remove_reference ();
4502fe8d 3493 if (dump_file)
464d0118 3494 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3629ff8a 3495 origin->caller->dump_name (), symbol->dump_name ());
568cda29 3496 return true;
4502fe8d
MJ
3497}
3498
3499/* If JFUNC has a reference description with refcount different from
3500 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3501 NULL. JFUNC must be a constant jump function. */
3502
3503static struct ipa_cst_ref_desc *
3504jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3505{
3506 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3507 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3508 return rdesc;
3509 else
3510 return NULL;
3511}
3512
568cda29
MJ
3513/* If the value of constant jump function JFUNC is an address of a function
3514 declaration, return the associated call graph node. Otherwise return
3515 NULL. */
3516
3517static cgraph_node *
3518cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3519{
3520 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3521 tree cst = ipa_get_jf_constant (jfunc);
3522 if (TREE_CODE (cst) != ADDR_EXPR
3523 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3524 return NULL;
3525
d52f5295 3526 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3527}
3528
3529
3530/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3531 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3532 the edge specified in the rdesc. Return false if either the symbol or the
3533 reference could not be found, otherwise return true. */
3534
3535static bool
3536try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3537{
3538 struct ipa_cst_ref_desc *rdesc;
3539 if (jfunc->type == IPA_JF_CONST
3540 && (rdesc = jfunc_rdesc_usable (jfunc))
3541 && --rdesc->refcount == 0)
3542 {
5e20cdc9 3543 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3544 if (!symbol)
3545 return false;
3546
3547 return remove_described_reference (symbol, rdesc);
3548 }
3549 return true;
3550}
3551
b258210c
MJ
3552/* Try to find a destination for indirect edge IE that corresponds to a simple
3553 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3554 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3555 the type of the parameter to which the result of JFUNC is passed. If it can
3556 be determined, return the newly direct edge, otherwise return NULL.
eb270950
FX
3557 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3558 relative to. */
be95e2b9 3559
b258210c
MJ
3560static struct cgraph_edge *
3561try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3562 struct ipa_jump_func *jfunc, tree target_type,
eb270950 3563 struct cgraph_node *new_root,
99b1c316 3564 class ipa_node_params *new_root_info)
b258210c 3565{
4502fe8d 3566 struct cgraph_edge *cs;
b258210c 3567 tree target;
042ae7d2 3568 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3569 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3570 if (agg_contents)
3571 {
3572 bool from_global_constant;
eb270950
FX
3573 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3574 new_root,
3575 &jfunc->agg);
3576 target = ipa_find_agg_cst_for_param (&agg, scalar,
91bb9f80
MJ
3577 ie->indirect_info->offset,
3578 ie->indirect_info->by_ref,
3579 &from_global_constant);
eb270950 3580 agg.release ();
91bb9f80
MJ
3581 if (target
3582 && !from_global_constant
3583 && !ie->indirect_info->guaranteed_unmodified)
3584 return NULL;
3585 }
b258210c 3586 else
91bb9f80 3587 target = scalar;
d250540a
MJ
3588 if (!target)
3589 return NULL;
4502fe8d
MJ
3590 cs = ipa_make_edge_direct_to_target (ie, target);
3591
a12cd2db 3592 if (cs && !agg_contents)
568cda29
MJ
3593 {
3594 bool ok;
3595 gcc_checking_assert (cs->callee
ae6d0907
MJ
3596 && (cs != ie
3597 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3598 || !cgraph_node_for_jfunc (jfunc)
3599 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3600 ok = try_decrement_rdesc_refcount (jfunc);
3601 gcc_checking_assert (ok);
3602 }
4502fe8d
MJ
3603
3604 return cs;
b258210c
MJ
3605}
3606
bec81025
MJ
3607/* Return the target to be used in cases of impossible devirtualization. IE
3608 and target (the latter can be NULL) are dumped when dumping is enabled. */
3609
72972c22
MJ
3610tree
3611ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3612{
3613 if (dump_file)
3614 {
3615 if (target)
3616 fprintf (dump_file,
464d0118
ML
3617 "Type inconsistent devirtualization: %s->%s\n",
3618 ie->caller->dump_name (),
bec81025
MJ
3619 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3620 else
3621 fprintf (dump_file,
464d0118
ML
3622 "No devirtualization target in %s\n",
3623 ie->caller->dump_name ());
bec81025
MJ
3624 }
3625 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3626 cgraph_node::get_create (new_target);
bec81025
MJ
3627 return new_target;
3628}
3629
d250540a
MJ
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  /* Set when the devirtualization is only valid under the assumption that the
     vtable pointer has not changed; such edges are made speculative.  */
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      /* A scalar constant in the jump function may itself be the vtable
	 pointer value; aggregate contents are consulted as well.  */
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* Valid only as long as the vptr is unchanged, hence
		     speculative if it may have changed.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second approach: combine the known polymorphic context with the context
     of the call site itself and enumerate possible targets.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A complete list with at most one target overrides any speculative
	 guess made above.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* Last resort: speculatively devirtualize hot calls that are not
	 already speculative.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  /* Do not turn an impossible target into a speculative edge; give
	     up on speculation but keep a direct call to unreachable.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3744
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* The lattices in jump functions of CS are relative to the root of the
     tree of inline clones CS->caller belongs to.  */
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = IPA_NODE_REF (new_root);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* IE may be removed or turned direct below; remember its successor
	 up front.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making edges direct may have reallocated the summary vector;
	     refetch the edge args pointer.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The call was not made direct; translate its parameter description
	 through the jump function so later inlining can retry.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3889
3890/* Recursively traverse subtree of NODE (including node) made of inlined
3891 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3892 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3893 update_jump_functions_after_inlining on all non-inlined edges that lead out
3894 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3895 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3896 created. */
be95e2b9 3897
f8e2a1ed 3898static bool
3e293154
MJ
3899propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3900 struct cgraph_node *node,
d52f5295 3901 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3902{
3903 struct cgraph_edge *e;
f8e2a1ed 3904 bool res;
3e293154 3905
e33c6cd6 3906 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3907
3908 for (e = node->callees; e; e = e->next_callee)
3909 if (!e->inline_failed)
f8e2a1ed 3910 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3911 else
3912 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3913 for (e = node->indirect_calls; e; e = e->next_callee)
3914 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3915
3916 return res;
3e293154
MJ
3917}
3918
4502fe8d
MJ
3919/* Combine two controlled uses counts as done during inlining. */
3920
3921static int
3922combine_controlled_uses_counters (int c, int d)
3923{
3924 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3925 return IPA_UNDESCRIBED_USE;
3926 else
3927 return c + d - 1;
3928}
3929
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  /* Only arguments that correspond to formal parameters of the callee carry
     controlled-uses information.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone; if the known constant is the
		 address of a function, the cloning-created reference to it
		 can be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of IPA-CP clones and drop the
		     duplicated references they carry, stopping at the clone
		     the rdesc was created for.  */
		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameter count cannot be matched
     to any parameter; mark whatever they describe as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
4049
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  /* The edge args of CS can only be discarded when no surviving constant
     jump function still points into them through its rdesc.  */
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4095
86cd0334
MJ
4096/* Ensure that array of edge arguments infos is big enough to accommodate a
4097 structure for all edges and reallocates it if not. Also, allocate
4098 associated hash tables is they do not already exist. */
4099
4100void
4101ipa_check_create_edge_args (void)
4102{
6fe906a3
MJ
4103 if (!ipa_edge_args_sum)
4104 ipa_edge_args_sum
78cd68c0 4105 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
6fe906a3 4106 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
4107 if (!ipa_bits_hash_table)
4108 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4109 if (!ipa_vr_hash_table)
4110 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4111}
4112
771578a0 4113/* Free all ipa_edge structures. */
be95e2b9 4114
518dc859 4115void
771578a0 4116ipa_free_all_edge_args (void)
518dc859 4117{
6fe906a3 4118 if (!ipa_edge_args_sum)
9771b263
DN
4119 return;
4120
ddf628e4 4121 ggc_delete (ipa_edge_args_sum);
6fe906a3 4122 ipa_edge_args_sum = NULL;
518dc859
RL
4123}
4124
771578a0 4125/* Free all ipa_node_params structures. */
be95e2b9 4126
518dc859 4127void
771578a0 4128ipa_free_all_node_params (void)
518dc859 4129{
ddf628e4 4130 ggc_delete (ipa_node_params_sum);
dd912cb8 4131 ipa_node_params_sum = NULL;
771578a0
MJ
4132}
4133
9d3e0adc 4134/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 4135 tables if they do not already exist. */
04be694e
MJ
4136
4137void
9d3e0adc 4138ipcp_transformation_initialize (void)
04be694e 4139{
86cd0334
MJ
4140 if (!ipa_bits_hash_table)
4141 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4142 if (!ipa_vr_hash_table)
4143 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
9d3e0adc
ML
4144 if (ipcp_transformation_sum == NULL)
4145 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
04be694e
MJ
4146}
4147
12e088ba
AC
4148/* Release the IPA CP transformation summary. */
4149
4150void
4151ipcp_free_transformation_sum (void)
4152{
4153 if (!ipcp_transformation_sum)
4154 return;
4155
78cd68c0
ML
4156 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4157 ggc_free (ipcp_transformation_sum);
12e088ba
AC
4158 ipcp_transformation_sum = NULL;
4159}
4160
2c9561b5
MJ
4161/* Set the aggregate replacements of NODE to be AGGVALS. */
4162
4163void
4164ipa_set_node_agg_value_chain (struct cgraph_node *node,
4165 struct ipa_agg_replacement_value *aggvals)
4166{
9d3e0adc
ML
4167 ipcp_transformation_initialize ();
4168 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4169 s->agg_values = aggvals;
2c9561b5
MJ
4170}
4171
6fe906a3
MJ
4172/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4173 count data structures accordingly. */
be95e2b9 4174
6fe906a3
MJ
4175void
4176ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 4177{
568cda29
MJ
4178 if (args->jump_functions)
4179 {
4180 struct ipa_jump_func *jf;
4181 int i;
4182 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
4183 {
4184 struct ipa_cst_ref_desc *rdesc;
4185 try_decrement_rdesc_refcount (jf);
4186 if (jf->type == IPA_JF_CONST
4187 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4188 && rdesc->cs == cs)
4189 rdesc->cs = NULL;
4190 }
568cda29 4191 }
518dc859
RL
4192}
4193
6fe906a3
MJ
4194/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4195 reference count data strucutres accordingly. */
be95e2b9 4196
6fe906a3
MJ
4197void
4198ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4199 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 4200{
8b7773a4 4201 unsigned int i;
771578a0 4202
9771b263 4203 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
4204 if (old_args->polymorphic_call_contexts)
4205 new_args->polymorphic_call_contexts
4206 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 4207
9771b263 4208 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
4209 {
4210 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4211 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4212
4213 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4214
4215 if (src_jf->type == IPA_JF_CONST)
4216 {
4217 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4218
4219 if (!src_rdesc)
4220 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
4221 else if (src->caller == dst->caller)
4222 {
4223 struct ipa_ref *ref;
5e20cdc9 4224 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 4225 gcc_checking_assert (n);
d122681a
ML
4226 ref = src->caller->find_reference (n, src->call_stmt,
4227 src->lto_stmt_uid);
568cda29 4228 gcc_checking_assert (ref);
d122681a 4229 dst->caller->clone_reference (ref, ref->stmt);
568cda29 4230
601f3293 4231 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
4232 dst_rdesc->cs = dst;
4233 dst_rdesc->refcount = src_rdesc->refcount;
4234 dst_rdesc->next_duplicate = NULL;
4235 dst_jf->value.constant.rdesc = dst_rdesc;
4236 }
4502fe8d
MJ
4237 else if (src_rdesc->cs == src)
4238 {
601f3293 4239 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 4240 dst_rdesc->cs = dst;
4502fe8d 4241 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
4242 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4243 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
4244 dst_jf->value.constant.rdesc = dst_rdesc;
4245 }
4246 else
4247 {
4248 struct ipa_cst_ref_desc *dst_rdesc;
4249 /* This can happen during inlining, when a JFUNC can refer to a
4250 reference taken in a function up in the tree of inline clones.
4251 We need to find the duplicate that refers to our tree of
4252 inline clones. */
4253
a62bfab5 4254 gcc_assert (dst->caller->inlined_to);
4502fe8d
MJ
4255 for (dst_rdesc = src_rdesc->next_duplicate;
4256 dst_rdesc;
4257 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
4258 {
4259 struct cgraph_node *top;
a62bfab5
ML
4260 top = dst_rdesc->cs->caller->inlined_to
4261 ? dst_rdesc->cs->caller->inlined_to
2fd0985c 4262 : dst_rdesc->cs->caller;
a62bfab5 4263 if (dst->caller->inlined_to == top)
2fd0985c
MJ
4264 break;
4265 }
44a60244 4266 gcc_assert (dst_rdesc);
4502fe8d
MJ
4267 dst_jf->value.constant.rdesc = dst_rdesc;
4268 }
4269 }
6fe45955
MJ
4270 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4271 && src->caller == dst->caller)
4272 {
a62bfab5
ML
4273 struct cgraph_node *inline_root = dst->caller->inlined_to
4274 ? dst->caller->inlined_to : dst->caller;
99b1c316 4275 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
6fe45955
MJ
4276 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4277
4278 int c = ipa_get_controlled_uses (root_info, idx);
4279 if (c != IPA_UNDESCRIBED_USE)
4280 {
4281 c++;
4282 ipa_set_controlled_uses (root_info, idx, c);
4283 }
4284 }
4502fe8d 4285 }
771578a0
MJ
4286}
4287
dd912cb8 4288/* Analyze newly added function into callgraph. */
be95e2b9 4289
771578a0 4290static void
dd912cb8 4291ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 4292{
dd912cb8
ML
4293 if (node->has_gimple_body_p ())
4294 ipa_analyze_node (node);
4295}
771578a0 4296
dd912cb8
ML
4297/* Hook that is called by summary when a node is duplicated. */
4298
4299void
4300ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4301 ipa_node_params *old_info,
4302 ipa_node_params *new_info)
4303{
4304 ipa_agg_replacement_value *old_av, *new_av;
771578a0 4305
f65f1ae3 4306 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 4307 new_info->lattices = NULL;
771578a0 4308 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
4309 new_info->known_csts = old_info->known_csts.copy ();
4310 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 4311
8aab5218 4312 new_info->analysis_done = old_info->analysis_done;
3949c4a7 4313 new_info->node_enqueued = old_info->node_enqueued;
7e729474 4314 new_info->versionable = old_info->versionable;
2c9561b5
MJ
4315
4316 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 4317 if (old_av)
2c9561b5 4318 {
04be694e
MJ
4319 new_av = NULL;
4320 while (old_av)
4321 {
4322 struct ipa_agg_replacement_value *v;
2c9561b5 4323
04be694e
MJ
4324 v = ggc_alloc<ipa_agg_replacement_value> ();
4325 memcpy (v, old_av, sizeof (*v));
4326 v->next = new_av;
4327 new_av = v;
4328 old_av = old_av->next;
4329 }
4330 ipa_set_node_agg_value_chain (dst, new_av);
4331 }
98aad294 4332}
04be694e 4333
98aad294 4334/* Duplication of ipcp transformation summaries. */
86cd0334 4335
98aad294
JH
4336void
4337ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4338 ipcp_transformation *src_trans,
4339 ipcp_transformation *dst_trans)
4340{
4341 /* Avoid redundant work of duplicating vectors we will never use. */
4342 if (dst->inlined_to)
4343 return;
4344 dst_trans->bits = vec_safe_copy (src_trans->bits);
4345 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4346 ipa_agg_replacement_value *agg = src_trans->agg_values,
4347 **aggptr = &dst_trans->agg_values;
4348 while (agg)
4349 {
4350 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4351 **aggptr = *agg;
4352 agg = agg->next;
4353 aggptr = &(*aggptr)->next;
2c9561b5 4354 }
771578a0
MJ
4355}
4356
4357/* Register our cgraph hooks if they are not already there. */
be95e2b9 4358
518dc859 4359void
771578a0 4360ipa_register_cgraph_hooks (void)
518dc859 4361{
dd912cb8 4362 ipa_check_create_node_params ();
6fe906a3 4363 ipa_check_create_edge_args ();
dd912cb8 4364
dd912cb8 4365 function_insertion_hook_holder =
3dafb85c 4366 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 4367}
518dc859 4368
771578a0 4369/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 4370
771578a0
MJ
4371static void
4372ipa_unregister_cgraph_hooks (void)
4373{
3dafb85c 4374 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 4375 function_insertion_hook_holder = NULL;
771578a0
MJ
4376}
4377
4378/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4379 longer needed after ipa-cp. */
be95e2b9 4380
771578a0 4381void
e33c6cd6 4382ipa_free_all_structures_after_ipa_cp (void)
3e293154 4383{
2bf86c84 4384 if (!optimize && !in_lto_p)
3e293154
MJ
4385 {
4386 ipa_free_all_edge_args ();
4387 ipa_free_all_node_params ();
2651e637
ML
4388 ipcp_sources_pool.release ();
4389 ipcp_cst_values_pool.release ();
4390 ipcp_poly_ctx_values_pool.release ();
4391 ipcp_agg_lattice_pool.release ();
3e293154 4392 ipa_unregister_cgraph_hooks ();
601f3293 4393 ipa_refdesc_pool.release ();
3e293154
MJ
4394 }
4395}
4396
4397/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4398 longer needed after indirect inlining. */
be95e2b9 4399
3e293154 4400void
e33c6cd6 4401ipa_free_all_structures_after_iinln (void)
771578a0
MJ
4402{
4403 ipa_free_all_edge_args ();
4404 ipa_free_all_node_params ();
4405 ipa_unregister_cgraph_hooks ();
2651e637
ML
4406 ipcp_sources_pool.release ();
4407 ipcp_cst_values_pool.release ();
4408 ipcp_poly_ctx_values_pool.release ();
4409 ipcp_agg_lattice_pool.release ();
601f3293 4410 ipa_refdesc_pool.release ();
518dc859
RL
4411}
4412
dcd416e3 4413/* Print ipa_tree_map data structures of all functions in the
518dc859 4414 callgraph to F. */
be95e2b9 4415
518dc859 4416void
2c9561b5 4417ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4418{
4419 int i, count;
99b1c316 4420 class ipa_node_params *info;
518dc859 4421
67348ccc 4422 if (!node->definition)
3e293154
MJ
4423 return;
4424 info = IPA_NODE_REF (node);
464d0118 4425 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
0302955a
JH
4426 if (!info)
4427 {
4428 fprintf (f, " no params return\n");
4429 return;
4430 }
3e293154
MJ
4431 count = ipa_get_param_count (info);
4432 for (i = 0; i < count; i++)
518dc859 4433 {
4502fe8d
MJ
4434 int c;
4435
a4e33812 4436 fprintf (f, " ");
e067bd43 4437 ipa_dump_param (f, info, i);
339f49ec
JH
4438 if (ipa_is_param_used (info, i))
4439 fprintf (f, " used");
40a777e8
JH
4440 if (ipa_is_param_used_by_ipa_predicates (info, i))
4441 fprintf (f, " used_by_ipa_predicates");
4442 if (ipa_is_param_used_by_indirect_call (info, i))
4443 fprintf (f, " used_by_indirect_call");
4444 if (ipa_is_param_used_by_polymorphic_call (info, i))
4445 fprintf (f, " used_by_polymorphic_call");
4502fe8d
MJ
4446 c = ipa_get_controlled_uses (info, i);
4447 if (c == IPA_UNDESCRIBED_USE)
4448 fprintf (f, " undescribed_use");
4449 else
4450 fprintf (f, " controlled_uses=%i", c);
3e293154 4451 fprintf (f, "\n");
518dc859
RL
4452 }
4453}
dcd416e3 4454
ca30a539 4455/* Print ipa_tree_map data structures of all functions in the
3e293154 4456 callgraph to F. */
be95e2b9 4457
3e293154 4458void
ca30a539 4459ipa_print_all_params (FILE * f)
3e293154
MJ
4460{
4461 struct cgraph_node *node;
4462
ca30a539 4463 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4464 FOR_EACH_FUNCTION (node)
ca30a539 4465 ipa_print_node_params (f, node);
3e293154 4466}
3f84bf08 4467
2c9561b5
MJ
4468/* Dump the AV linked list. */
4469
4470void
4471ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4472{
4473 bool comma = false;
4474 fprintf (f, " Aggregate replacements:");
4475 for (; av; av = av->next)
4476 {
4477 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4478 av->index, av->offset);
ef6cb4c7 4479 print_generic_expr (f, av->value);
2c9561b5
MJ
4480 comma = true;
4481 }
4482 fprintf (f, "\n");
4483}
4484
/* Stream out jump function JUMP_FUNC to OB.  The byte layout written here
   must be mirrored exactly by ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* Type and the ADDR_EXPR flag are encoded together in one uhwi.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      /* With FLAG set, only the operand of the ADDR_EXPR is streamed; the
	 reader reconstructs the ADDR_EXPR around it.  */
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  /* Binary operations also carry the second operand.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Stream known aggregate contents: count, by-ref flag and the items.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  /* Presence flags and payloads for known bits and value range.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4605
4606/* Read in jump function JUMP_FUNC from IB. */
4607
4608static void
99b1c316 4609ipa_read_jump_function (class lto_input_block *ib,
fb3f88cc 4610 struct ipa_jump_func *jump_func,
4502fe8d 4611 struct cgraph_edge *cs,
99b1c316 4612 class data_in *data_in,
f714ecf5 4613 bool prevails)
fb3f88cc 4614{
4a53743e
MJ
4615 enum jump_func_type jftype;
4616 enum tree_code operation;
8b7773a4 4617 int i, count;
f714ecf5
JH
4618 int val = streamer_read_uhwi (ib);
4619 bool flag = val & 1;
fb3f88cc 4620
f714ecf5 4621 jftype = (enum jump_func_type) (val / 2);
4a53743e 4622 switch (jftype)
fb3f88cc
JH
4623 {
4624 case IPA_JF_UNKNOWN:
04be694e 4625 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4626 break;
4627 case IPA_JF_CONST:
f714ecf5
JH
4628 {
4629 tree t = stream_read_tree (ib, data_in);
4630 if (flag && prevails)
4631 t = build_fold_addr_expr (t);
4632 ipa_set_jf_constant (jump_func, t, cs);
4633 }
fb3f88cc
JH
4634 break;
4635 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4636 operation = (enum tree_code) streamer_read_uhwi (ib);
4637 if (operation == NOP_EXPR)
4638 {
4639 int formal_id = streamer_read_uhwi (ib);
4640 struct bitpack_d bp = streamer_read_bitpack (ib);
4641 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4642 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4643 }
a2b4c188
KV
4644 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4645 {
4646 int formal_id = streamer_read_uhwi (ib);
4647 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4648 }
4a53743e
MJ
4649 else
4650 {
4651 tree operand = stream_read_tree (ib, data_in);
4652 int formal_id = streamer_read_uhwi (ib);
4653 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4654 operation);
4655 }
fb3f88cc
JH
4656 break;
4657 case IPA_JF_ANCESTOR:
4a53743e
MJ
4658 {
4659 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4660 int formal_id = streamer_read_uhwi (ib);
4661 struct bitpack_d bp = streamer_read_bitpack (ib);
4662 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4663 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4664 break;
4665 }
f714ecf5
JH
4666 default:
4667 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4668 }
4669
4670 count = streamer_read_uhwi (ib);
f714ecf5
JH
4671 if (prevails)
4672 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4673 if (count)
4674 {
4a53743e 4675 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4676 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4677 }
4678 for (i = 0; i < count; i++)
4679 {
f32682ca 4680 struct ipa_agg_jf_item item;
eb270950 4681 item.type = stream_read_tree (ib, data_in);
f32682ca 4682 item.offset = streamer_read_uhwi (ib);
eb270950
FX
4683 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4684
4685 switch (item.jftype)
4686 {
4687 case IPA_JF_UNKNOWN:
4688 break;
4689 case IPA_JF_CONST:
4690 item.value.constant = stream_read_tree (ib, data_in);
4691 break;
4692 case IPA_JF_PASS_THROUGH:
4693 case IPA_JF_LOAD_AGG:
4694 operation = (enum tree_code) streamer_read_uhwi (ib);
4695 item.value.pass_through.operation = operation;
4696 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4697 if (TREE_CODE_CLASS (operation) == tcc_unary)
4698 item.value.pass_through.operand = NULL_TREE;
4699 else
4700 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4701 if (item.jftype == IPA_JF_LOAD_AGG)
4702 {
4703 struct bitpack_d bp;
4704 item.value.load_agg.type = stream_read_tree (ib, data_in);
4705 item.value.load_agg.offset = streamer_read_uhwi (ib);
4706 bp = streamer_read_bitpack (ib);
4707 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4708 }
4709 break;
4710 default:
4711 fatal_error (UNKNOWN_LOCATION,
4712 "invalid jump function in LTO stream");
4713 }
f714ecf5
JH
4714 if (prevails)
4715 jump_func->agg.items->quick_push (item);
fb3f88cc 4716 }
04be694e
MJ
4717
4718 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4719 bool bits_known = bp_unpack_value (&bp, 1);
4720 if (bits_known)
4721 {
86cd0334
MJ
4722 widest_int value = streamer_read_widest_int (ib);
4723 widest_int mask = streamer_read_widest_int (ib);
f714ecf5
JH
4724 if (prevails)
4725 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4726 }
4727 else
86cd0334 4728 jump_func->bits = NULL;
8bc5448f
KV
4729
4730 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4731 bool vr_known = bp_unpack_value (&vr_bp, 1);
4732 if (vr_known)
4733 {
54994253 4734 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
86cd0334
MJ
4735 VR_LAST);
4736 tree min = stream_read_tree (ib, data_in);
4737 tree max = stream_read_tree (ib, data_in);
f714ecf5
JH
4738 if (prevails)
4739 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4740 }
4741 else
86cd0334 4742 jump_func->m_vr = NULL;
fb3f88cc
JH
4743}
4744
e33c6cd6
MJ
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The offset is only streamed when it
   can be non-zero (aggregate or polymorphic calls); polymorphic calls
   additionally carry OBJ_TYPE_REF data and the polymorphic context.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    /* Otherwise the offset must be zero so the reader can reconstruct it
       without reading anything.  */
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4776
e33c6cd6
MJ
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  INFO, if non-NULL, is the
   prevailing caller's ipa_node_params in which parameter-use flags are
   updated.  Must mirror ipa_write_indirect_edge_info exactly.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    /* The writer asserted the offset was zero and streamed nothing.  */
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  /* Record that the parameter feeding this indirect call is used, so the
     information survives across LTO streaming.  */
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index , true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
4816
fb3f88cc
JH
/* Stream out NODE info to OB.  Layout: node reference, parameter count,
   per-parameter move costs, a bitpack of used flags, per-parameter
   controlled-uses counts and types, then jump functions for all direct
   callees followed by jump functions and indirect-call info for all
   indirect callees.  ipa_read_node_info must mirror this exactly.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only analyzed, non-clone nodes that are not queued for processing are
     expected here.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Edges without argument info are marked with a zero count.  */
      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      /* The count is doubled, with the lowest bit flagging whether
	 polymorphic call contexts follow each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
4889
/* Stream in edge E from IB.  DATA_IN provides tree/string context; PREVAILS
   says whether the enclosing symbol won symbol resolution.  When the data is
   not wanted (non-prevailing symbol, or the call cannot happen in this
   translation unit), the jump functions are still read into a throwaway
   buffer to keep the stream position in sync.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  /* Lowest bit of the count flags the presence of polymorphic call
     contexts; see ipa_write_node_info.  */
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      /* Consume the data without storing it.  */
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
4933
/* Stream in NODE info from IB.  Mirrors ipa_write_node_info: parameter
   count, move costs, used-flags bitpack, controlled uses and types, then
   per-edge info.  For non-prevailing nodes the data is read but
   discarded.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails
    ? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Discard the streamed move costs.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
4988
/* Write jump functions for nodes in SET.  First pass counts the nodes with
   gimple bodies and IPA info, the count is streamed, then a second pass over
   the same iteration order streams each node's info.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if IPA summaries were never computed.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  The filter must match the counting loop
     above so exactly COUNT node records are emitted.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5030
/* Read section in file FILE_DATA of length LEN with data DATA.  The section
   begins with an lto_function_header; the main stream follows the (unused
   here) CFG part and is itself followed by the string table.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5071
/* Read ipcp jump functions.  Iterates over all LTO file data objects and
   processes their jump-function sections, after making sure the IPA summary
   containers and cgraph hooks exist.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      /* Files compiled without IPA-CP have no such section.  */
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
5095
/* Stream out the IPA-CP transformation summary of NODE to OB: the chain of
   aggregate replacement values, then per-parameter value ranges, then
   per-parameter known-bits information.  Absent range/bits vectors are
   encoded as a zero count.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  /* Individual entries may be NULL; a presence bit precedes each.  */
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
5171
/* Stream in the aggregate value replacement chain for NODE from IB, followed
   by per-parameter value ranges and known-bits data.  Must mirror
   write_ipcp_transformation_info exactly.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepend; the resulting chain is in reverse stream order.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Value ranges; a zero count means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known-bits entries; each is preceded by a presence bit.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
5242
/* Write all aggregate replacement for nodes in set.  Counts nodes with
   gimple bodies first, streams the count, then streams each node's
   transformation summary in the same iteration order.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Filter must match the counting loop above.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5278
5279/* Read replacements section in file FILE_DATA of length LEN with data
5280 DATA. */
5281
5282static void
5283read_replacements_section (struct lto_file_decl_data *file_data,
5284 const char *data,
5285 size_t len)
5286{
5287 const struct lto_function_header *header =
5288 (const struct lto_function_header *) data;
5289 const int cfg_offset = sizeof (struct lto_function_header);
5290 const int main_offset = cfg_offset + header->cfg_size;
5291 const int string_offset = main_offset + header->main_size;
99b1c316 5292 class data_in *data_in;
2c9561b5
MJ
5293 unsigned int i;
5294 unsigned int count;
5295
207c68cd 5296 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5297 header->main_size, file_data->mode_table);
2c9561b5
MJ
5298
5299 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5300 header->string_size, vNULL);
2c9561b5
MJ
5301 count = streamer_read_uhwi (&ib_main);
5302
5303 for (i = 0; i < count; i++)
5304 {
5305 unsigned int index;
5306 struct cgraph_node *node;
5307 lto_symtab_encoder_t encoder;
5308
5309 index = streamer_read_uhwi (&ib_main);
5310 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5311 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5312 index));
67348ccc 5313 gcc_assert (node->definition);
04be694e 5314 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5315 }
5316 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5317 len);
5318 lto_data_in_delete (data_in);
5319}
5320
/* Read IPA-CP aggregate replacements.  Iterates over all LTO file data
   objects and processes their ipcp-transform sections, if present.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
					&len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
5340
5341/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5342 NODE. */
5343
5344static void
5345adjust_agg_replacement_values (struct cgraph_node *node,
5346 struct ipa_agg_replacement_value *aggval)
5347{
5348 struct ipa_agg_replacement_value *v;
2c9561b5 5349
ff6686d2 5350 if (!node->clone.param_adjustments)
2c9561b5
MJ
5351 return;
5352
ff6686d2
MJ
5353 auto_vec<int, 16> new_indices;
5354 node->clone.param_adjustments->get_updated_indices (&new_indices);
2c9561b5
MJ
5355 for (v = aggval; v; v = v->next)
5356 {
ff6686d2 5357 gcc_checking_assert (v->index >= 0);
2c9561b5 5358
ff6686d2
MJ
5359 if ((unsigned) v->index < new_indices.length ())
5360 v->index = new_indices[v->index];
5361 else
5362 /* This can happen if we know about a constant passed by reference by
5363 an argument which is never actually used for anything, let alone
5364 loading that constant. */
5365 v->index = -1;
5366 }
2c9561b5
MJ
5367}
5368
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  Visits basic blocks
   in dominator order and replaces loads from aggregates with the constants
   IPA-CP discovered for them (see before_dom_children).  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body, DESCS its parameter descriptors, AV the
     chain of aggregate replacement values; *SC and *CC are set when a
     statement or the CFG was changed, respectively.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags owned by the caller; written, never read, by the walk.  */
  bool *m_something_changed, *m_cfg_changed;
};
5389
/* Visit block BB: rewrite loads from parameter aggregates whose value is
   known (recorded in m_aggval) into the known constant.  Sets
   *m_something_changed when any statement was modified and *m_cfg_changed
   when purging dead EH edges altered the CFG.  Always returns NULL (walk all
   outgoing edges).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only scalar loads are candidates for replacement.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a recorded replacement matching parameter and offset.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      /* Incompatible sizes: give up on this load.  */
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* The load may no longer throw; clean up EH info and note if edges
	 were removed.  */
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5485
c7ac9a0c
JH
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  PARM must be a PARM_DECL of the function
   currently being compiled; its position among DECL_ARGUMENTS is mapped back
   through any clone parameter adjustments before indexing the bits
   vector.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  /* Find PARM's position in the argument list.  */
  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  if (cnode->clone.param_adjustments)
    {
      /* Map the clone's parameter index back to the original's.  */
      i = cnode->clone.param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
5521
5522
209ca542 5523/* Update bits info of formal parameters as described in
9d3e0adc 5524 ipcp_transformation. */
209ca542
PK
5525
5526static void
5527ipcp_update_bits (struct cgraph_node *node)
5528{
9d3e0adc 5529 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
209ca542
PK
5530
5531 if (!ts || vec_safe_length (ts->bits) == 0)
5532 return;
86cd0334 5533 vec<ipa_bits *, va_gc> &bits = *ts->bits;
209ca542 5534 unsigned count = bits.length ();
ff6686d2
MJ
5535 if (!count)
5536 return;
209ca542 5537
ff6686d2
MJ
5538 auto_vec<int, 16> new_indices;
5539 bool need_remapping = false;
5540 if (node->clone.param_adjustments)
209ca542 5541 {
ff6686d2
MJ
5542 node->clone.param_adjustments->get_updated_indices (&new_indices);
5543 need_remapping = true;
5544 }
5545 auto_vec <tree, 16> parm_decls;
5546 push_function_arg_decls (&parm_decls, node->decl);
209ca542 5547
ff6686d2
MJ
5548 for (unsigned i = 0; i < count; ++i)
5549 {
5550 tree parm;
5551 if (need_remapping)
5552 {
5553 if (i >= new_indices.length ())
5554 continue;
5555 int idx = new_indices[i];
5556 if (idx < 0)
5557 continue;
5558 parm = parm_decls[idx];
5559 }
5560 else
5561 parm = parm_decls[i];
209ca542 5562 gcc_checking_assert (parm);
ff6686d2 5563
209ca542 5564
86cd0334
MJ
5565 if (!bits[i]
5566 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5567 || POINTER_TYPE_P (TREE_TYPE (parm)))
209ca542 5568 || !is_gimple_reg (parm))
86cd0334 5569 continue;
209ca542
PK
5570
5571 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5572 if (!ddef)
5573 continue;
5574
5575 if (dump_file)
5576 {
86cd0334
MJ
5577 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5578 print_hex (bits[i]->mask, dump_file);
209ca542
PK
5579 fprintf (dump_file, "\n");
5580 }
5581
67b97478
PK
5582 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5583 {
5584 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5585 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5586
86cd0334
MJ
5587 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5588 | wide_int::from (bits[i]->value, prec, sgn);
67b97478
PK
5589 set_nonzero_bits (ddef, nonzero_bits);
5590 }
5591 else
5592 {
86cd0334
MJ
5593 unsigned tem = bits[i]->mask.to_uhwi ();
5594 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
67b97478
PK
5595 unsigned align = tem & -tem;
5596 unsigned misalign = bitpos & (align - 1);
209ca542 5597
67b97478
PK
5598 if (align > 1)
5599 {
5600 if (dump_file)
5601 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5602
5603 unsigned old_align, old_misalign;
5604 struct ptr_info_def *pi = get_ptr_info (ddef);
5605 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5606
5607 if (old_known
5608 && old_align > align)
5609 {
5610 if (dump_file)
5611 {
5612 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5613 if ((old_misalign & (align - 1)) != misalign)
5614 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5615 old_misalign, misalign);
5616 }
5617 continue;
5618 }
5619
5620 if (old_known
5621 && ((misalign & (old_align - 1)) != old_misalign)
5622 && dump_file)
5623 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5624 old_misalign, misalign);
5625
5626 set_ptr_info_alignment (pi, align, misalign);
5627 }
5628 }
209ca542
PK
5629 }
5630}
5631
523fe5b6
AH
5632bool
5633ipa_vr::nonzero_p (tree expr_type) const
5634{
5635 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5636 return true;
5637
5638 unsigned prec = TYPE_PRECISION (expr_type);
5639 return (type == VR_RANGE
19849d15 5640 && TYPE_UNSIGNED (expr_type)
523fe5b6
AH
5641 && wi::eq_p (min, wi::one (prec))
5642 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5643}
5644
8bc5448f 5645/* Update value range of formal parameters as described in
9d3e0adc 5646 ipcp_transformation. */
8bc5448f
KV
5647
5648static void
5649ipcp_update_vr (struct cgraph_node *node)
5650{
9d3e0adc 5651 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5652 if (!ts || vec_safe_length (ts->m_vr) == 0)
5653 return;
5654 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5655 unsigned count = vr.length ();
ff6686d2
MJ
5656 if (!count)
5657 return;
8bc5448f 5658
ff6686d2
MJ
5659 auto_vec<int, 16> new_indices;
5660 bool need_remapping = false;
5661 if (node->clone.param_adjustments)
8bc5448f 5662 {
ff6686d2
MJ
5663 node->clone.param_adjustments->get_updated_indices (&new_indices);
5664 need_remapping = true;
5665 }
5666 auto_vec <tree, 16> parm_decls;
5667 push_function_arg_decls (&parm_decls, node->decl);
5668
5669 for (unsigned i = 0; i < count; ++i)
5670 {
5671 tree parm;
5672 int remapped_idx;
5673 if (need_remapping)
5674 {
5675 if (i >= new_indices.length ())
5676 continue;
5677 remapped_idx = new_indices[i];
5678 if (remapped_idx < 0)
5679 continue;
5680 }
5681 else
5682 remapped_idx = i;
5683
5684 parm = parm_decls[remapped_idx];
5685
8bc5448f 5686 gcc_checking_assert (parm);
8bc5448f
KV
5687 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5688
5689 if (!ddef || !is_gimple_reg (parm))
5690 continue;
5691
5692 if (vr[i].known
8bc5448f
KV
5693 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5694 {
5695 tree type = TREE_TYPE (ddef);
5696 unsigned prec = TYPE_PRECISION (type);
718625ad
KV
5697 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5698 {
5699 if (dump_file)
5700 {
ff6686d2
MJ
5701 fprintf (dump_file, "Setting value range of param %u "
5702 "(now %i) ", i, remapped_idx);
718625ad
KV
5703 fprintf (dump_file, "%s[",
5704 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5705 print_decs (vr[i].min, dump_file);
5706 fprintf (dump_file, ", ");
5707 print_decs (vr[i].max, dump_file);
5708 fprintf (dump_file, "]\n");
5709 }
5710 set_range_info (ddef, vr[i].type,
5711 wide_int_storage::from (vr[i].min, prec,
5712 TYPE_SIGN (type)),
5713 wide_int_storage::from (vr[i].max, prec,
5714 TYPE_SIGN (type)));
5715 }
5716 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
523fe5b6 5717 && vr[i].nonzero_p (TREE_TYPE (ddef)))
8bc5448f 5718 {
718625ad
KV
5719 if (dump_file)
5720 fprintf (dump_file, "Setting nonnull for %u\n", i);
5721 set_ptr_nonnull (ddef);
8bc5448f 5722 }
8bc5448f
KV
5723 }
5724 }
5725}
5726
8aab5218 5727/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5728
5729unsigned int
5730ipcp_transform_function (struct cgraph_node *node)
5731{
f65f1ae3 5732 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
56b40062 5733 struct ipa_func_body_info fbi;
2c9561b5 5734 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5735 int param_count;
5736 bool cfg_changed = false, something_changed = false;
5737
5738 gcc_checking_assert (cfun);
5739 gcc_checking_assert (current_function_decl);
5740
5741 if (dump_file)
464d0118
ML
5742 fprintf (dump_file, "Modification phase of node %s\n",
5743 node->dump_name ());
2c9561b5 5744
209ca542 5745 ipcp_update_bits (node);
8bc5448f 5746 ipcp_update_vr (node);
2c9561b5
MJ
5747 aggval = ipa_get_agg_replacements_for_node (node);
5748 if (!aggval)
5749 return 0;
67348ccc 5750 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5751 if (param_count == 0)
5752 return 0;
5753 adjust_agg_replacement_values (node, aggval);
5754 if (dump_file)
5755 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5756
8aab5218
MJ
5757 fbi.node = node;
5758 fbi.info = NULL;
5759 fbi.bb_infos = vNULL;
5760 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5761 fbi.param_count = param_count;
fdfd7f53 5762 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
2c9561b5 5763
f65f1ae3
MJ
5764 vec_safe_grow_cleared (descriptors, param_count);
5765 ipa_populate_param_decls (node, *descriptors);
8aab5218
MJ
5766 calculate_dominance_info (CDI_DOMINATORS);
5767 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5768 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5769
8aab5218
MJ
5770 int i;
5771 struct ipa_bb_info *bi;
5772 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5773 free_ipa_bb_info (bi);
5774 fbi.bb_infos.release ();
5775 free_dominance_info (CDI_DOMINATORS);
9d3e0adc
ML
5776
5777 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5778 s->agg_values = NULL;
5779 s->bits = NULL;
5780 s->m_vr = NULL;
676b4899 5781
f65f1ae3 5782 vec_free (descriptors);
2c9561b5
MJ
5783
5784 if (!something_changed)
5785 return 0;
95a2ed03
MJ
5786
5787 if (cfg_changed)
5788 delete_unreachable_blocks_update_callgraph (node, false);
5789
5790 return TODO_update_ssa_only_virtuals;
2c9561b5 5791}
86cd0334 5792
ac6f2e59 5793
eb270950 5794/* Return true if OTHER describes same agg value. */
ac6f2e59 5795bool
eb270950 5796ipa_agg_value::equal_to (const ipa_agg_value &other)
ac6f2e59
JH
5797{
5798 return offset == other.offset
5799 && operand_equal_p (value, other.value, 0);
5800}
86cd0334 5801#include "gt-ipa-prop.h"