]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
re PR ipa/92498 (gcc.dg/tree-prof/crossmodule-indircall-1.c fails starting with r278100)
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
a5544970 2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
450ad0cd 50#include "ipa-utils.h"
2b5f0895 51#include "dbgcnt.h"
8aab5218 52#include "domwalk.h"
9b2b7279 53#include "builtins.h"
95a2ed03 54#include "tree-cfgcleanup.h"
771578a0 55
dd912cb8
ML
56/* Function summary where the parameter infos are actually stored. */
57ipa_node_params_t *ipa_node_params_sum = NULL;
9d3e0adc
ML
58
59function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60
6fe906a3
MJ
61/* Edge summary for IPA-CP edge information. */
62ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 63
86cd0334
MJ
64/* Traits for a hash table for reusing already existing ipa_bits. */
65
66struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67{
68 typedef ipa_bits *value_type;
69 typedef ipa_bits *compare_type;
70 static hashval_t
71 hash (const ipa_bits *p)
72 {
73 hashval_t t = (hashval_t) p->value.to_shwi ();
74 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 }
76 static bool
77 equal (const ipa_bits *a, const ipa_bits *b)
78 {
79 return a->value == b->value && a->mask == b->mask;
80 }
81 static void
82 mark_empty (ipa_bits *&p)
83 {
84 p = NULL;
85 }
86 static bool
87 is_empty (const ipa_bits *p)
88 {
89 return p == NULL;
90 }
91 static bool
92 is_deleted (const ipa_bits *p)
93 {
94 return p == reinterpret_cast<const ipa_bits *> (1);
95 }
96 static void
97 mark_deleted (ipa_bits *&p)
98 {
99 p = reinterpret_cast<ipa_bits *> (1);
100 }
101};
102
103/* Hash table for avoid repeated allocations of equal ipa_bits. */
104static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
105
106/* Traits for a hash table for reusing value_ranges used for IPA. Note that
107 the equiv bitmap is not hashed and is expected to be NULL. */
108
028d81b1 109struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
86cd0334 110{
028d81b1
AH
111 typedef value_range *value_type;
112 typedef value_range *compare_type;
86cd0334 113 static hashval_t
028d81b1 114 hash (const value_range *p)
59b2c134 115 {
54994253 116 inchash::hash hstate (p->kind ());
74ca1c01
ML
117 inchash::add_expr (p->min (), hstate);
118 inchash::add_expr (p->max (), hstate);
59b2c134
JJ
119 return hstate.end ();
120 }
86cd0334 121 static bool
028d81b1 122 equal (const value_range *a, const value_range *b)
86cd0334 123 {
ff361cc6 124 return a->equal_p (*b);
86cd0334
MJ
125 }
126 static void
028d81b1 127 mark_empty (value_range *&p)
86cd0334
MJ
128 {
129 p = NULL;
130 }
131 static bool
028d81b1 132 is_empty (const value_range *p)
86cd0334
MJ
133 {
134 return p == NULL;
135 }
136 static bool
028d81b1 137 is_deleted (const value_range *p)
86cd0334 138 {
028d81b1 139 return p == reinterpret_cast<const value_range *> (1);
86cd0334
MJ
140 }
141 static void
028d81b1 142 mark_deleted (value_range *&p)
86cd0334 143 {
028d81b1 144 p = reinterpret_cast<value_range *> (1);
86cd0334
MJ
145 }
146};
147
148/* Hash table for avoid repeated allocations of equal value_ranges. */
149static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
150
771578a0 151/* Holders of ipa cgraph hooks: */
40982661 152static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 153
4502fe8d
MJ
154/* Description of a reference to an IPA constant. */
155struct ipa_cst_ref_desc
156{
157 /* Edge that corresponds to the statement which took the reference. */
158 struct cgraph_edge *cs;
159 /* Linked list of duplicates created when call graph edges are cloned. */
160 struct ipa_cst_ref_desc *next_duplicate;
161 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
162 if out of control. */
163 int refcount;
164};
165
166/* Allocation pool for reference descriptions. */
167
fb0b2914 168static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
fcb87c50 169 ("IPA-PROP ref descriptions");
4502fe8d 170
5fe8e757
MJ
171/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
173
174static bool
175ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176{
67348ccc 177 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
178
179 if (!fs_opts)
180 return false;
2bf86c84 181 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
182}
183
be95e2b9
MJ
184/* Return index of the formal whose tree is PTREE in function which corresponds
185 to INFO. */
186
d044dd17 187static int
f65f1ae3
MJ
188ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 tree ptree)
518dc859
RL
190{
191 int i, count;
192
f65f1ae3 193 count = vec_safe_length (descriptors);
518dc859 194 for (i = 0; i < count; i++)
f65f1ae3 195 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
196 return i;
197
198 return -1;
199}
200
d044dd17
MJ
201/* Return index of the formal whose tree is PTREE in function which corresponds
202 to INFO. */
203
204int
99b1c316 205ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
d044dd17
MJ
206{
207 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
208}
209
210/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
211 NODE. */
be95e2b9 212
f8e2a1ed
MJ
213static void
214ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 215 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
216{
217 tree fndecl;
218 tree fnargs;
219 tree parm;
220 int param_num;
3e293154 221
67348ccc 222 fndecl = node->decl;
0e8853ee 223 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
224 fnargs = DECL_ARGUMENTS (fndecl);
225 param_num = 0;
910ad8de 226 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 227 {
209ca542 228 descriptors[param_num].decl_or_type = parm;
40a777e8
JH
229 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
230 descriptors[param_num].move_cost = cost;
231 /* Watch overflow, move_cost is a bitfield. */
232 gcc_checking_assert (cost == descriptors[param_num].move_cost);
518dc859
RL
233 param_num++;
234 }
235}
236
3f84bf08
MJ
237/* Return how many formal parameters FNDECL has. */
238
fd29c024 239int
310bc633 240count_formal_params (tree fndecl)
3f84bf08
MJ
241{
242 tree parm;
243 int count = 0;
0e8853ee 244 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 245
910ad8de 246 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
247 count++;
248
249 return count;
250}
251
0e8853ee
JH
252/* Return the declaration of Ith formal parameter of the function corresponding
253 to INFO. Note there is no setter function as this array is built just once
254 using ipa_initialize_node_params. */
255
256void
99b1c316 257ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
0e8853ee
JH
258{
259 fprintf (file, "param #%i", i);
f65f1ae3 260 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
261 {
262 fprintf (file, " ");
ef6cb4c7 263 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
264 }
265}
266
159f01f8
MJ
267/* If necessary, allocate vector of parameter descriptors in info of NODE.
268 Return true if they were allocated, false if not. */
0e8853ee 269
159f01f8 270static bool
0e8853ee
JH
271ipa_alloc_node_params (struct cgraph_node *node, int param_count)
272{
6cf67b62 273 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
0e8853ee 274
f65f1ae3 275 if (!info->descriptors && param_count)
159f01f8
MJ
276 {
277 vec_safe_grow_cleared (info->descriptors, param_count);
278 return true;
279 }
280 else
281 return false;
0e8853ee
JH
282}
283
f8e2a1ed
MJ
284/* Initialize the ipa_node_params structure associated with NODE by counting
285 the function parameters, creating the descriptors and populating their
286 param_decls. */
be95e2b9 287
f8e2a1ed
MJ
288void
289ipa_initialize_node_params (struct cgraph_node *node)
290{
6cf67b62 291 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
f8e2a1ed 292
159f01f8
MJ
293 if (!info->descriptors
294 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
295 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
296}
297
749aa96d
MJ
298/* Print the jump functions associated with call graph edge CS to file F. */
299
300static void
301ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
302{
303 int i, count;
304
305 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
306 for (i = 0; i < count; i++)
307 {
308 struct ipa_jump_func *jump_func;
309 enum jump_func_type type;
310
311 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
312 type = jump_func->type;
313
314 fprintf (f, " param %d: ", i);
315 if (type == IPA_JF_UNKNOWN)
316 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
317 else if (type == IPA_JF_CONST)
318 {
4502fe8d 319 tree val = jump_func->value.constant.value;
749aa96d 320 fprintf (f, "CONST: ");
ef6cb4c7 321 print_generic_expr (f, val);
749aa96d
MJ
322 if (TREE_CODE (val) == ADDR_EXPR
323 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
324 {
325 fprintf (f, " -> ");
ef6cb4c7 326 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
327 }
328 fprintf (f, "\n");
329 }
749aa96d
MJ
330 else if (type == IPA_JF_PASS_THROUGH)
331 {
332 fprintf (f, "PASS THROUGH: ");
8b7773a4 333 fprintf (f, "%d, op %s",
749aa96d 334 jump_func->value.pass_through.formal_id,
5806f481 335 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 336 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
337 {
338 fprintf (f, " ");
ef6cb4c7 339 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
340 }
341 if (jump_func->value.pass_through.agg_preserved)
342 fprintf (f, ", agg_preserved");
3ea6239f 343 fprintf (f, "\n");
749aa96d
MJ
344 }
345 else if (type == IPA_JF_ANCESTOR)
346 {
347 fprintf (f, "ANCESTOR: ");
16998094 348 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
349 jump_func->value.ancestor.formal_id,
350 jump_func->value.ancestor.offset);
8b7773a4
MJ
351 if (jump_func->value.ancestor.agg_preserved)
352 fprintf (f, ", agg_preserved");
3ea6239f 353 fprintf (f, "\n");
749aa96d 354 }
8b7773a4
MJ
355
356 if (jump_func->agg.items)
357 {
358 struct ipa_agg_jf_item *item;
359 int j;
360
361 fprintf (f, " Aggregate passed by %s:\n",
362 jump_func->agg.by_ref ? "reference" : "value");
9771b263 363 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
364 {
365 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
366 item->offset);
367 if (TYPE_P (item->value))
368 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 369 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
370 else
371 {
372 fprintf (f, "cst: ");
ef6cb4c7 373 print_generic_expr (f, item->value);
8b7773a4
MJ
374 }
375 fprintf (f, "\n");
376 }
377 }
44210a96 378
99b1c316 379 class ipa_polymorphic_call_context *ctx
44210a96
MJ
380 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
381 if (ctx && !ctx->useless_p ())
382 {
383 fprintf (f, " Context: ");
384 ctx->dump (dump_file);
385 }
04be694e 386
86cd0334 387 if (jump_func->bits)
209ca542 388 {
86cd0334
MJ
389 fprintf (f, " value: ");
390 print_hex (jump_func->bits->value, f);
391 fprintf (f, ", mask: ");
392 print_hex (jump_func->bits->mask, f);
209ca542
PK
393 fprintf (f, "\n");
394 }
395 else
396 fprintf (f, " Unknown bits\n");
8bc5448f 397
86cd0334 398 if (jump_func->m_vr)
8bc5448f
KV
399 {
400 fprintf (f, " VR ");
401 fprintf (f, "%s[",
54994253
AH
402 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
403 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
8bc5448f 404 fprintf (f, ", ");
54994253 405 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
8bc5448f
KV
406 fprintf (f, "]\n");
407 }
408 else
409 fprintf (f, " Unknown VR\n");
749aa96d
MJ
410 }
411}
412
413
be95e2b9
MJ
414/* Print the jump functions of all arguments on all call graph edges going from
415 NODE to file F. */
416
518dc859 417void
3e293154 418ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 419{
3e293154 420 struct cgraph_edge *cs;
518dc859 421
464d0118 422 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
3e293154
MJ
423 for (cs = node->callees; cs; cs = cs->next_callee)
424 {
3e293154 425
464d0118
ML
426 fprintf (f, " callsite %s -> %s : \n",
427 node->dump_name (),
428 cs->callee->dump_name ());
0302955a
JH
429 if (!ipa_edge_args_info_available_for_edge_p (cs))
430 fprintf (f, " no arg info\n");
431 else
432 ipa_print_node_jump_functions_for_edge (f, cs);
749aa96d 433 }
518dc859 434
9de04252 435 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 436 {
99b1c316 437 class cgraph_indirect_call_info *ii;
3e293154 438
9de04252
MJ
439 ii = cs->indirect_info;
440 if (ii->agg_contents)
c13bc3d9 441 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 442 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 443 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
444 ii->param_index, ii->offset,
445 ii->by_ref ? "by reference" : "by_value");
446 else
85942f45
JH
447 fprintf (f, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC,
449 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
450 ii->offset);
9de04252 451
749aa96d
MJ
452 if (cs->call_stmt)
453 {
9de04252 454 fprintf (f, ", for stmt ");
749aa96d 455 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 456 }
749aa96d 457 else
9de04252 458 fprintf (f, "\n");
ba392339
JH
459 if (ii->polymorphic)
460 ii->context.dump (f);
0302955a
JH
461 if (!ipa_edge_args_info_available_for_edge_p (cs))
462 fprintf (f, " no arg info\n");
463 else
464 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
465 }
466}
467
468/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 469
3e293154
MJ
470void
471ipa_print_all_jump_functions (FILE *f)
472{
473 struct cgraph_node *node;
474
ca30a539 475 fprintf (f, "\nJump functions:\n");
65c70e6b 476 FOR_EACH_FUNCTION (node)
3e293154
MJ
477 {
478 ipa_print_node_jump_functions (f, node);
479 }
480}
481
04be694e
MJ
482/* Set jfunc to be a know-really nothing jump function. */
483
484static void
485ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
486{
487 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
488 jfunc->bits = NULL;
489 jfunc->m_vr = NULL;
04be694e
MJ
490}
491
b8f6e610
MJ
492/* Set JFUNC to be a copy of another jmp (to be used by jump function
493 combination code). The two functions will share their rdesc. */
494
495static void
496ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
497 struct ipa_jump_func *src)
498
499{
500 gcc_checking_assert (src->type == IPA_JF_CONST);
501 dst->type = IPA_JF_CONST;
502 dst->value.constant = src->value.constant;
503}
504
7b872d9e
MJ
505/* Set JFUNC to be a constant jmp function. */
506
507static void
4502fe8d
MJ
508ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
509 struct cgraph_edge *cs)
7b872d9e
MJ
510{
511 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
512 jfunc->value.constant.value = unshare_expr_without_location (constant);
513
514 if (TREE_CODE (constant) == ADDR_EXPR
515 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
516 {
517 struct ipa_cst_ref_desc *rdesc;
4502fe8d 518
601f3293 519 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
520 rdesc->cs = cs;
521 rdesc->next_duplicate = NULL;
522 rdesc->refcount = 1;
523 jfunc->value.constant.rdesc = rdesc;
524 }
525 else
526 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
527}
528
529/* Set JFUNC to be a simple pass-through jump function. */
530static void
8b7773a4 531ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 532 bool agg_preserved)
7b872d9e
MJ
533{
534 jfunc->type = IPA_JF_PASS_THROUGH;
535 jfunc->value.pass_through.operand = NULL_TREE;
536 jfunc->value.pass_through.formal_id = formal_id;
537 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 538 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
539}
540
a2b4c188
KV
541/* Set JFUNC to be an unary pass through jump function. */
542
543static void
544ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
545 enum tree_code operation)
546{
547 jfunc->type = IPA_JF_PASS_THROUGH;
548 jfunc->value.pass_through.operand = NULL_TREE;
549 jfunc->value.pass_through.formal_id = formal_id;
550 jfunc->value.pass_through.operation = operation;
551 jfunc->value.pass_through.agg_preserved = false;
552}
7b872d9e
MJ
553/* Set JFUNC to be an arithmetic pass through jump function. */
554
555static void
556ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
557 tree operand, enum tree_code operation)
558{
559 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 560 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
561 jfunc->value.pass_through.formal_id = formal_id;
562 jfunc->value.pass_through.operation = operation;
8b7773a4 563 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
564}
565
566/* Set JFUNC to be an ancestor jump function. */
567
568static void
569ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 570 int formal_id, bool agg_preserved)
7b872d9e
MJ
571{
572 jfunc->type = IPA_JF_ANCESTOR;
573 jfunc->value.ancestor.formal_id = formal_id;
574 jfunc->value.ancestor.offset = offset;
8b7773a4 575 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
576}
577
8aab5218
MJ
578/* Get IPA BB information about the given BB. FBI is the context of analyzis
579 of this function body. */
580
581static struct ipa_bb_info *
56b40062 582ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
583{
584 gcc_checking_assert (fbi);
585 return &fbi->bb_infos[bb->index];
586}
587
f65cf2b7
MJ
588/* Structure to be passed in between detect_type_change and
589 check_stmt_for_type_change. */
590
11478306 591struct prop_type_change_info
f65cf2b7 592{
290ebcb7
MJ
593 /* Offset into the object where there is the virtual method pointer we are
594 looking for. */
595 HOST_WIDE_INT offset;
596 /* The declaration or SSA_NAME pointer of the base that we are checking for
597 type change. */
598 tree object;
f65cf2b7
MJ
599 /* Set to true if dynamic type change has been detected. */
600 bool type_maybe_changed;
601};
602
603/* Return true if STMT can modify a virtual method table pointer.
604
605 This function makes special assumptions about both constructors and
606 destructors which are all the functions that are allowed to alter the VMT
607 pointers. It assumes that destructors begin with assignment into all VMT
608 pointers and that constructors essentially look in the following way:
609
610 1) The very first thing they do is that they call constructors of ancestor
611 sub-objects that have them.
612
613 2) Then VMT pointers of this and all its ancestors is set to new values
614 corresponding to the type corresponding to the constructor.
615
616 3) Only afterwards, other stuff such as constructor of member sub-objects
617 and the code written by the user is run. Only this may include calling
618 virtual functions, directly or indirectly.
619
620 There is no way to call a constructor of an ancestor sub-object in any
621 other way.
622
623 This means that we do not have to care whether constructors get the correct
624 type information because they will always change it (in fact, if we define
625 the type to be given by the VMT pointer, it is undefined).
626
627 The most important fact to derive from the above is that if, for some
628 statement in the section 3, we try to detect whether the dynamic type has
629 changed, we can safely ignore all calls as we examine the function body
630 backwards until we reach statements in section 2 because these calls cannot
631 be ancestor constructors or destructors (if the input is not bogus) and so
632 do not change the dynamic type (this holds true only for automatically
633 allocated objects but at the moment we devirtualize only these). We then
634 must detect that statements in section 2 change the dynamic type and can try
635 to derive the new type. That is enough and we can stop, we will never see
636 the calls into constructors of sub-objects in this code. Therefore we can
637 safely ignore all call statements that we traverse.
638 */
639
640static bool
355fe088 641stmt_may_be_vtbl_ptr_store (gimple *stmt)
f65cf2b7
MJ
642{
643 if (is_gimple_call (stmt))
644 return false;
70f633c5
JH
645 if (gimple_clobber_p (stmt))
646 return false;
f65cf2b7
MJ
647 else if (is_gimple_assign (stmt))
648 {
649 tree lhs = gimple_assign_lhs (stmt);
650
0004f992
MJ
651 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
652 {
653 if (flag_strict_aliasing
654 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
655 return false;
656
657 if (TREE_CODE (lhs) == COMPONENT_REF
658 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 659 return false;
450aa0ee 660 /* In the future we might want to use get_ref_base_and_extent to find
0004f992
MJ
661 if there is a field corresponding to the offset and if so, proceed
662 almost like if it was a component ref. */
663 }
f65cf2b7
MJ
664 }
665 return true;
666}
667
3b97a5c7
MJ
668/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
669 to check whether a particular statement may modify the virtual table
670 pointerIt stores its result into DATA, which points to a
11478306 671 prop_type_change_info structure. */
f65cf2b7
MJ
672
673static bool
674check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
675{
355fe088 676 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 677 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
678
679 if (stmt_may_be_vtbl_ptr_store (stmt))
680 {
681 tci->type_maybe_changed = true;
682 return true;
683 }
684 else
685 return false;
686}
687
058d0a90
JH
688/* See if ARG is PARAM_DECl describing instance passed by pointer
689 or reference in FUNCTION. Return false if the dynamic type may change
690 in between beggining of the function until CALL is invoked.
290ebcb7 691
058d0a90 692 Generally functions are not allowed to change type of such instances,
67914693 693 but they call destructors. We assume that methods cannot destroy the THIS
058d0a90
JH
694 pointer. Also as a special cases, constructor and destructors may change
695 type of the THIS pointer. */
696
697static bool
355fe088 698param_type_may_change_p (tree function, tree arg, gimple *call)
058d0a90 699{
67914693 700 /* Pure functions cannot do any changes on the dynamic type;
058d0a90
JH
701 that require writting to memory. */
702 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
703 return false;
704 /* We need to check if we are within inlined consturctor
705 or destructor (ideally we would have way to check that the
706 inline cdtor is actually working on ARG, but we don't have
707 easy tie on this, so punt on all non-pure cdtors.
708 We may also record the types of cdtors and once we know type
709 of the instance match them.
710
711 Also code unification optimizations may merge calls from
712 different blocks making return values unreliable. So
713 do nothing during late optimization. */
714 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
715 return true;
716 if (TREE_CODE (arg) == SSA_NAME
717 && SSA_NAME_IS_DEFAULT_DEF (arg)
718 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
719 {
720 /* Normal (non-THIS) argument. */
721 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
722 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
026c3cfd 723 /* THIS pointer of an method - here we want to watch constructors
058d0a90
JH
724 and destructors as those definitely may change the dynamic
725 type. */
726 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
727 && !DECL_CXX_CONSTRUCTOR_P (function)
728 && !DECL_CXX_DESTRUCTOR_P (function)
729 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
730 {
731 /* Walk the inline stack and watch out for ctors/dtors. */
732 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
733 block = BLOCK_SUPERCONTEXT (block))
00a0ea64
JJ
734 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
735 return true;
058d0a90
JH
736 return false;
737 }
738 }
739 return true;
740}
290ebcb7 741
06d65050
JH
742/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
743 callsite CALL) by looking for assignments to its virtual table pointer. If
744 it is, return true and fill in the jump function JFUNC with relevant type
745 information or set it to unknown. ARG is the object itself (not a pointer
746 to it, unless dereferenced). BASE is the base of the memory access as
058d0a90
JH
747 returned by get_ref_base_and_extent, as is the offset.
748
749 This is helper function for detect_type_change and detect_type_change_ssa
750 that does the heavy work which is usually unnecesary. */
f65cf2b7
MJ
751
752static bool
c628d1c3
MJ
753detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
754 tree base, tree comp_type, gcall *call,
755 struct ipa_jump_func *jfunc,
058d0a90 756 HOST_WIDE_INT offset)
f65cf2b7 757{
11478306 758 struct prop_type_change_info tci;
f65cf2b7
MJ
759 ao_ref ao;
760
761 gcc_checking_assert (DECL_P (arg)
762 || TREE_CODE (arg) == MEM_REF
763 || handled_component_p (arg));
f65cf2b7 764
b49407f8
JH
765 comp_type = TYPE_MAIN_VARIANT (comp_type);
766
d570d364
JH
767 /* Const calls cannot call virtual methods through VMT and so type changes do
768 not matter. */
769 if (!flag_devirtualize || !gimple_vuse (call)
770 /* Be sure expected_type is polymorphic. */
771 || !comp_type
772 || TREE_CODE (comp_type) != RECORD_TYPE
773 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
774 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
775 return true;
4bf2a588 776
dd887943 777 ao_ref_init (&ao, arg);
f65cf2b7
MJ
778 ao.base = base;
779 ao.offset = offset;
780 ao.size = POINTER_SIZE;
781 ao.max_size = ao.size;
f65cf2b7 782
290ebcb7
MJ
783 tci.offset = offset;
784 tci.object = get_base_address (arg);
290ebcb7 785 tci.type_maybe_changed = false;
290ebcb7 786
c628d1c3
MJ
787 int walked
788 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
789 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
790
791 if (walked >= 0 && !tci.type_maybe_changed)
f65cf2b7
MJ
792 return false;
793
04be694e 794 ipa_set_jf_unknown (jfunc);
f65cf2b7
MJ
795 return true;
796}
797
058d0a90
JH
798/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
799 If it is, return true and fill in the jump function JFUNC with relevant type
800 information or set it to unknown. ARG is the object itself (not a pointer
801 to it, unless dereferenced). BASE is the base of the memory access as
802 returned by get_ref_base_and_extent, as is the offset. */
803
804static bool
c628d1c3
MJ
805detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
806 tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
807 HOST_WIDE_INT offset)
058d0a90
JH
808{
809 if (!flag_devirtualize)
810 return false;
811
812 if (TREE_CODE (base) == MEM_REF
813 && !param_type_may_change_p (current_function_decl,
814 TREE_OPERAND (base, 0),
815 call))
816 return false;
c628d1c3 817 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
058d0a90
JH
818 call, jfunc, offset);
819}
820
f65cf2b7
MJ
821/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
822 SSA name (its dereference will become the base and the offset is assumed to
823 be zero). */
824
825static bool
c628d1c3 826detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
538dd0b7 827 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
828{
829 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 830 if (!flag_devirtualize
06d65050 831 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
832 return false;
833
058d0a90
JH
834 if (!param_type_may_change_p (current_function_decl, arg, call))
835 return false;
836
f65cf2b7 837 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 838 build_int_cst (ptr_type_node, 0));
f65cf2b7 839
c628d1c3 840 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
058d0a90 841 call, jfunc, 0);
f65cf2b7
MJ
842}
843
fdb0e1b4
MJ
844/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
845 boolean variable pointed to by DATA. */
846
847static bool
848mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
849 void *data)
850{
851 bool *b = (bool *) data;
852 *b = true;
853 return true;
854}
855
8aab5218
MJ
856/* Find the nearest valid aa status for parameter specified by INDEX that
857 dominates BB. */
858
56b40062
MJ
859static struct ipa_param_aa_status *
860find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
861 int index)
862{
863 while (true)
864 {
865 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
866 if (!bb)
867 return NULL;
868 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
869 if (!bi->param_aa_statuses.is_empty ()
870 && bi->param_aa_statuses[index].valid)
871 return &bi->param_aa_statuses[index];
872 }
873}
874
875/* Get AA status structure for the given BB and parameter with INDEX. Allocate
876 structures and/or intialize the result with a dominating description as
877 necessary. */
878
56b40062
MJ
879static struct ipa_param_aa_status *
880parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
881 int index)
882{
883 gcc_checking_assert (fbi);
884 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
885 if (bi->param_aa_statuses.is_empty ())
886 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
56b40062 887 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
8aab5218
MJ
888 if (!paa->valid)
889 {
890 gcc_checking_assert (!paa->parm_modified
891 && !paa->ref_modified
892 && !paa->pt_modified);
56b40062 893 struct ipa_param_aa_status *dom_paa;
8aab5218
MJ
894 dom_paa = find_dominating_aa_status (fbi, bb, index);
895 if (dom_paa)
896 *paa = *dom_paa;
897 else
898 paa->valid = true;
899 }
900
901 return paa;
902}
903
688010ba 904/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 905 a value known not to be modified in this function before reaching the
8aab5218
MJ
906 statement STMT. FBI holds information about the function we have so far
907 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
908
909static bool
56b40062 910parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 911 gimple *stmt, tree parm_load)
fdb0e1b4 912{
56b40062 913 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
914 bool modified = false;
915 ao_ref refd;
916
776e4fe2
MJ
917 tree base = get_base_address (parm_load);
918 gcc_assert (TREE_CODE (base) == PARM_DECL);
919 if (TREE_READONLY (base))
920 return true;
921
c628d1c3
MJ
922 gcc_checking_assert (fbi);
923 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
924 if (paa->parm_modified)
925 return false;
fdb0e1b4
MJ
926
927 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 928 ao_ref_init (&refd, parm_load);
8aab5218 929 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
930 &modified, NULL, NULL,
931 fbi->aa_walk_budget + 1);
932 if (walked < 0)
933 {
934 modified = true;
935 if (fbi)
936 fbi->aa_walk_budget = 0;
937 }
938 else if (fbi)
939 fbi->aa_walk_budget -= walked;
8aab5218
MJ
940 if (paa && modified)
941 paa->parm_modified = true;
8b7773a4 942 return !modified;
fdb0e1b4
MJ
943}
944
a2b4c188
KV
945/* If STMT is an assignment that loads a value from an parameter declaration,
946 return the index of the parameter in ipa_node_params which has not been
947 modified. Otherwise return -1. */
948
949static int
950load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 951 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
952 gimple *stmt)
953{
bda2bc48
MJ
954 int index;
955 tree op1;
956
a2b4c188
KV
957 if (!gimple_assign_single_p (stmt))
958 return -1;
959
bda2bc48
MJ
960 op1 = gimple_assign_rhs1 (stmt);
961 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
962 return -1;
963
bda2bc48
MJ
964 index = ipa_get_param_decl_index_1 (descriptors, op1);
965 if (index < 0
966 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
967 return -1;
968
bda2bc48 969 return index;
a2b4c188
KV
970}
971
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Use the cached negative answer for this BB if we have one.  */
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* Walk the virtual definitions reaching STMT; mark_modified sets MODIFIED
     if any of them may clobber REF.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: assume the worst and stop further walks.  */
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a discovered modification for later queries in this BB.  */
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
1005
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  /* Use the cached negative answer for this BB if we have one.  */
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  /* Walk virtual definitions reaching CALL, looking for stores that may
     clobber the pointed-to data.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Budget exhausted: assume the worst and stop further walks.  */
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  /* Cache a discovered modification for later queries in this BB.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1045
91bb9f80
MJ
1046/* Return true if we can prove that OP is a memory reference loading
1047 data from an aggregate passed as a parameter.
1048
1049 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1050 false if it cannot prove that the value has not been modified before the
1051 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1052 if it cannot prove the value has not been modified, in that case it will
1053 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1054
8b7773a4
MJ
1055 INFO and PARMS_AINFO describe parameters of the current function (but the
1056 latter can be NULL), STMT is the load statement. If function returns true,
1057 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1058 within the aggregate and whether it is a load from a value passed by
1059 reference respectively. */
1060
ff302741 1061bool
56b40062 1062ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1063 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1064 gimple *stmt, tree op, int *index_p,
86003645 1065 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
91bb9f80 1066 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1067{
1068 int index;
588db50c 1069 HOST_WIDE_INT size;
ee45a32d 1070 bool reverse;
588db50c 1071 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1072
588db50c 1073 if (!base)
8b7773a4
MJ
1074 return false;
1075
1076 if (DECL_P (base))
1077 {
d044dd17 1078 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1079 if (index >= 0
8aab5218 1080 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1081 {
1082 *index_p = index;
1083 *by_ref_p = false;
3ff2ca23
JJ
1084 if (size_p)
1085 *size_p = size;
91bb9f80
MJ
1086 if (guaranteed_unmodified)
1087 *guaranteed_unmodified = true;
8b7773a4
MJ
1088 return true;
1089 }
1090 return false;
1091 }
1092
1093 if (TREE_CODE (base) != MEM_REF
1094 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1095 || !integer_zerop (TREE_OPERAND (base, 1)))
1096 return false;
1097
1098 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1099 {
1100 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1101 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1102 }
1103 else
1104 {
1105 /* This branch catches situations where a pointer parameter is not a
1106 gimple register, for example:
1107
1108 void hip7(S*) (struct S * p)
1109 {
1110 void (*<T2e4>) (struct S *) D.1867;
1111 struct S * p.1;
1112
1113 <bb 2>:
1114 p.1_1 = p;
1115 D.1867_2 = p.1_1->f;
1116 D.1867_2 ();
1117 gdp = &p;
1118 */
1119
355fe088 1120 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1121 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1122 }
1123
91bb9f80 1124 if (index >= 0)
8b7773a4 1125 {
91bb9f80
MJ
1126 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1127 if (!data_preserved && !guaranteed_unmodified)
1128 return false;
1129
8b7773a4
MJ
1130 *index_p = index;
1131 *by_ref_p = true;
3ff2ca23
JJ
1132 if (size_p)
1133 *size_p = size;
91bb9f80
MJ
1134 if (guaranteed_unmodified)
1135 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1136 return true;
1137 }
1138 return false;
1139}
1140
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Find the parameter (if any) the RHS value originates from; TC_SSA is
     the SSA name actually being passed through.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1) and 2) above: pass-through, possibly with arithmetic.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    /* The second operand must be an IPA invariant; comparisons are
	       exempt from the type-compatibility requirement because they
	       produce a boolean, not a value of the parameter's type.  */
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	  /* FALLTHRU -- the default case is intentionally empty.  */
	default:;
	}
      return;
    }

  /* Case 3) above: taking the address of a component of a parameter that is
     itself passed by reference (ancestor jump function).  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1281
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  /* Strip the ADDR_EXPR; OBJ keeps the full referenced object.  */
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  /* The base must be a MEM_REF with a compile-time constant offset.  */
  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own offset into the component offset.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1325
685b0d13 1326
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero pointer, the other (TMP) the casted
     address computed in the guarded block.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  /* The defining statement must be &parm->component.  */
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come straight from the pattern's blocks.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1407
be95e2b9
MJ
1408/* Inspect the given TYPE and return true iff it has the same structure (the
1409 same number of fields of the same types) as a C++ member pointer. If
1410 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1411 corresponding fields there. */
1412
3e293154
MJ
1413static bool
1414type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1415{
1416 tree fld;
1417
1418 if (TREE_CODE (type) != RECORD_TYPE)
1419 return false;
1420
1421 fld = TYPE_FIELDS (type);
1422 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1423 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1424 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1425 return false;
1426
1427 if (method_ptr)
1428 *method_ptr = fld;
1429
910ad8de 1430 fld = DECL_CHAIN (fld);
8b7773a4 1431 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1432 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1433 return false;
1434 if (delta)
1435 *delta = fld;
1436
910ad8de 1437 if (DECL_CHAIN (fld))
3e293154
MJ
1438 return false;
1439
1440 return true;
1441}
1442
61502ca8 1443/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1444 return the rhs of its defining statement. Otherwise return RHS as it
1445 is. */
7ec49257
MJ
1446
1447static inline tree
1448get_ssa_def_if_simple_copy (tree rhs)
1449{
1450 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1451 {
355fe088 1452 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1453
1454 if (gimple_assign_single_p (def_stmt))
1455 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1456 else
1457 break;
7ec49257
MJ
1458 }
1459 return rhs;
1460}
1461
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate, as produced by
     get_ref_base_and_extent_hwi (presumably in bits -- cf. the
     BITS_PER_UNIT assertion in build_agg_jump_func_from_list).  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1474
46771da5
FX
1475/* Add a known content item into a linked list of ipa_known_agg_contents_list
1476 structure, in which all elements are sorted ascendingly by offset. */
0d48ee34 1477
46771da5
FX
1478static inline void
1479add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1480 struct ipa_known_agg_contents_list *item)
0d48ee34 1481{
46771da5
FX
1482 struct ipa_known_agg_contents_list *list = *plist;
1483
1484 for (; list; list = list->next)
0d48ee34 1485 {
46771da5
FX
1486 if (list->offset >= item->offset)
1487 break;
1488
1489 plist = &list->next;
0d48ee34
MJ
1490 }
1491
46771da5
FX
1492 item->next = list;
1493 *plist = item;
1494}
1495
1496/* Check whether a given known content is clobbered by certain element in
1497 a linked list of ipa_known_agg_contents_list. */
1498
1499static inline bool
1500clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1501 struct ipa_known_agg_contents_list *item)
1502{
1503 for (; list; list = list->next)
0d48ee34 1504 {
46771da5
FX
1505 if (list->offset >= item->offset)
1506 return list->offset < item->offset + item->size;
1507
1508 if (list->offset + list->size > item->offset)
1509 return true;
0d48ee34 1510 }
46771da5
FX
1511
1512 return false;
0d48ee34
MJ
1513}
1514
1515/* Build aggregate jump function from LIST, assuming there are exactly
46771da5 1516 CONST_COUNT constant entries there and that offset of the passed argument
0d48ee34
MJ
1517 is ARG_OFFSET and store it into JFUNC. */
1518
1519static void
1520build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1521 int const_count, HOST_WIDE_INT arg_offset,
1522 struct ipa_jump_func *jfunc)
1523{
1524 vec_alloc (jfunc->agg.items, const_count);
1525 while (list)
1526 {
1527 if (list->constant)
1528 {
1529 struct ipa_agg_jf_item item;
1530 item.offset = list->offset - arg_offset;
1531 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1532 item.value = unshare_expr_without_location (list->constant);
1533 jfunc->agg.items->quick_push (item);
1534 }
1535 list = list->next;
1536 }
1537}
1538
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  tree lhs, rhs, lhs_base;
  bool reverse;

  /* Only plain single assignments can be analyzed.  */
  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  /* Aggregate-typed stores and bit-field accesses are not tracked.  */
  if (!is_gimple_reg_type (TREE_TYPE (rhs))
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return false;

  lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					  &lhs_size, &reverse);
  if (!lhs_base)
    return false;

  /* The store must target the analyzed object itself: through a zero-offset
     MEM_REF based on BASE when CHECK_REF, or directly to BASE otherwise.  */
  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  /* Look through trivial SSA copies to reach the stored value.  */
  rhs = get_ssa_def_if_simple_copy (rhs);

  content->size = lhs_size;
  content->offset = lhs_offset;
  /* Record the value only when it is an IPA invariant; NULL marks a store
     of unknown content.  */
  content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
  content->next = NULL;

  return true;
}
1588
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  AA_WALK_BUDGET_P points to limit on number of
   statements we allow get_continuation_for_phi to examine.  */

static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = param_ipa_max_agg_items;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* Aggregate jump function tracking can be disabled by parameter.  */
  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  /* Address of a decl: analyze the underlying object directly.  */
	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      /* Aggregate passed by value.  */
      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip over PHIs whose arguments all lead to the same relevant
	     definition, within the given AA walking budget.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot decompose ends the scan.  */
	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1734
46771da5 1735
5d5f1e95
KV
1736/* Return the Ith param type of callee associated with call graph
1737 edge E. */
1738
1739tree
06d65050
JH
1740ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1741{
1742 int n;
1743 tree type = (e->callee
67348ccc 1744 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1745 : gimple_call_fntype (e->call_stmt));
1746 tree t = TYPE_ARG_TYPES (type);
1747
1748 for (n = 0; n < i; n++)
1749 {
1750 if (!t)
1751 break;
1752 t = TREE_CHAIN (t);
1753 }
1754 if (t)
1755 return TREE_VALUE (t);
1756 if (!e->callee)
1757 return NULL;
67348ccc 1758 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1759 for (n = 0; n < i; n++)
1760 {
1761 if (!t)
1762 return NULL;
1763 t = TREE_CHAIN (t);
1764 }
1765 if (t)
1766 return TREE_TYPE (t);
1767 return NULL;
1768}
1769
86cd0334
MJ
1770/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1771 allocated structure or a previously existing one shared with other jump
1772 functions and/or transformation summaries. */
1773
1774ipa_bits *
1775ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1776{
1777 ipa_bits tmp;
1778 tmp.value = value;
1779 tmp.mask = mask;
1780
1781 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1782 if (*slot)
1783 return *slot;
1784
1785 ipa_bits *res = ggc_alloc<ipa_bits> ();
1786 res->value = value;
1787 res->mask = mask;
1788 *slot = res;
1789
1790 return res;
1791}
1792
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures; the
   stored pointer may therefore be shared with other jump functions.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
1802
1803/* Return a pointer to a value_range just like *TMP, but either find it in
1804 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1805
028d81b1
AH
1806static value_range *
1807ipa_get_value_range (value_range *tmp)
86cd0334 1808{
028d81b1 1809 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
1810 if (*slot)
1811 return *slot;
1812
028d81b1 1813 value_range *vr = ggc_alloc<value_range> ();
86cd0334
MJ
1814 *vr = *tmp;
1815 *slot = vr;
1816
1817 return vr;
1818}
1819
/* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  /* Build a temporary on the stack; the overload interns the copy.  */
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}
1830
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures; the stored pointer may be shared.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
1841
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new on in GC memory.  The stored
   pointer may be shared with other jump functions.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1850
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls, or if jump functions have already
     been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* For pointer arguments, try to determine a polymorphic call context
	 usable for devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: pointer non-nullness is
	 encoded as an anti-range of [0, 0], integer ranges come from SSA
	 range info converted to the callee parameter type.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      /* Convert the range to the type of the callee parameter; only
		 keep it when the conversion yields something useful.  */
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: nonzero-bit masks for integers, alignment-derived
	 masks for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Now classify the argument itself: constant, aggregate pass-through,
	 or SSA-based pass-through/ancestor.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is the incoming value of a caller parameter,
		 i.e. a simple pass-through.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc,
					 &fbi->aa_walk_budget);
    }
  /* Drop the context vector again if no argument yielded anything useful.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2029
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  /* Walk the edges recorded for this BB in reverse order.  */
  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      /* CALLEE is NULL for indirect calls; those are always analyzed.  */
      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
2055
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load is either a COMPONENT_REF of a MEM_REF (field access), or a
     bare MEM_REF at a constant offset.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member pointer
     record (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* Field access: the MEM_REF offset must be zero and the field must be
	 the one we are after.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Raw MEM_REF: the byte offset must match the field position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2107
2108/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2109
3e293154
MJ
2110static bool
2111ipa_is_ssa_with_stmt_def (tree t)
2112{
2113 if (TREE_CODE (t) == SSA_NAME
2114 && !SSA_NAME_IS_DEFAULT_DEF (t))
2115 return true;
2116 else
2117 return false;
2118}
2119
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true record is as a destination of polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset the flags the various analyses below may set later.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
				       param_index, true);
  /* Also honor a polymorphic flag already present on the edge.  */
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call
	 (IPA_NODE_REF (node), param_index, true);
  return cs;
}
2143
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Easy case: the called pointer is the incoming value of a parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* Second case: the called pointer was loaded from an aggregate passed by
     value or reference.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Exactly one PHI argument must be a pfn load from the member pointer;
     the other comes from the virtual-call branch.  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  /* The condition must test (pfn & 1).  */
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the pfn value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must come from the same member pointer parameter;
     depending on the target ABI the virtual bit lives in the pfn or the
     delta field.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2340
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the incoming value of a parameter directly.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed since function entry.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* The object may be an ancestor (base sub-object) of a parameter.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2399
2400/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2401 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2402 containing intermediate information about each formal parameter. */
b258210c
MJ
2403
2404static void
56b40062 2405ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2406{
2407 tree target = gimple_call_fn (call);
b786d31f
JH
2408
2409 if (!target
2410 || (TREE_CODE (target) != SSA_NAME
2411 && !virtual_method_call_p (target)))
2412 return;
b258210c 2413
7d0aa05b 2414 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2415 /* If we previously turned the call into a direct call, there is
2416 no need to analyze. */
b786d31f 2417 if (cs && !cs->indirect_unknown_callee)
25583c4f 2418 return;
7d0aa05b 2419
a5b58b28 2420 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2421 {
7d0aa05b
JH
2422 tree instance;
2423 tree target = gimple_call_fn (call);
6f8091fc
JH
2424 ipa_polymorphic_call_context context (current_function_decl,
2425 target, call, &instance);
7d0aa05b 2426
ba392339
JH
2427 gcc_checking_assert (cs->indirect_info->otr_type
2428 == obj_type_ref_class (target));
2429 gcc_checking_assert (cs->indirect_info->otr_token
2430 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2431
29c43c83
JH
2432 cs->indirect_info->vptr_changed
2433 = !context.get_dynamic_type (instance,
2434 OBJ_TYPE_REF_OBJECT (target),
c628d1c3
MJ
2435 obj_type_ref_class (target), call,
2436 &fbi->aa_walk_budget);
0127c169 2437 cs->indirect_info->context = context;
7d0aa05b
JH
2438 }
2439
b258210c 2440 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2441 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2442 else if (virtual_method_call_p (target))
8aab5218 2443 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2444}
2445
2446
e33c6cd6 2447/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2448 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2449 formal parameters are called. */
be95e2b9 2450
3e293154 2451static void
355fe088 2452ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2453{
726a989a 2454 if (is_gimple_call (stmt))
538dd0b7 2455 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2456}
2457
2458/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2459 If OP is a parameter declaration, mark it as used in the info structure
2460 passed in DATA. */
2461
2462static bool
355fe088 2463visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2464{
99b1c316 2465 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2466
2467 op = get_base_address (op);
2468 if (op
2469 && TREE_CODE (op) == PARM_DECL)
2470 {
2471 int index = ipa_get_param_decl_index (info, op);
2472 gcc_assert (index >= 0);
310bc633 2473 ipa_set_param_used (info, index, true);
062c604f
MJ
2474 }
2475
2476 return false;
3e293154
MJ
2477}
2478
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      /* Debug statements must not influence the analysis.  */
      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      /* The same callback handles loads, stores and address-taken operands:
	 any reference to a PARM_DECL marks the parameter as used.  */
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  /* PHI nodes can also reference parameters.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
2507
/* Calculate controlled uses of parameters of NODE.  A use is "controlled"
   when it appears directly as a call argument; any other non-debug use makes
   the count IPA_UNDESCRIBED_USE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call uses; any other real (non-debug) use means the
		 uses cannot be fully described.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2552
8aab5218 2553/* Free stuff in BI. */
062c604f 2554
8aab5218
MJ
2555static void
2556free_ipa_bb_info (struct ipa_bb_info *bi)
2557{
2558 bi->cg_edges.release ();
2559 bi->param_aa_statuses.release ();
3e293154
MJ
2560}
2561
/* Dominator walker driving the analysis.  For each basic block it first
   inspects parameter uses and then computes jump functions for outgoing
   edges.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Per-function analysis context shared by all callbacks.  */
  struct ipa_func_body_info *m_fbi;
};

/* Analyze parameter uses and compute jump functions in BB; visit all
   dominated children (return NULL means no edge is skipped).  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2583
c3431191
ML
2584/* Release body info FBI. */
2585
2586void
2587ipa_release_body_info (struct ipa_func_body_info *fbi)
2588{
2589 int i;
2590 struct ipa_bb_info *bi;
2591
2592 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2593 free_ipa_bb_info (bi);
2594 fbi->bb_infos.release ();
2595}
2596
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF_GET_CREATE (node);

  /* Analyze each node exactly once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When function-specific options forbid the analysis, conservatively mark
     all parameters as used with undescribed uses.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  /* Budget limiting alias-analysis walks during the whole analysis.  */
  fbi.aa_walk_budget = param_ipa_max_aa_steps;

  /* Distribute all outgoing edges (direct and indirect) into per-BB
     buckets so the dominator walk can process them per block.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2656
be95e2b9 2657/* Update the jump functions associated with call graph edge E when the call
3e293154 2658 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2659 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2660
3e293154
MJ
2661static void
2662update_jump_functions_after_inlining (struct cgraph_edge *cs,
2663 struct cgraph_edge *e)
2664{
99b1c316
MS
2665 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2666 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
2667 if (!args)
2668 return;
3e293154
MJ
2669 int count = ipa_get_cs_argument_count (args);
2670 int i;
2671
2672 for (i = 0; i < count; i++)
2673 {
b258210c 2674 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
fd776d28
JH
2675 if (!top)
2676 {
2677 ipa_set_jf_unknown (dst);
2678 continue;
2679 }
99b1c316 2680 class ipa_polymorphic_call_context *dst_ctx
5ce97055 2681 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2682
685b0d13
MJ
2683 if (dst->type == IPA_JF_ANCESTOR)
2684 {
b258210c 2685 struct ipa_jump_func *src;
8b7773a4 2686 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 2687 class ipa_polymorphic_call_context *src_ctx
5ce97055 2688 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2689
b258210c
MJ
2690 /* Variable number of arguments can cause havoc if we try to access
2691 one that does not exist in the inlined edge. So make sure we
2692 don't. */
8b7773a4 2693 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2694 {
04be694e 2695 ipa_set_jf_unknown (dst);
b258210c
MJ
2696 continue;
2697 }
2698
8b7773a4
MJ
2699 src = ipa_get_ith_jump_func (top, dst_fid);
2700
5ce97055
JH
2701 if (src_ctx && !src_ctx->useless_p ())
2702 {
99b1c316 2703 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
2704
2705 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2706 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2707 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2708 ctx.offset_by (dst->value.ancestor.offset);
2709 if (!ctx.useless_p ())
2710 {
a7d1f3fe
ML
2711 if (!dst_ctx)
2712 {
2713 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2714 count);
2715 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2716 }
2717
2718 dst_ctx->combine_with (ctx);
5ce97055
JH
2719 }
2720 }
2721
8b7773a4
MJ
2722 if (src->agg.items
2723 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2724 {
2725 struct ipa_agg_jf_item *item;
2726 int j;
2727
2728 /* Currently we do not produce clobber aggregate jump functions,
2729 replace with merging when we do. */
2730 gcc_assert (!dst->agg.items);
2731
9771b263 2732 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2733 dst->agg.by_ref = src->agg.by_ref;
9771b263 2734 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2735 item->offset -= dst->value.ancestor.offset;
2736 }
2737
3b97a5c7
MJ
2738 if (src->type == IPA_JF_PASS_THROUGH
2739 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2740 {
2741 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2742 dst->value.ancestor.agg_preserved &=
2743 src->value.pass_through.agg_preserved;
2744 }
b258210c
MJ
2745 else if (src->type == IPA_JF_ANCESTOR)
2746 {
2747 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2748 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2749 dst->value.ancestor.agg_preserved &=
2750 src->value.ancestor.agg_preserved;
b258210c
MJ
2751 }
2752 else
04be694e 2753 ipa_set_jf_unknown (dst);
b258210c
MJ
2754 }
2755 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2756 {
b258210c
MJ
2757 struct ipa_jump_func *src;
2758 /* We must check range due to calls with variable number of arguments
2759 and we cannot combine jump functions with operations. */
2760 if (dst->value.pass_through.operation == NOP_EXPR
5a0236f8 2761 && (top && dst->value.pass_through.formal_id
b258210c
MJ
2762 < ipa_get_cs_argument_count (top)))
2763 {
8b7773a4
MJ
2764 int dst_fid = dst->value.pass_through.formal_id;
2765 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2766 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 2767 class ipa_polymorphic_call_context *src_ctx
5ce97055 2768 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2769
5ce97055
JH
2770 if (src_ctx && !src_ctx->useless_p ())
2771 {
99b1c316 2772 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
2773
2774 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2775 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2776 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2777 if (!ctx.useless_p ())
2778 {
2779 if (!dst_ctx)
2780 {
2781 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2782 count);
2783 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2784 }
2785 dst_ctx->combine_with (ctx);
2786 }
2787 }
b8f6e610
MJ
2788 switch (src->type)
2789 {
2790 case IPA_JF_UNKNOWN:
04be694e 2791 ipa_set_jf_unknown (dst);
b8f6e610 2792 break;
b8f6e610
MJ
2793 case IPA_JF_CONST:
2794 ipa_set_jf_cst_copy (dst, src);
2795 break;
2796
2797 case IPA_JF_PASS_THROUGH:
2798 {
2799 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2800 enum tree_code operation;
2801 operation = ipa_get_jf_pass_through_operation (src);
2802
2803 if (operation == NOP_EXPR)
2804 {
3b97a5c7 2805 bool agg_p;
b8f6e610
MJ
2806 agg_p = dst_agg_p
2807 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2808 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 2809 }
a2b4c188
KV
2810 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2811 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
2812 else
2813 {
2814 tree operand = ipa_get_jf_pass_through_operand (src);
2815 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2816 operation);
2817 }
2818 break;
2819 }
2820 case IPA_JF_ANCESTOR:
2821 {
3b97a5c7 2822 bool agg_p;
b8f6e610
MJ
2823 agg_p = dst_agg_p
2824 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2825 ipa_set_ancestor_jf (dst,
2826 ipa_get_jf_ancestor_offset (src),
b8f6e610 2827 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2828 agg_p);
b8f6e610
MJ
2829 break;
2830 }
2831 default:
2832 gcc_unreachable ();
2833 }
8b7773a4
MJ
2834
2835 if (src->agg.items
b8f6e610 2836 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2837 {
2838 /* Currently we do not produce clobber aggregate jump
2839 functions, replace with merging when we do. */
2840 gcc_assert (!dst->agg.items);
2841
2842 dst->agg.by_ref = src->agg.by_ref;
9771b263 2843 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2844 }
b258210c
MJ
2845 }
2846 else
04be694e 2847 ipa_set_jf_unknown (dst);
3e293154 2848 }
b258210c
MJ
2849 }
2850}
2851
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  TARGET may also be a non-FUNCTION_DECL constant
   that canonicalize_constructor_val can fold to a function declaration; a
   non-function constant target is redirected to __builtin_unreachable.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  /* Set when the call was proven to reach no valid function and was
     redirected to __builtin_unreachable; suppresses the normal dump.  */
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }


	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated, give up unless the existing
     speculation agrees with the newly discovered target.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s.  Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agree with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      /* Prefer a non-interposable alias as the speculative callee so the
	 speculation cannot be invalidated by symbol interposition.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	 (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
3017
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  REQ_OFFSET is in bits (array element offsets are shifted by
   LOG2_BITS_PER_UNIT and record fields use int_bit_position).  Recurses
   into nested CONSTRUCTORs.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  /* Rebase the index against a non-zero lower bound of the
		     array domain.  */
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    /* No explicit index: the element position is the running
	       constructor-element count.  */
	    off = wi::to_offset (unit_size) * ix;

	  /* Convert the byte offset to bits; skip elements whose offset
	     does not fit a signed HOST_WIDE_INT or is negative.  */
	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-fields are not addressable in the way we need; skip them.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      /* Elements are scanned in order; past the requested offset there can
	 be no match.  */
      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
3090
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  OFFSET is the bit offset of the looked-up value; BY_REF says whether
   SCALAR is expected to be the address of the underlying variable (in which
   case it must be an ADDR_EXPR) rather than the variable itself.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  /* Only a read-only global variable with a CONSTRUCTOR initializer can
     supply a usable compile-time constant.  */
  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
3114
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.
   The static-initializer lookup is tried first; AGG items are consulted only
   when it fails and AGG's by_ref mode matches BY_REF.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
3156
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  Returns false when RDESC has no
   originating edge or when the reference cannot be located on the caller.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
  return true;
}
3181
3182/* If JFUNC has a reference description with refcount different from
3183 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3184 NULL. JFUNC must be a constant jump function. */
3185
3186static struct ipa_cst_ref_desc *
3187jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3188{
3189 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3190 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3191 return rdesc;
3192 else
3193 return NULL;
3194}
3195
568cda29
MJ
3196/* If the value of constant jump function JFUNC is an address of a function
3197 declaration, return the associated call graph node. Otherwise return
3198 NULL. */
3199
3200static cgraph_node *
3201cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3202{
3203 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3204 tree cst = ipa_get_jf_constant (jfunc);
3205 if (TREE_CODE (cst) != ADDR_EXPR
3206 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3207 return NULL;
3208
d52f5295 3209 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3210}
3211
3212
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  Note that the
   decrement itself happens as a side effect of the condition below.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
3234
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee is loaded from aggregate contents; only trust the value
	 if it comes from a global constant or the memory is guaranteed not
	 to be modified between the load and the call.  */
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function's described use has been turned into a direct
	 call; drop the corresponding reference-description refcount.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3283
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.
   The returned target is __builtin_unreachable; its cgraph node is created
   eagerly so the edge can be redirected to it.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
3306
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  Two mechanisms are
   tried: a lookup through a known vtable pointer value stored in JFUNC's
   aggregate part, and polymorphic call target analysis on CTX; the latter,
   when final, overrides the former.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* The lookup is only speculative if the vptr may have
		     changed since the value was recorded.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  /* Combine the incoming context with the context of the call site itself
     and enumerate the possible polymorphic call targets.  */
  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A final list with at most one target makes the result certain,
	 superseding any speculative vtable-based lookup above.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* Fall back to speculative devirtualization for hot calls.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3413
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->inlined_to
				? cs->caller->inlined_to
				: cs->caller);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* Fetch the successor first: edge manipulation below may remove or
	 replace IE.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember the current speculative callee so we can recognize when a
	 rediscovered direct target merely confirms the speculation.  */
      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making the edge direct may have reallocated the edge args;
	     refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* Re-map the indirect call's parameter index through the jump
	 function so the note stays valid in the inlined-to body.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3549
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into edges inlined into NODE; update jump functions on the
     edges that leave the inlined subtree.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
3578
4502fe8d
MJ
3579/* Combine two controlled uses counts as done during inlining. */
3580
3581static int
3582combine_controlled_uses_counters (int c, int d)
3583{
3584 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3585 return IPA_UNDESCRIBED_USE;
3586 else
3587 return c + d - 1;
3588}
3589
3590/* Propagate number of controlled users from CS->caleee to the new root of the
3591 tree of inlined nodes. */
3592
3593static void
3594propagate_controlled_uses (struct cgraph_edge *cs)
3595{
99b1c316 3596 class ipa_edge_args *args = IPA_EDGE_REF (cs);
a33c028e
JH
3597 if (!args)
3598 return;
a62bfab5
ML
3599 struct cgraph_node *new_root = cs->caller->inlined_to
3600 ? cs->caller->inlined_to : cs->caller;
99b1c316
MS
3601 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3602 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4502fe8d
MJ
3603 int count, i;
3604
6cf67b62
JH
3605 if (!old_root_info)
3606 return;
3607
4502fe8d
MJ
3608 count = MIN (ipa_get_cs_argument_count (args),
3609 ipa_get_param_count (old_root_info));
3610 for (i = 0; i < count; i++)
3611 {
3612 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3613 struct ipa_cst_ref_desc *rdesc;
3614
3615 if (jf->type == IPA_JF_PASS_THROUGH)
3616 {
3617 int src_idx, c, d;
3618 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3619 c = ipa_get_controlled_uses (new_root_info, src_idx);
3620 d = ipa_get_controlled_uses (old_root_info, i);
3621
3622 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3623 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3624 c = combine_controlled_uses_counters (c, d);
3625 ipa_set_controlled_uses (new_root_info, src_idx, c);
3626 if (c == 0 && new_root_info->ipcp_orig_node)
3627 {
3628 struct cgraph_node *n;
3629 struct ipa_ref *ref;
44210a96 3630 tree t = new_root_info->known_csts[src_idx];
4502fe8d
MJ
3631
3632 if (t && TREE_CODE (t) == ADDR_EXPR
3633 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
d52f5295 3634 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
d122681a 3635 && (ref = new_root->find_reference (n, NULL, 0)))
4502fe8d
MJ
3636 {
3637 if (dump_file)
3638 fprintf (dump_file, "ipa-prop: Removing cloning-created "
464d0118
ML
3639 "reference from %s to %s.\n",
3640 new_root->dump_name (),
3641 n->dump_name ());
d122681a 3642 ref->remove_reference ();
4502fe8d
MJ
3643 }
3644 }
3645 }
3646 else if (jf->type == IPA_JF_CONST
3647 && (rdesc = jfunc_rdesc_usable (jf)))
3648 {
3649 int d = ipa_get_controlled_uses (old_root_info, i);
3650 int c = rdesc->refcount;
3651 rdesc->refcount = combine_controlled_uses_counters (c, d);
3652 if (rdesc->refcount == 0)
3653 {
3654 tree cst = ipa_get_jf_constant (jf);
3655 struct cgraph_node *n;
3656 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3657 && TREE_CODE (TREE_OPERAND (cst, 0))
3658 == FUNCTION_DECL);
d52f5295 3659 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4502fe8d
MJ
3660 if (n)
3661 {
3662 struct cgraph_node *clone;
568cda29 3663 bool ok;
67348ccc 3664 ok = remove_described_reference (n, rdesc);
568cda29 3665 gcc_checking_assert (ok);
4502fe8d
MJ
3666
3667 clone = cs->caller;
a62bfab5 3668 while (clone->inlined_to
6cf67b62
JH
3669 && clone->ipcp_clone
3670 && clone != rdesc->cs->caller)
4502fe8d
MJ
3671 {
3672 struct ipa_ref *ref;
d122681a 3673 ref = clone->find_reference (n, NULL, 0);
4502fe8d
MJ
3674 if (ref)
3675 {
3676 if (dump_file)
3677 fprintf (dump_file, "ipa-prop: Removing "
3678 "cloning-created reference "
464d0118
ML
3679 "from %s to %s.\n",
3680 clone->dump_name (),
3681 n->dump_name ());
d122681a 3682 ref->remove_reference ();
4502fe8d
MJ
3683 }
3684 clone = clone->callers->caller;
3685 }
3686 }
3687 }
3688 }
3689 }
3690
3691 for (i = ipa_get_param_count (old_root_info);
3692 i < ipa_get_cs_argument_count (args);
3693 i++)
3694 {
3695 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3696
3697 if (jf->type == IPA_JF_CONST)
3698 {
3699 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3700 if (rdesc)
3701 rdesc->refcount = IPA_UNDESCRIBED_USE;
3702 }
3703 else if (jf->type == IPA_JF_PASS_THROUGH)
3704 ipa_set_controlled_uses (new_root_info,
3705 jf->value.pass_through.formal_id,
3706 IPA_UNDESCRIBED_USE);
3707 }
3708}
3709
3e293154
MJ
3710/* Update jump functions and call note functions on inlining the call site CS.
3711 CS is expected to lead to a node already cloned by
3712 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3713 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3714 created. */
be95e2b9 3715
f8e2a1ed 3716bool
3e293154 3717ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3718 vec<cgraph_edge *> *new_edges)
3e293154 3719{
5ee53a06 3720 bool changed;
f8e2a1ed
MJ
3721 /* Do nothing if the preparation phase has not been carried out yet
3722 (i.e. during early inlining). */
dd912cb8 3723 if (!ipa_node_params_sum)
f8e2a1ed 3724 return false;
6fe906a3 3725 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3726
4502fe8d 3727 propagate_controlled_uses (cs);
5ee53a06 3728 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
6cf67b62 3729 ipa_node_params_sum->remove (cs->callee);
5ee53a06 3730
d40e2362
JH
3731 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3732 if (args)
3733 {
3734 bool ok = true;
3735 if (args->jump_functions)
3736 {
3737 struct ipa_jump_func *jf;
3738 int i;
3739 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3740 if (jf->type == IPA_JF_CONST
3741 && ipa_get_jf_constant_rdesc (jf))
3742 {
3743 ok = false;
3744 break;
3745 }
3746 }
3747 if (ok)
3748 ipa_edge_args_sum->remove (cs);
3749 }
98aad294
JH
3750 if (ipcp_transformation_sum)
3751 ipcp_transformation_sum->remove (cs->callee);
d40e2362 3752
5ee53a06 3753 return changed;
518dc859
RL
3754}
3755
86cd0334
MJ
3756/* Ensure that array of edge arguments infos is big enough to accommodate a
3757 structure for all edges and reallocates it if not. Also, allocate
3758 associated hash tables is they do not already exist. */
3759
3760void
3761ipa_check_create_edge_args (void)
3762{
6fe906a3
MJ
3763 if (!ipa_edge_args_sum)
3764 ipa_edge_args_sum
78cd68c0 3765 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
6fe906a3 3766 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
3767 if (!ipa_bits_hash_table)
3768 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3769 if (!ipa_vr_hash_table)
3770 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3771}
3772
771578a0 3773/* Free all ipa_edge structures. */
be95e2b9 3774
518dc859 3775void
771578a0 3776ipa_free_all_edge_args (void)
518dc859 3777{
6fe906a3 3778 if (!ipa_edge_args_sum)
9771b263
DN
3779 return;
3780
ddf628e4 3781 ggc_delete (ipa_edge_args_sum);
6fe906a3 3782 ipa_edge_args_sum = NULL;
518dc859
RL
3783}
3784
771578a0 3785/* Free all ipa_node_params structures. */
be95e2b9 3786
518dc859 3787void
771578a0 3788ipa_free_all_node_params (void)
518dc859 3789{
ddf628e4 3790 ggc_delete (ipa_node_params_sum);
dd912cb8 3791 ipa_node_params_sum = NULL;
771578a0
MJ
3792}
3793
9d3e0adc 3794/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 3795 tables if they do not already exist. */
04be694e
MJ
3796
3797void
9d3e0adc 3798ipcp_transformation_initialize (void)
04be694e 3799{
86cd0334
MJ
3800 if (!ipa_bits_hash_table)
3801 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3802 if (!ipa_vr_hash_table)
3803 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
9d3e0adc
ML
3804 if (ipcp_transformation_sum == NULL)
3805 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
04be694e
MJ
3806}
3807
12e088ba
AC
3808/* Release the IPA CP transformation summary. */
3809
3810void
3811ipcp_free_transformation_sum (void)
3812{
3813 if (!ipcp_transformation_sum)
3814 return;
3815
78cd68c0
ML
3816 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
3817 ggc_free (ipcp_transformation_sum);
12e088ba
AC
3818 ipcp_transformation_sum = NULL;
3819}
3820
2c9561b5
MJ
3821/* Set the aggregate replacements of NODE to be AGGVALS. */
3822
3823void
3824ipa_set_node_agg_value_chain (struct cgraph_node *node,
3825 struct ipa_agg_replacement_value *aggvals)
3826{
9d3e0adc
ML
3827 ipcp_transformation_initialize ();
3828 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3829 s->agg_values = aggvals;
2c9561b5
MJ
3830}
3831
6fe906a3
MJ
3832/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3833 count data structures accordingly. */
be95e2b9 3834
6fe906a3
MJ
3835void
3836ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 3837{
568cda29
MJ
3838 if (args->jump_functions)
3839 {
3840 struct ipa_jump_func *jf;
3841 int i;
3842 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3843 {
3844 struct ipa_cst_ref_desc *rdesc;
3845 try_decrement_rdesc_refcount (jf);
3846 if (jf->type == IPA_JF_CONST
3847 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3848 && rdesc->cs == cs)
3849 rdesc->cs = NULL;
3850 }
568cda29 3851 }
518dc859
RL
3852}
3853
6fe906a3
MJ
3854/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3855 reference count data strucutres accordingly. */
be95e2b9 3856
6fe906a3
MJ
3857void
3858ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3859 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 3860{
8b7773a4 3861 unsigned int i;
771578a0 3862
9771b263 3863 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3864 if (old_args->polymorphic_call_contexts)
3865 new_args->polymorphic_call_contexts
3866 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3867
9771b263 3868 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3869 {
3870 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3871 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3872
3873 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3874
3875 if (src_jf->type == IPA_JF_CONST)
3876 {
3877 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3878
3879 if (!src_rdesc)
3880 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3881 else if (src->caller == dst->caller)
3882 {
3883 struct ipa_ref *ref;
5e20cdc9 3884 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3885 gcc_checking_assert (n);
d122681a
ML
3886 ref = src->caller->find_reference (n, src->call_stmt,
3887 src->lto_stmt_uid);
568cda29 3888 gcc_checking_assert (ref);
d122681a 3889 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3890
601f3293 3891 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3892 dst_rdesc->cs = dst;
3893 dst_rdesc->refcount = src_rdesc->refcount;
3894 dst_rdesc->next_duplicate = NULL;
3895 dst_jf->value.constant.rdesc = dst_rdesc;
3896 }
4502fe8d
MJ
3897 else if (src_rdesc->cs == src)
3898 {
601f3293 3899 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3900 dst_rdesc->cs = dst;
4502fe8d 3901 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3902 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3903 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3904 dst_jf->value.constant.rdesc = dst_rdesc;
3905 }
3906 else
3907 {
3908 struct ipa_cst_ref_desc *dst_rdesc;
3909 /* This can happen during inlining, when a JFUNC can refer to a
3910 reference taken in a function up in the tree of inline clones.
3911 We need to find the duplicate that refers to our tree of
3912 inline clones. */
3913
a62bfab5 3914 gcc_assert (dst->caller->inlined_to);
4502fe8d
MJ
3915 for (dst_rdesc = src_rdesc->next_duplicate;
3916 dst_rdesc;
3917 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3918 {
3919 struct cgraph_node *top;
a62bfab5
ML
3920 top = dst_rdesc->cs->caller->inlined_to
3921 ? dst_rdesc->cs->caller->inlined_to
2fd0985c 3922 : dst_rdesc->cs->caller;
a62bfab5 3923 if (dst->caller->inlined_to == top)
2fd0985c
MJ
3924 break;
3925 }
44a60244 3926 gcc_assert (dst_rdesc);
4502fe8d
MJ
3927 dst_jf->value.constant.rdesc = dst_rdesc;
3928 }
3929 }
6fe45955
MJ
3930 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3931 && src->caller == dst->caller)
3932 {
a62bfab5
ML
3933 struct cgraph_node *inline_root = dst->caller->inlined_to
3934 ? dst->caller->inlined_to : dst->caller;
99b1c316 3935 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
6fe45955
MJ
3936 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3937
3938 int c = ipa_get_controlled_uses (root_info, idx);
3939 if (c != IPA_UNDESCRIBED_USE)
3940 {
3941 c++;
3942 ipa_set_controlled_uses (root_info, idx, c);
3943 }
3944 }
4502fe8d 3945 }
771578a0
MJ
3946}
3947
dd912cb8 3948/* Analyze newly added function into callgraph. */
be95e2b9 3949
771578a0 3950static void
dd912cb8 3951ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3952{
dd912cb8
ML
3953 if (node->has_gimple_body_p ())
3954 ipa_analyze_node (node);
3955}
771578a0 3956
dd912cb8
ML
3957/* Hook that is called by summary when a node is duplicated. */
3958
3959void
3960ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3961 ipa_node_params *old_info,
3962 ipa_node_params *new_info)
3963{
3964 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3965
f65f1ae3 3966 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 3967 new_info->lattices = NULL;
771578a0 3968 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
3969 new_info->known_csts = old_info->known_csts.copy ();
3970 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 3971
8aab5218 3972 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3973 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3974 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3975
3976 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3977 if (old_av)
2c9561b5 3978 {
04be694e
MJ
3979 new_av = NULL;
3980 while (old_av)
3981 {
3982 struct ipa_agg_replacement_value *v;
2c9561b5 3983
04be694e
MJ
3984 v = ggc_alloc<ipa_agg_replacement_value> ();
3985 memcpy (v, old_av, sizeof (*v));
3986 v->next = new_av;
3987 new_av = v;
3988 old_av = old_av->next;
3989 }
3990 ipa_set_node_agg_value_chain (dst, new_av);
3991 }
98aad294 3992}
04be694e 3993
98aad294 3994/* Duplication of ipcp transformation summaries. */
86cd0334 3995
98aad294
JH
3996void
3997ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
3998 ipcp_transformation *src_trans,
3999 ipcp_transformation *dst_trans)
4000{
4001 /* Avoid redundant work of duplicating vectors we will never use. */
4002 if (dst->inlined_to)
4003 return;
4004 dst_trans->bits = vec_safe_copy (src_trans->bits);
4005 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4006 ipa_agg_replacement_value *agg = src_trans->agg_values,
4007 **aggptr = &dst_trans->agg_values;
4008 while (agg)
4009 {
4010 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4011 **aggptr = *agg;
4012 agg = agg->next;
4013 aggptr = &(*aggptr)->next;
2c9561b5 4014 }
771578a0
MJ
4015}
4016
4017/* Register our cgraph hooks if they are not already there. */
be95e2b9 4018
518dc859 4019void
771578a0 4020ipa_register_cgraph_hooks (void)
518dc859 4021{
dd912cb8 4022 ipa_check_create_node_params ();
6fe906a3 4023 ipa_check_create_edge_args ();
dd912cb8 4024
dd912cb8 4025 function_insertion_hook_holder =
3dafb85c 4026 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 4027}
518dc859 4028
771578a0 4029/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 4030
771578a0
MJ
4031static void
4032ipa_unregister_cgraph_hooks (void)
4033{
3dafb85c 4034 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 4035 function_insertion_hook_holder = NULL;
771578a0
MJ
4036}
4037
4038/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4039 longer needed after ipa-cp. */
be95e2b9 4040
771578a0 4041void
e33c6cd6 4042ipa_free_all_structures_after_ipa_cp (void)
3e293154 4043{
2bf86c84 4044 if (!optimize && !in_lto_p)
3e293154
MJ
4045 {
4046 ipa_free_all_edge_args ();
4047 ipa_free_all_node_params ();
2651e637
ML
4048 ipcp_sources_pool.release ();
4049 ipcp_cst_values_pool.release ();
4050 ipcp_poly_ctx_values_pool.release ();
4051 ipcp_agg_lattice_pool.release ();
3e293154 4052 ipa_unregister_cgraph_hooks ();
601f3293 4053 ipa_refdesc_pool.release ();
3e293154
MJ
4054 }
4055}
4056
4057/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4058 longer needed after indirect inlining. */
be95e2b9 4059
3e293154 4060void
e33c6cd6 4061ipa_free_all_structures_after_iinln (void)
771578a0
MJ
4062{
4063 ipa_free_all_edge_args ();
4064 ipa_free_all_node_params ();
4065 ipa_unregister_cgraph_hooks ();
2651e637
ML
4066 ipcp_sources_pool.release ();
4067 ipcp_cst_values_pool.release ();
4068 ipcp_poly_ctx_values_pool.release ();
4069 ipcp_agg_lattice_pool.release ();
601f3293 4070 ipa_refdesc_pool.release ();
518dc859
RL
4071}
4072
dcd416e3 4073/* Print ipa_tree_map data structures of all functions in the
518dc859 4074 callgraph to F. */
be95e2b9 4075
518dc859 4076void
2c9561b5 4077ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4078{
4079 int i, count;
99b1c316 4080 class ipa_node_params *info;
518dc859 4081
67348ccc 4082 if (!node->definition)
3e293154
MJ
4083 return;
4084 info = IPA_NODE_REF (node);
464d0118 4085 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
0302955a
JH
4086 if (!info)
4087 {
4088 fprintf (f, " no params return\n");
4089 return;
4090 }
3e293154
MJ
4091 count = ipa_get_param_count (info);
4092 for (i = 0; i < count; i++)
518dc859 4093 {
4502fe8d
MJ
4094 int c;
4095
a4e33812 4096 fprintf (f, " ");
e067bd43 4097 ipa_dump_param (f, info, i);
339f49ec
JH
4098 if (ipa_is_param_used (info, i))
4099 fprintf (f, " used");
40a777e8
JH
4100 if (ipa_is_param_used_by_ipa_predicates (info, i))
4101 fprintf (f, " used_by_ipa_predicates");
4102 if (ipa_is_param_used_by_indirect_call (info, i))
4103 fprintf (f, " used_by_indirect_call");
4104 if (ipa_is_param_used_by_polymorphic_call (info, i))
4105 fprintf (f, " used_by_polymorphic_call");
4502fe8d
MJ
4106 c = ipa_get_controlled_uses (info, i);
4107 if (c == IPA_UNDESCRIBED_USE)
4108 fprintf (f, " undescribed_use");
4109 else
4110 fprintf (f, " controlled_uses=%i", c);
3e293154 4111 fprintf (f, "\n");
518dc859
RL
4112 }
4113}
dcd416e3 4114
ca30a539 4115/* Print ipa_tree_map data structures of all functions in the
3e293154 4116 callgraph to F. */
be95e2b9 4117
3e293154 4118void
ca30a539 4119ipa_print_all_params (FILE * f)
3e293154
MJ
4120{
4121 struct cgraph_node *node;
4122
ca30a539 4123 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4124 FOR_EACH_FUNCTION (node)
ca30a539 4125 ipa_print_node_params (f, node);
3e293154 4126}
3f84bf08 4127
2c9561b5
MJ
4128/* Dump the AV linked list. */
4129
4130void
4131ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4132{
4133 bool comma = false;
4134 fprintf (f, " Aggregate replacements:");
4135 for (; av; av = av->next)
4136 {
4137 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4138 av->index, av->offset);
ef6cb4c7 4139 print_generic_expr (f, av->value);
2c9561b5
MJ
4140 comma = true;
4141 }
4142 fprintf (f, "\n");
4143}
4144
fb3f88cc
JH
4145/* Stream out jump function JUMP_FUNC to OB. */
4146
4147static void
4148ipa_write_jump_function (struct output_block *ob,
4149 struct ipa_jump_func *jump_func)
4150{
8b7773a4
MJ
4151 struct ipa_agg_jf_item *item;
4152 struct bitpack_d bp;
4153 int i, count;
f714ecf5 4154 int flag = 0;
fb3f88cc 4155
f714ecf5
JH
4156 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4157 as well as WPA memory by handling them specially. */
4158 if (jump_func->type == IPA_JF_CONST
4159 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4160 flag = 1;
4161
4162 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
fb3f88cc
JH
4163 switch (jump_func->type)
4164 {
4165 case IPA_JF_UNKNOWN:
4166 break;
4167 case IPA_JF_CONST:
5368224f 4168 gcc_assert (
4502fe8d 4169 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
f714ecf5
JH
4170 stream_write_tree (ob,
4171 flag
4172 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4173 : jump_func->value.constant.value, true);
fb3f88cc
JH
4174 break;
4175 case IPA_JF_PASS_THROUGH:
412288f1 4176 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4177 if (jump_func->value.pass_through.operation == NOP_EXPR)
4178 {
4179 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4180 bp = bitpack_create (ob->main_stream);
4181 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4182 streamer_write_bitpack (&bp);
4183 }
a2b4c188
KV
4184 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4185 == tcc_unary)
4186 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4187 else
4188 {
4189 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4190 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4191 }
fb3f88cc
JH
4192 break;
4193 case IPA_JF_ANCESTOR:
412288f1 4194 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4195 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4196 bp = bitpack_create (ob->main_stream);
4197 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4198 streamer_write_bitpack (&bp);
fb3f88cc 4199 break;
8b7773a4
MJ
4200 }
4201
9771b263 4202 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4203 streamer_write_uhwi (ob, count);
4204 if (count)
4205 {
4206 bp = bitpack_create (ob->main_stream);
4207 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4208 streamer_write_bitpack (&bp);
4209 }
4210
9771b263 4211 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4212 {
4213 streamer_write_uhwi (ob, item->offset);
4214 stream_write_tree (ob, item->value, true);
fb3f88cc 4215 }
04be694e 4216
209ca542 4217 bp = bitpack_create (ob->main_stream);
86cd0334 4218 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4219 streamer_write_bitpack (&bp);
86cd0334 4220 if (jump_func->bits)
209ca542 4221 {
86cd0334
MJ
4222 streamer_write_widest_int (ob, jump_func->bits->value);
4223 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4224 }
86cd0334 4225 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4226 streamer_write_bitpack (&bp);
86cd0334 4227 if (jump_func->m_vr)
8bc5448f
KV
4228 {
4229 streamer_write_enum (ob->main_stream, value_rang_type,
54994253
AH
4230 VR_LAST, jump_func->m_vr->kind ());
4231 stream_write_tree (ob, jump_func->m_vr->min (), true);
4232 stream_write_tree (ob, jump_func->m_vr->max (), true);
8bc5448f 4233 }
fb3f88cc
JH
4234}
4235
4236/* Read in jump function JUMP_FUNC from IB. */
4237
4238static void
99b1c316 4239ipa_read_jump_function (class lto_input_block *ib,
fb3f88cc 4240 struct ipa_jump_func *jump_func,
4502fe8d 4241 struct cgraph_edge *cs,
99b1c316 4242 class data_in *data_in,
f714ecf5 4243 bool prevails)
fb3f88cc 4244{
4a53743e
MJ
4245 enum jump_func_type jftype;
4246 enum tree_code operation;
8b7773a4 4247 int i, count;
f714ecf5
JH
4248 int val = streamer_read_uhwi (ib);
4249 bool flag = val & 1;
fb3f88cc 4250
f714ecf5 4251 jftype = (enum jump_func_type) (val / 2);
4a53743e 4252 switch (jftype)
fb3f88cc
JH
4253 {
4254 case IPA_JF_UNKNOWN:
04be694e 4255 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4256 break;
4257 case IPA_JF_CONST:
f714ecf5
JH
4258 {
4259 tree t = stream_read_tree (ib, data_in);
4260 if (flag && prevails)
4261 t = build_fold_addr_expr (t);
4262 ipa_set_jf_constant (jump_func, t, cs);
4263 }
fb3f88cc
JH
4264 break;
4265 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4266 operation = (enum tree_code) streamer_read_uhwi (ib);
4267 if (operation == NOP_EXPR)
4268 {
4269 int formal_id = streamer_read_uhwi (ib);
4270 struct bitpack_d bp = streamer_read_bitpack (ib);
4271 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4272 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4273 }
a2b4c188
KV
4274 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4275 {
4276 int formal_id = streamer_read_uhwi (ib);
4277 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4278 }
4a53743e
MJ
4279 else
4280 {
4281 tree operand = stream_read_tree (ib, data_in);
4282 int formal_id = streamer_read_uhwi (ib);
4283 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4284 operation);
4285 }
fb3f88cc
JH
4286 break;
4287 case IPA_JF_ANCESTOR:
4a53743e
MJ
4288 {
4289 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4290 int formal_id = streamer_read_uhwi (ib);
4291 struct bitpack_d bp = streamer_read_bitpack (ib);
4292 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4293 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4294 break;
4295 }
f714ecf5
JH
4296 default:
4297 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4298 }
4299
4300 count = streamer_read_uhwi (ib);
f714ecf5
JH
4301 if (prevails)
4302 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4303 if (count)
4304 {
4a53743e 4305 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4306 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4307 }
4308 for (i = 0; i < count; i++)
4309 {
f32682ca
DN
4310 struct ipa_agg_jf_item item;
4311 item.offset = streamer_read_uhwi (ib);
4312 item.value = stream_read_tree (ib, data_in);
f714ecf5
JH
4313 if (prevails)
4314 jump_func->agg.items->quick_push (item);
fb3f88cc 4315 }
04be694e
MJ
4316
4317 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4318 bool bits_known = bp_unpack_value (&bp, 1);
4319 if (bits_known)
4320 {
86cd0334
MJ
4321 widest_int value = streamer_read_widest_int (ib);
4322 widest_int mask = streamer_read_widest_int (ib);
f714ecf5
JH
4323 if (prevails)
4324 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4325 }
4326 else
86cd0334 4327 jump_func->bits = NULL;
8bc5448f
KV
4328
4329 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4330 bool vr_known = bp_unpack_value (&vr_bp, 1);
4331 if (vr_known)
4332 {
54994253 4333 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
86cd0334
MJ
4334 VR_LAST);
4335 tree min = stream_read_tree (ib, data_in);
4336 tree max = stream_read_tree (ib, data_in);
f714ecf5
JH
4337 if (prevails)
4338 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4339 }
4340 else
86cd0334 4341 jump_func->m_vr = NULL;
fb3f88cc
JH
4342}
4343
e33c6cd6
MJ
4344/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4345 relevant to indirect inlining to OB. */
661e7330
MJ
4346
4347static void
e33c6cd6
MJ
4348ipa_write_indirect_edge_info (struct output_block *ob,
4349 struct cgraph_edge *cs)
661e7330 4350{
99b1c316 4351 class cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4352 struct bitpack_d bp;
e33c6cd6 4353
412288f1 4354 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4355 bp = bitpack_create (ob->main_stream);
4356 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4357 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4358 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4359 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4360 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4361 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 4362 streamer_write_bitpack (&bp);
ba392339
JH
4363 if (ii->agg_contents || ii->polymorphic)
4364 streamer_write_hwi (ob, ii->offset);
4365 else
4366 gcc_assert (ii->offset == 0);
b258210c
MJ
4367
4368 if (ii->polymorphic)
4369 {
412288f1 4370 streamer_write_hwi (ob, ii->otr_token);
b9393656 4371 stream_write_tree (ob, ii->otr_type, true);
ba392339 4372 ii->context.stream_out (ob);
b258210c 4373 }
661e7330
MJ
4374}
4375
e33c6cd6
MJ
4376/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4377 relevant to indirect inlining from IB. */
661e7330
MJ
4378
4379static void
99b1c316
MS
4380ipa_read_indirect_edge_info (class lto_input_block *ib,
4381 class data_in *data_in,
40a777e8
JH
4382 struct cgraph_edge *cs,
4383 class ipa_node_params *info)
661e7330 4384{
99b1c316 4385 class cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4386 struct bitpack_d bp;
661e7330 4387
412288f1 4388 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 4389 bp = streamer_read_bitpack (ib);
2465dcc2 4390 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4391 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4392 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4393 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 4394 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 4395 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
4396 if (ii->agg_contents || ii->polymorphic)
4397 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4398 else
4399 ii->offset = 0;
b258210c
MJ
4400 if (ii->polymorphic)
4401 {
412288f1 4402 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4403 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 4404 ii->context.stream_in (ib, data_in);
b258210c 4405 }
40a777e8
JH
4406 if (info && ii->param_index >= 0)
4407 {
4408 if (ii->polymorphic)
4409 ipa_set_param_used_by_polymorphic_call (info,
4410 ii->param_index , true);
4411 ipa_set_param_used_by_indirect_call (info,
4412 ii->param_index, true);
4413 }
661e7330
MJ
4414}
4415
fb3f88cc
JH
4416/* Stream out NODE info to OB. */
4417
4418static void
4419ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4420{
4421 int node_ref;
7380e6ef 4422 lto_symtab_encoder_t encoder;
99b1c316 4423 class ipa_node_params *info = IPA_NODE_REF (node);
fb3f88cc
JH
4424 int j;
4425 struct cgraph_edge *e;
2465dcc2 4426 struct bitpack_d bp;
fb3f88cc 4427
7380e6ef 4428 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4429 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4430 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4431
0e8853ee
JH
4432 streamer_write_uhwi (ob, ipa_get_param_count (info));
4433 for (j = 0; j < ipa_get_param_count (info); j++)
4434 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4435 bp = bitpack_create (ob->main_stream);
8aab5218 4436 gcc_assert (info->analysis_done
661e7330 4437 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4438 gcc_assert (!info->node_enqueued);
4439 gcc_assert (!info->ipcp_orig_node);
4440 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4441 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4442 streamer_write_bitpack (&bp);
4502fe8d 4443 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
4444 {
4445 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4446 stream_write_tree (ob, ipa_get_type (info, j), true);
4447 }
fb3f88cc
JH
4448 for (e = node->callees; e; e = e->next_callee)
4449 {
99b1c316 4450 class ipa_edge_args *args = IPA_EDGE_REF (e);
fb3f88cc 4451
a33c028e
JH
4452 if (!args)
4453 {
4454 streamer_write_uhwi (ob, 0);
4455 continue;
4456 }
4457
5ce97055
JH
4458 streamer_write_uhwi (ob,
4459 ipa_get_cs_argument_count (args) * 2
4460 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 4461 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4462 {
4463 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4464 if (args->polymorphic_call_contexts != NULL)
4465 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4466 }
fb3f88cc 4467 }
e33c6cd6 4468 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 4469 {
99b1c316 4470 class ipa_edge_args *args = IPA_EDGE_REF (e);
a33c028e
JH
4471 if (!args)
4472 streamer_write_uhwi (ob, 0);
4473 else
5ce97055 4474 {
a33c028e
JH
4475 streamer_write_uhwi (ob,
4476 ipa_get_cs_argument_count (args) * 2
4477 + (args->polymorphic_call_contexts != NULL));
4478 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4479 {
4480 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4481 if (args->polymorphic_call_contexts != NULL)
4482 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4483 }
5ce97055 4484 }
c8246dbe
JH
4485 ipa_write_indirect_edge_info (ob, e);
4486 }
fb3f88cc
JH
4487}
4488
f714ecf5 4489/* Stream in edge E from IB. */
7b377b22 4490
f714ecf5 4491static void
99b1c316
MS
4492ipa_read_edge_info (class lto_input_block *ib,
4493 class data_in *data_in,
f714ecf5 4494 struct cgraph_edge *e, bool prevails)
7b377b22 4495{
f714ecf5
JH
4496 int count = streamer_read_uhwi (ib);
4497 bool contexts_computed = count & 1;
4498
4499 count /= 2;
4500 if (!count)
4501 return;
4502 if (prevails && e->possibly_call_in_translation_unit_p ())
4503 {
a33c028e 4504 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
f714ecf5
JH
4505 vec_safe_grow_cleared (args->jump_functions, count);
4506 if (contexts_computed)
4507 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4508 for (int k = 0; k < count; k++)
4509 {
4510 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4511 data_in, prevails);
4512 if (contexts_computed)
4513 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4514 (ib, data_in);
4515 }
4516 }
4517 else
4518 {
4519 for (int k = 0; k < count; k++)
4520 {
4521 struct ipa_jump_func dummy;
4522 ipa_read_jump_function (ib, &dummy, e,
4523 data_in, prevails);
4524 if (contexts_computed)
4525 {
99b1c316 4526 class ipa_polymorphic_call_context ctx;
f714ecf5
JH
4527 ctx.stream_in (ib, data_in);
4528 }
4529 }
4530 }
7b377b22
JH
4531}
4532
/* Stream in NODE info from IB.  DATA_IN provides the tree and string
   tables.  The layout mirrors ipa_write_node_info: parameter count,
   per-parameter move costs, a bitpack of "used" flags, per-parameter
   controlled-use counts and types, then per-edge jump functions for direct
   and indirect callees.  When NODE does not prevail, all data is read and
   discarded so the stream stays in sync.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  /* Only prevailing nodes get a (possibly new) ipa_node_params summary.  */
  class ipa_node_params *info = prevails
    ? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    /* Consume the move costs without storing them.  */
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  /* One "parameter used" bit per parameter.  */
  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  /* Controlled-use counts and parameter types; trees must be read even when
     discarded, to keep DATA_IN's tree stream consistent.  */
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
4587
/* Write jump functions for nodes in SET.  Emits the LTO jump-functions
   section: first the number of analyzed nodes in this partition, then the
   per-node info for each of them.  The same has-gimple-body-and-analyzed
   predicate is used in both passes so the count matches what is written.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if IPA analysis never ran.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count nodes that will be streamed.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4629
/* Read section in file FILE_DATA of length LEN with data DATA.  The section
   begins with an lto_function_header followed by the main stream and the
   string table; each entry is a symtab-encoder index plus the node info
   written by ipa_write_node_info.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Compute offsets of the individual sub-streams from the header sizes.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      /* Only definitions are streamed by the writer.  */
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4670
4671/* Read ipcp jump functions. */
4672
4673void
4674ipa_prop_read_jump_functions (void)
4675{
4676 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4677 struct lto_file_decl_data *file_data;
4678 unsigned int j = 0;
4679
4680 ipa_check_create_node_params ();
4681 ipa_check_create_edge_args ();
4682 ipa_register_cgraph_hooks ();
4683
4684 while ((file_data = file_data_vec[j++]))
4685 {
4686 size_t len;
3c56d8d8
ML
4687 const char *data
4688 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
4689 &len);
fb3f88cc
JH
4690 if (data)
4691 ipa_prop_read_section (file_data, data, len);
4692 }
4693}
4694
/* Stream out the IPCP transformation summary for NODE to OB: the node's
   reference, its chain of aggregate replacement values, then the
   per-parameter value ranges and known-bits information.  Layout must stay
   in sync with read_ipcp_transformation_info.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Aggregate replacements: length-prefixed list of
     offset/index/value/by_ref records.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  /* Value ranges: count, then per entry a "known" bit optionally followed
     by kind/min/max.  A zero count marks the absence of the table.  */
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Known bits: count, then per entry a presence bit optionally followed by
     the value and mask widest-ints.  */
  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
4770
/* Stream in the aggregate value replacement chain for NODE from IB, followed
   by the per-parameter value-range and known-bits tables.  Layout must match
   write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  /* Aggregate replacements; prepending reverses on-disk order, callers do
     not appear to depend on chain order here.  */
  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Value ranges; a zero count means the writer had none.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known bits; entries without the presence bit stay NULL (the vector was
     grown cleared).  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
4840
/* Write all aggregate replacement for nodes in set.  Emits the
   ipcp-transform LTO section: a count of nodes with gimple bodies in this
   partition, then the transformation summary of each.  The predicate in
   both passes must match so the count agrees with the records emitted.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second pass: stream each node's transformation info.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4876
4877/* Read replacements section in file FILE_DATA of length LEN with data
4878 DATA. */
4879
4880static void
4881read_replacements_section (struct lto_file_decl_data *file_data,
4882 const char *data,
4883 size_t len)
4884{
4885 const struct lto_function_header *header =
4886 (const struct lto_function_header *) data;
4887 const int cfg_offset = sizeof (struct lto_function_header);
4888 const int main_offset = cfg_offset + header->cfg_size;
4889 const int string_offset = main_offset + header->main_size;
99b1c316 4890 class data_in *data_in;
2c9561b5
MJ
4891 unsigned int i;
4892 unsigned int count;
4893
207c68cd 4894 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4895 header->main_size, file_data->mode_table);
2c9561b5
MJ
4896
4897 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4898 header->string_size, vNULL);
2c9561b5
MJ
4899 count = streamer_read_uhwi (&ib_main);
4900
4901 for (i = 0; i < count; i++)
4902 {
4903 unsigned int index;
4904 struct cgraph_node *node;
4905 lto_symtab_encoder_t encoder;
4906
4907 index = streamer_read_uhwi (&ib_main);
4908 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4909 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4910 index));
67348ccc 4911 gcc_assert (node->definition);
04be694e 4912 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
4913 }
4914 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4915 len);
4916 lto_data_in_delete (data_in);
4917}
4918
4919/* Read IPA-CP aggregate replacements. */
4920
4921void
04be694e 4922ipcp_read_transformation_summaries (void)
2c9561b5
MJ
4923{
4924 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4925 struct lto_file_decl_data *file_data;
4926 unsigned int j = 0;
4927
4928 while ((file_data = file_data_vec[j++]))
4929 {
4930 size_t len;
3c56d8d8
ML
4931 const char *data
4932 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
4933 &len);
2c9561b5
MJ
4934 if (data)
4935 read_replacements_section (file_data, data, len);
4936 }
4937}
4938
4939/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4940 NODE. */
4941
4942static void
4943adjust_agg_replacement_values (struct cgraph_node *node,
4944 struct ipa_agg_replacement_value *aggval)
4945{
4946 struct ipa_agg_replacement_value *v;
2c9561b5 4947
ff6686d2 4948 if (!node->clone.param_adjustments)
2c9561b5
MJ
4949 return;
4950
ff6686d2
MJ
4951 auto_vec<int, 16> new_indices;
4952 node->clone.param_adjustments->get_updated_indices (&new_indices);
2c9561b5
MJ
4953 for (v = aggval; v; v = v->next)
4954 {
ff6686d2 4955 gcc_checking_assert (v->index >= 0);
2c9561b5 4956
ff6686d2
MJ
4957 if ((unsigned) v->index < new_indices.length ())
4958 v->index = new_indices[v->index];
4959 else
4960 /* This can happen if we know about a constant passed by reference by
4961 an argument which is never actually used for anything, let alone
4962 loading that constant. */
4963 v->index = -1;
4964 }
2c9561b5
MJ
4965}
4966
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being walked, DESCS are its parameter
     descriptors and AV the chain of aggregate replacement values to apply.
     *SC is set when any statement gets modified, *CC when EH edge purging
     changed the CFG.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags reported back to the caller (see constructor).  */
  bool *m_something_changed, *m_cfg_changed;
};
4987
/* Visit every statement of BB and replace loads from aggregate-passed
   parameters with the known constants recorded in m_aggval, when offset,
   size and pass-by-reference mode all match.  Always returns NULL (no
   particular outgoing edge is selected).  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      /* Only plain register-typed loads are candidates.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Identify which parameter aggregate the load reads from, and at what
	 offset/size.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a recorded replacement matching index and offset.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  /* Types differ: fold a conversion, or view-convert when only the
	     representation differs; otherwise give up on this load.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      /* Replacing a potentially-trapping load by a constant may make EH
	 edges dead; record if purging them changed the CFG.  */
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5083
/* Update bits info of formal parameters as described in
   ipcp_transformation.  For integral parameters this sets nonzero-bits
   info on the default-def SSA name; for pointers it derives and records
   alignment/misalignment.  Indices are remapped through the node's clone
   parameter adjustments when present.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  /* When the node is a clone with removed/reordered parameters, map the
     original parameter indices to the clone's.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  /* Skip entries whose parameter no longer exists in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);


      /* Only integral or pointer SSA parameters with recorded bits are
	 usable.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in either the mask or the known value may be
	     nonzero.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: lowest set bit of the mask gives the alignment,
	     the known value bits below it the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken already-known stronger alignment.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5192
523fe5b6
AH
5193bool
5194ipa_vr::nonzero_p (tree expr_type) const
5195{
5196 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5197 return true;
5198
5199 unsigned prec = TYPE_PRECISION (expr_type);
5200 return (type == VR_RANGE
19849d15 5201 && TYPE_UNSIGNED (expr_type)
523fe5b6
AH
5202 && wi::eq_p (min, wi::one (prec))
5203 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5204}
5205
/* Update value range of formal parameters as described in
   ipcp_transformation.  Integral parameters get range info set on their
   default-def SSA name; pointer parameters whose range excludes zero are
   marked non-null.  Indices are remapped through the node's clone
   parameter adjustments when present.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  /* When the node is a clone with removed/reordered parameters, map the
     original parameter indices to the clone's.  */
  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  /* Skip entries whose parameter no longer exists in the clone.  */
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5287
/* IPCP transformation phase doing propagation of aggregate values.  Applies
   the stored transformation summary to the current function: bits and
   value-range updates first, then aggregate constant substitution via a
   dominator walk.  Returns TODO flags for the pass manager (0 when nothing
   changed).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  /* Without aggregate replacements (or parameters) there is nothing more
     to do.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to this clone's parameters.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = param_ipa_max_aa_steps;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Walk dominator tree, substituting known aggregate constants into
     loads.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The summary has been consumed; clear it so it is not applied again.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
86cd0334 5353
ac6f2e59
JH
5354
5355/* Return true if OTHER describes same agg item. */
5356bool
5357ipa_agg_jf_item::equal_to (const ipa_agg_jf_item &other)
5358{
5359 return offset == other.offset
5360 && operand_equal_p (value, other.value, 0);
5361}
86cd0334 5362#include "gt-ipa-prop.h"