]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
[PR c++/90613] Fix using-decl debug bloat
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
a5544970 2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
95a2ed03 55#include "tree-cfgcleanup.h"
771578a0 56
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Function summary holding IPA-CP transformation data, managed as a
   function_summary keyed by call graph node.  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    /* Combine the hashes of the known value and of the mask of unknown
       bits; two ipa_bits are interchangeable iff both members match.  */
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      /* Address 1 is an out-of-band marker for deleted slots, distinct from
	 NULL which marks empty ones.  */
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  static hashval_t
  hash (const value_range_base *p)
    {
      /* Hash the range kind together with both bound expressions.  */
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
    {
      return a->equal_p (*b);
    }
  static void
  mark_empty (value_range_base *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range_base *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range_base *p)
    {
      /* Address 1 marks deleted slots; NULL marks empty ones.  */
      return p == reinterpret_cast<const value_range_base *> (1);
    }
  static void
  mark_deleted (value_range_base *&p)
    {
      p = reinterpret_cast<value_range_base *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 154
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 171
5fe8e757
MJ
172/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175static bool
176ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177{
67348ccc 178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
179
180 if (!fs_opts)
181 return false;
2bf86c84 182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
183}
184
be95e2b9
MJ
185/* Return index of the formal whose tree is PTREE in function which corresponds
186 to INFO. */
187
d044dd17 188static int
f65f1ae3
MJ
189ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
518dc859
RL
191{
192 int i, count;
193
f65f1ae3 194 count = vec_safe_length (descriptors);
518dc859 195 for (i = 0; i < count; i++)
f65f1ae3 196 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
197 return i;
198
199 return -1;
200}
201
d044dd17
MJ
/* Return index of the formal parameter whose tree is PTREE in function which
   corresponds to INFO, or -1 when not found.  Thin wrapper over
   ipa_get_param_decl_index_1 working on INFO's descriptor vector.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
210
211/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
212 NODE. */
be95e2b9 213
f8e2a1ed
MJ
214static void
215ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 216 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
217{
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
3e293154 222
67348ccc 223 fndecl = node->decl;
0e8853ee 224 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
910ad8de 227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 228 {
209ca542 229 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
518dc859
RL
232 param_num++;
233 }
234}
235
3f84bf08
MJ
236/* Return how many formal parameters FNDECL has. */
237
fd29c024 238int
310bc633 239count_formal_params (tree fndecl)
3f84bf08
MJ
240{
241 tree parm;
242 int count = 0;
0e8853ee 243 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 244
910ad8de 245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
246 count++;
247
248 return count;
249}
250
0e8853ee
JH
251/* Return the declaration of Ith formal parameter of the function corresponding
252 to INFO. Note there is no setter function as this array is built just once
253 using ipa_initialize_node_params. */
254
255void
99b1c316 256ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
0e8853ee
JH
257{
258 fprintf (file, "param #%i", i);
f65f1ae3 259 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
260 {
261 fprintf (file, " ");
ef6cb4c7 262 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
263 }
264}
265
159f01f8
MJ
266/* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
0e8853ee 268
159f01f8 269static bool
0e8853ee
JH
270ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271{
99b1c316 272 class ipa_node_params *info = IPA_NODE_REF (node);
0e8853ee 273
f65f1ae3 274 if (!info->descriptors && param_count)
159f01f8
MJ
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
0e8853ee
JH
281}
282
f8e2a1ed
MJ
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Does nothing when descriptors are already allocated.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  /* Only populate when ipa_alloc_node_params actually allocated a fresh,
     zeroed descriptor vector.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
296
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  /* For addresses of CONST_DECLs also show the constant itself.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* Only arithmetic pass-throughs carry an extra operand.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Dump any aggregate parts of the jump function.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      /* Known-bits information, if any.  */
      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      /* Value-range information, if any.  */
      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
411
412
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* Indirect call sites carry extra information describing how the call
     target is derived from the parameters.  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
464
465/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 466
3e293154
MJ
467void
468ipa_print_all_jump_functions (FILE *f)
469{
470 struct cgraph_node *node;
471
ca30a539 472 fprintf (f, "\nJump functions:\n");
65c70e6b 473 FOR_EACH_FUNCTION (node)
3e293154
MJ
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477}
478
04be694e
MJ
479/* Set jfunc to be a know-really nothing jump function. */
480
481static void
482ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483{
484 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
04be694e
MJ
487}
488
b8f6e610
MJ
489/* Set JFUNC to be a copy of another jmp (to be used by jump function
490 combination code). The two functions will share their rdesc. */
491
492static void
493ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496{
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500}
501
7b872d9e
MJ
/* Set JFUNC to be a constant jump function carrying CONSTANT, which was taken
   at call graph edge CS.  For addresses of functions, also allocate and attach
   a reference description so that references can be tracked when edges are
   cloned or removed.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      /* The new descriptor starts with a single reference held by CS.  */
      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
525
526/* Set JFUNC to be a simple pass-through jump function. */
527static void
8b7773a4 528ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 529 bool agg_preserved)
7b872d9e
MJ
530{
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 535 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
536}
537
a2b4c188
KV
538/* Set JFUNC to be an unary pass through jump function. */
539
540static void
541ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543{
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549}
7b872d9e
MJ
550/* Set JFUNC to be an arithmetic pass through jump function. */
551
552static void
553ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555{
556 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
8b7773a4 560 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
561}
562
563/* Set JFUNC to be an ancestor jump function. */
564
565static void
566ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 567 int formal_id, bool agg_preserved)
7b872d9e
MJ
568{
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
8b7773a4 572 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
573}
574
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  /* bb_infos is indexed by basic block index.  */
  return &fbi->bb_infos[bb->index];
}
584
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
599
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* Under strict aliasing, only pointer-typed stores can alter a
	     virtual table pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* Stores to non-virtual fields cannot change the vtable pointer.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume any other statement may store the VMT pointer.  */
  return true;
}
664
3b97a5c7
MJ
665/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointerIt stores its result into DATA, which points to a
11478306 668 prop_type_change_info structure. */
f65cf2b7
MJ
669
670static bool
671check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672{
355fe088 673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683}
684
058d0a90
JH
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 738
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Describe the memory holding the VMT pointer for the alias walk.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL, respecting the alias
     oracle walk budget.  */
  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  /* Either the walk ran out of budget or a potential VMT store was found;
     conservatively declare the type unknown.  */
  ipa_set_jf_unknown (jfunc);
  return true;
}
794
058d0a90
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* For objects passed by pointer/reference whose type provably cannot
     change before CALL, skip the expensive memory-write scan.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}
817
f65cf2b7
MJ
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build a zero-offset dereference of ARG to serve as both the access and
     its base for the memory-write scan.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}
840
fdb0e1b4
MJ
841/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
842 boolean variable pointed to by DATA. */
843
844static bool
845mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
846 void *data)
847{
848 bool *b = (bool *) data;
849 *b = true;
850 return true;
851}
852
8aab5218
MJ
853/* Find the nearest valid aa status for parameter specified by INDEX that
854 dominates BB. */
855
56b40062
MJ
856static struct ipa_param_aa_status *
857find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
858 int index)
859{
860 while (true)
861 {
862 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
863 if (!bb)
864 return NULL;
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (!bi->param_aa_statuses.is_empty ()
867 && bi->param_aa_statuses[index].valid)
868 return &bi->param_aa_statuses[index];
869 }
870}
871
872/* Get AA status structure for the given BB and parameter with INDEX. Allocate
873 structures and/or intialize the result with a dominating description as
874 necessary. */
875
56b40062
MJ
876static struct ipa_param_aa_status *
877parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
878 int index)
879{
880 gcc_checking_assert (fbi);
881 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
882 if (bi->param_aa_statuses.is_empty ())
883 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
56b40062 884 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
8aab5218
MJ
885 if (!paa->valid)
886 {
887 gcc_checking_assert (!paa->parm_modified
888 && !paa->ref_modified
889 && !paa->pt_modified);
56b40062 890 struct ipa_param_aa_status *dom_paa;
8aab5218
MJ
891 dom_paa = find_dominating_aa_status (fbi, bb, index);
892 if (dom_paa)
893 *paa = *dom_paa;
894 else
895 paa->valid = true;
896 }
897
898 return paa;
899}
900
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified.  */
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  /* Cached result: a modification has already been found earlier.  */
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      /* Walk aborted because the budget was exhausted; assume modification
	 and stop doing alias walks for this function altogether.  */
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  /* Cache a positive result for subsequent queries.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
941
a2b4c188
KV
942/* If STMT is an assignment that loads a value from an parameter declaration,
943 return the index of the parameter in ipa_node_params which has not been
944 modified. Otherwise return -1. */
945
946static int
947load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 948 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
949 gimple *stmt)
950{
bda2bc48
MJ
951 int index;
952 tree op1;
953
a2b4c188
KV
954 if (!gimple_assign_single_p (stmt))
955 return -1;
956
bda2bc48
MJ
957 op1 = gimple_assign_rhs1 (stmt);
958 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
959 return -1;
960
bda2bc48
MJ
961 index = ipa_get_param_decl_index_1 (descriptors, op1);
962 if (index < 0
963 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
964 return -1;
965
bda2bc48 966 return index;
a2b4c188
KV
967}
968
8aab5218
MJ
969/* Return true if memory reference REF (which must be a load through parameter
970 with INDEX) loads data that are known to be unmodified in this function
971 before reaching statement STMT. */
8b7773a4
MJ
972
973static bool
56b40062 974parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 975 int index, gimple *stmt, tree ref)
8b7773a4 976{
56b40062 977 struct ipa_param_aa_status *paa;
8b7773a4
MJ
978 bool modified = false;
979 ao_ref refd;
980
c628d1c3
MJ
981 gcc_checking_assert (fbi);
982 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
983 if (paa->ref_modified)
984 return false;
8b7773a4 985
8aab5218 986 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 987 ao_ref_init (&refd, ref);
8aab5218 988 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
c628d1c3
MJ
989 &modified, NULL, NULL,
990 fbi->aa_walk_budget + 1);
991 if (walked < 0)
992 {
993 modified = true;
994 fbi->aa_walk_budget = 0;
995 }
996 else
997 fbi->aa_walk_budget -= walked;
998 if (modified)
8aab5218 999 paa->ref_modified = true;
8b7773a4
MJ
1000 return !modified;
1001}
1002
8aab5218
MJ
1003/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1004 is known to be unmodified in this function before reaching call statement
1005 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1006
1007static bool
56b40062 1008parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1009 gimple *call, tree parm)
8b7773a4
MJ
1010{
1011 bool modified = false;
1012 ao_ref refd;
1013
1014 /* It's unnecessary to calculate anything about memory contnets for a const
1015 function because it is not goin to use it. But do not cache the result
1016 either. Also, no such calculations for non-pointers. */
1017 if (!gimple_vuse (call)
c628d1c3 1018 || !POINTER_TYPE_P (TREE_TYPE (parm)))
8b7773a4
MJ
1019 return false;
1020
56b40062
MJ
1021 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1022 gimple_bb (call),
1023 index);
8aab5218 1024 if (paa->pt_modified)
8b7773a4
MJ
1025 return false;
1026
1027 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218 1028 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
c628d1c3
MJ
1029 &modified, NULL, NULL,
1030 fbi->aa_walk_budget + 1);
1031 if (walked < 0)
1032 {
1033 fbi->aa_walk_budget = 0;
1034 modified = true;
1035 }
1036 else
1037 fbi->aa_walk_budget -= walked;
8b7773a4 1038 if (modified)
8aab5218 1039 paa->pt_modified = true;
8b7773a4
MJ
1040 return !modified;
1041}
1042
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  /* Case 1: the base is a declaration, i.e. the aggregate was passed by
     value.  If it is not a tracked parameter, the index lookup fails.  */
  if (DECL_P (base))
    {
      /* Note: this INDEX intentionally shadows the outer one, which is only
	 used by the by-reference path below.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: the aggregate is passed by reference; the access must be a
     MEM_REF with zero offset whose pointer is an SSA name.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      /* The pointer is the (default definition of the) parameter itself.  */
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* Without a GUARANTEED_UNMODIFIED out-parameter we must be able to
	 prove the pointed-to data unchanged, otherwise we fail.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1137
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* Determine which parameter, if any, the right-hand side originates
     from, and remember the SSA name carrying its value in TC_SSA.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1 and 2: plain or arithmetic pass-through.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    /* The second operand must be an IPA invariant, and for
	       non-comparisons the result type must match OP1's type.  */
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  /* Conversions are not recorded as unary operations.  This case
	     deliberately falls through to the empty default.  */
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  /* Case 3: taking the address of a component of a parameter pointed-to
     object, i.e. an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1280
40591473
MJ
1281/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1282 it looks like:
1283
1284 iftmp.1_3 = &obj_2(D)->D.1762;
1285
1286 The base of the MEM_REF must be a default definition SSA NAME of a
1287 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1288 whole MEM_REF expression is returned and the offset calculated from any
1289 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1290 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1291
1292static tree
355fe088 1293get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1294{
588db50c 1295 HOST_WIDE_INT size;
40591473 1296 tree expr, parm, obj;
ee45a32d 1297 bool reverse;
40591473
MJ
1298
1299 if (!gimple_assign_single_p (assign))
1300 return NULL_TREE;
1301 expr = gimple_assign_rhs1 (assign);
1302
1303 if (TREE_CODE (expr) != ADDR_EXPR)
1304 return NULL_TREE;
1305 expr = TREE_OPERAND (expr, 0);
1306 obj = expr;
588db50c 1307 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1308
aca52e6f
RS
1309 offset_int mem_offset;
1310 if (!expr
1311 || TREE_CODE (expr) != MEM_REF
1312 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1313 return NULL_TREE;
1314 parm = TREE_OPERAND (expr, 0);
1315 if (TREE_CODE (parm) != SSA_NAME
1316 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1317 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1318 return NULL_TREE;
1319
aca52e6f 1320 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1321 *obj_p = obj;
1322 return expr;
1323}
1324
685b0d13 1325
b258210c
MJ
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the zero pointer, the other the casted
     address; TMP is the non-zero one.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-zero value must be computed by a single-predecessor block
     taking the address of a component of a parameter.  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1406
be95e2b9
MJ
1407/* Inspect the given TYPE and return true iff it has the same structure (the
1408 same number of fields of the same types) as a C++ member pointer. If
1409 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1410 corresponding fields there. */
1411
3e293154
MJ
1412static bool
1413type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1414{
1415 tree fld;
1416
1417 if (TREE_CODE (type) != RECORD_TYPE)
1418 return false;
1419
1420 fld = TYPE_FIELDS (type);
1421 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1422 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1423 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1424 return false;
1425
1426 if (method_ptr)
1427 *method_ptr = fld;
1428
910ad8de 1429 fld = DECL_CHAIN (fld);
8b7773a4 1430 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1431 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1432 return false;
1433 if (delta)
1434 *delta = fld;
1435
910ad8de 1436 if (DECL_CHAIN (fld))
3e293154
MJ
1437 return false;
1438
1439 return true;
1440}
1441
61502ca8 1442/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1443 return the rhs of its defining statement. Otherwise return RHS as it
1444 is. */
7ec49257
MJ
1445
1446static inline tree
1447get_ssa_def_if_simple_copy (tree rhs)
1448{
1449 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1450 {
355fe088 1451 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1452
1453 if (gimple_assign_single_p (def_stmt))
1454 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1455 else
1456 break;
7ec49257
MJ
1457 }
1458 return rhs;
1459}
1460
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  Lists built through
     add_to_agg_contents_list are kept sorted by ascending offset.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1473
46771da5
FX
1474/* Add a known content item into a linked list of ipa_known_agg_contents_list
1475 structure, in which all elements are sorted ascendingly by offset. */
0d48ee34 1476
46771da5
FX
1477static inline void
1478add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1479 struct ipa_known_agg_contents_list *item)
0d48ee34 1480{
46771da5
FX
1481 struct ipa_known_agg_contents_list *list = *plist;
1482
1483 for (; list; list = list->next)
0d48ee34 1484 {
46771da5
FX
1485 if (list->offset >= item->offset)
1486 break;
1487
1488 plist = &list->next;
0d48ee34
MJ
1489 }
1490
46771da5
FX
1491 item->next = list;
1492 *plist = item;
1493}
1494
1495/* Check whether a given known content is clobbered by certain element in
1496 a linked list of ipa_known_agg_contents_list. */
1497
1498static inline bool
1499clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1500 struct ipa_known_agg_contents_list *item)
1501{
1502 for (; list; list = list->next)
0d48ee34 1503 {
46771da5
FX
1504 if (list->offset >= item->offset)
1505 return list->offset < item->offset + item->size;
1506
1507 if (list->offset + list->size > item->offset)
1508 return true;
0d48ee34 1509 }
46771da5
FX
1510
1511 return false;
0d48ee34
MJ
1512}
1513
1514/* Build aggregate jump function from LIST, assuming there are exactly
46771da5 1515 CONST_COUNT constant entries there and that offset of the passed argument
0d48ee34
MJ
1516 is ARG_OFFSET and store it into JFUNC. */
1517
1518static void
1519build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1520 int const_count, HOST_WIDE_INT arg_offset,
1521 struct ipa_jump_func *jfunc)
1522{
1523 vec_alloc (jfunc->agg.items, const_count);
1524 while (list)
1525 {
1526 if (list->constant)
1527 {
1528 struct ipa_agg_jf_item item;
1529 item.offset = list->offset - arg_offset;
1530 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1531 item.value = unshare_expr_without_location (list->constant);
1532 jfunc->agg.items->quick_push (item);
1533 }
1534 list = list->next;
1535 }
1536}
1537
46771da5
FX
1538/* If STMT is a memory store to the object whose address is BASE, extract
1539 information (offset, size, and value) into CONTENT, and return true,
1540 otherwise we conservatively assume the whole object is modified with
1541 unknown content, and return false. CHECK_REF means that access to object
1542 is expected to be in form of MEM_REF expression. */
1543
1544static bool
1545extract_mem_content (gimple *stmt, tree base, bool check_ref,
1546 struct ipa_known_agg_contents_list *content)
1547{
1548 HOST_WIDE_INT lhs_offset, lhs_size;
1549 tree lhs, rhs, lhs_base;
1550 bool reverse;
1551
1552 if (!gimple_assign_single_p (stmt))
1553 return false;
1554
1555 lhs = gimple_assign_lhs (stmt);
1556 rhs = gimple_assign_rhs1 (stmt);
1557
1558 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1559 || TREE_CODE (lhs) == BIT_FIELD_REF
1560 || contains_bitfld_component_ref_p (lhs))
1561 return false;
1562
1563 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1564 &lhs_size, &reverse);
1565 if (!lhs_base)
1566 return false;
1567
1568 if (check_ref)
1569 {
1570 if (TREE_CODE (lhs_base) != MEM_REF
1571 || TREE_OPERAND (lhs_base, 0) != base
1572 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1573 return false;
1574 }
1575 else if (lhs_base != base)
1576 return false;
1577
1578 rhs = get_ssa_def_if_simple_copy (rhs);
1579
1580 content->size = lhs_size;
1581 content->offset = lhs_offset;
1582 content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
1583 content->next = NULL;
1584
1585 return true;
1586}
1587
8b7773a4
MJ
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  AA_WALK_BUDGET_P points to limit on number of
   statements we allow get_continuation_for_phi to examine.  */

static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* Tracking of aggregate contents can be disabled entirely.  */
  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  /* The pointed-to type must have a constant size.  */
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Skip over PHIs, staying within the AA walking budget.  */
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  /* A clobbering statement we cannot describe stops the walk.  */
	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1733
46771da5 1734
5d5f1e95
KV
1735/* Return the Ith param type of callee associated with call graph
1736 edge E. */
1737
1738tree
06d65050
JH
1739ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1740{
1741 int n;
1742 tree type = (e->callee
67348ccc 1743 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1744 : gimple_call_fntype (e->call_stmt));
1745 tree t = TYPE_ARG_TYPES (type);
1746
1747 for (n = 0; n < i; n++)
1748 {
1749 if (!t)
1750 break;
1751 t = TREE_CHAIN (t);
1752 }
1753 if (t)
1754 return TREE_VALUE (t);
1755 if (!e->callee)
1756 return NULL;
67348ccc 1757 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1758 for (n = 0; n < i; n++)
1759 {
1760 if (!t)
1761 return NULL;
1762 t = TREE_CHAIN (t);
1763 }
1764 if (t)
1765 return TREE_TYPE (t);
1766 return NULL;
1767}
1768
86cd0334
MJ
1769/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1770 allocated structure or a previously existing one shared with other jump
1771 functions and/or transformation summaries. */
1772
1773ipa_bits *
1774ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1775{
1776 ipa_bits tmp;
1777 tmp.value = value;
1778 tmp.mask = mask;
1779
1780 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1781 if (*slot)
1782 return *slot;
1783
1784 ipa_bits *res = ggc_alloc<ipa_bits> ();
1785 res->value = value;
1786 res->mask = mask;
1787 *slot = res;
1788
1789 return res;
1790}
1791
1792/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1793 table in order to avoid creating multiple same ipa_bits structures. */
1794
1795static void
1796ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1797 const widest_int &mask)
1798{
1799 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1800}
1801
1802/* Return a pointer to a value_range just like *TMP, but either find it in
1803 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1804
2506b92c
RB
1805static value_range_base *
1806ipa_get_value_range (value_range_base *tmp)
86cd0334 1807{
2506b92c 1808 value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
1809 if (*slot)
1810 return *slot;
1811
2506b92c 1812 value_range_base *vr = ggc_alloc<value_range_base> ();
86cd0334
MJ
1813 *vr = *tmp;
1814 *slot = vr;
1815
1816 return vr;
1817}
1818
1819/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1820 equiv set. Use hash table in order to avoid creating multiple same copies of
1821 value_ranges. */
1822
2506b92c 1823static value_range_base *
54994253 1824ipa_get_value_range (enum value_range_kind type, tree min, tree max)
86cd0334 1825{
2506b92c 1826 value_range_base tmp (type, min, max);
86cd0334
MJ
1827 return ipa_get_value_range (&tmp);
1828}
1829
1830/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1831 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1832 same value_range structures. */
1833
1834static void
54994253 1835ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
86cd0334
MJ
1836 tree min, tree max)
1837{
1838 jf->m_vr = ipa_get_value_range (type, min, max);
1839}
1840
46771da5 1841/* Assign to JF a pointer to a value_range just like TMP but either fetch a
86cd0334
MJ
1842 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1843
1844static void
2506b92c 1845ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
86cd0334
MJ
1846{
1847 jf->m_vr = ipa_get_value_range (tmp);
1848}
1849
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  FBI describes the function body of the caller.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls; also bail out if the jump
     functions for this edge have already been computed.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* For pointer arguments, work out the polymorphic call context so
	 that later devirtualization can use it.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: non-NULL-ness for pointers,
	 the SSA range info (converted to the formal parameter type) for
	 integral values.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      /* Known non-NULL pointer is encoded as ~[0, 0].  */
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range_base resvr;
	      value_range_base tmpvr (type,
				      wide_int_to_tree (TREE_TYPE (arg), min),
				      wide_int_to_tree (TREE_TYPE (arg), max));
	      /* Convert the range into the type of the formal parameter as
		 if through a NOP_EXPR.  */
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: nonzero-bit info for integers, alignment-derived
	 mask/value for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Now compute the core jump function itself: constant, pass-through
	 or ancestor.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc,
					 &fbi->aa_walk_budget);
    }
  /* Drop the context vector again if no argument had a useful one.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2028
749aa96d 2029/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2030 from BB. */
749aa96d 2031
062c604f 2032static void
56b40062 2033ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2034{
8aab5218
MJ
2035 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2036 int i;
749aa96d
MJ
2037 struct cgraph_edge *cs;
2038
8aab5218 2039 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2040 {
8aab5218 2041 struct cgraph_node *callee = cs->callee;
749aa96d 2042
8aab5218
MJ
2043 if (callee)
2044 {
d52f5295 2045 callee->ultimate_alias_target ();
8aab5218
MJ
2046 /* We do not need to bother analyzing calls to unknown functions
2047 unless they may become known during lto/whopr. */
2048 if (!callee->definition && !flag_lto)
2049 continue;
2050 }
2051 ipa_compute_jump_functions_for_edge (fbi, cs);
2052 }
749aa96d
MJ
2053}
2054
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only a plain single-RHS load can match.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* Accept either COMPONENT_REF of a MEM_REF or a bare MEM_REF; remember
     which field a COMPONENT_REF named so it can be checked below.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member pointer
     record (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the named
	 field must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Bare MEM_REF form: the constant offset must match the byte position
       of the field.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2106
2107/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2108
3e293154
MJ
2109static bool
2110ipa_is_ssa_with_stmt_def (tree t)
2111{
2112 if (TREE_CODE (t) == SSA_NAME
2113 && !SSA_NAME_IS_DEFAULT_DEF (t))
2114 return true;
2115 else
2116 return false;
2117}
2118
40591473
MJ
2119/* Find the indirect call graph edge corresponding to STMT and mark it as a
2120 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2121 indirect call graph edge. */
be95e2b9 2122
40591473 2123static struct cgraph_edge *
538dd0b7
DM
2124ipa_note_param_call (struct cgraph_node *node, int param_index,
2125 gcall *stmt)
3e293154 2126{
e33c6cd6 2127 struct cgraph_edge *cs;
3e293154 2128
d52f5295 2129 cs = node->get_edge (stmt);
b258210c 2130 cs->indirect_info->param_index = param_index;
8b7773a4 2131 cs->indirect_info->agg_contents = 0;
c13bc3d9 2132 cs->indirect_info->member_ptr = 0;
91bb9f80 2133 cs->indirect_info->guaranteed_unmodified = 0;
40591473 2134 return cs;
3e293154
MJ
2135}
2136
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from
   aggregates passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called pointer is directly an (SSA default def of a)
     formal parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* The called pointer may be loaded from an aggregate passed by value or
     reference.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be the pfn load.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be "pfn & 1" (possibly behind a conversion).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the pfn.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member-pointer parameter
     as the called pfn; which field carries the vbit is target-specific.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2331
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is directly the incoming value of a parameter.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type might have changed since function
	 entry.  */
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise try to see the object as an ancestor (base at a constant
	 offset) of a parameter.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic indirect call on the edge.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2389
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  /* Only indirect calls through an SSA name or virtual method calls are
     interesting here.  */
  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  /* NOTE(review): CS is checked for NULL above but dereferenced
     unconditionally below, which suggests get_edge is expected to always
     return an edge for this call — confirm and consider an assert.  */
  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      /* Remember whether the dynamic type of the object could be determined
	 (and thus whether the vptr may have changed).  */
      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2435
2436
e33c6cd6 2437/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2438 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2439 formal parameters are called. */
be95e2b9 2440
3e293154 2441static void
355fe088 2442ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2443{
726a989a 2444 if (is_gimple_call (stmt))
538dd0b7 2445 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2446}
2447
2448/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2449 If OP is a parameter declaration, mark it as used in the info structure
2450 passed in DATA. */
2451
2452static bool
355fe088 2453visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f 2454{
99b1c316 2455 class ipa_node_params *info = (class ipa_node_params *) data;
062c604f
MJ
2456
2457 op = get_base_address (op);
2458 if (op
2459 && TREE_CODE (op) == PARM_DECL)
2460 {
2461 int index = ipa_get_param_decl_index (info, op);
2462 gcc_assert (index >= 0);
310bc633 2463 ipa_set_param_used (info, index, true);
062c604f
MJ
2464 }
2465
2466 return false;
3e293154
MJ
2467}
2468
8aab5218
MJ
2469/* Scan the statements in BB and inspect the uses of formal parameters. Store
2470 the findings in various structures of the associated ipa_node_params
2471 structure, such as parameter flags, notes etc. FBI holds various data about
2472 the function being analyzed. */
be95e2b9 2473
062c604f 2474static void
56b40062 2475ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2476{
726a989a 2477 gimple_stmt_iterator gsi;
8aab5218
MJ
2478 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2479 {
355fe088 2480 gimple *stmt = gsi_stmt (gsi);
3e293154 2481
8aab5218
MJ
2482 if (is_gimple_debug (stmt))
2483 continue;
3e293154 2484
8aab5218
MJ
2485 ipa_analyze_stmt_uses (fbi, stmt);
2486 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2487 visit_ref_for_mod_analysis,
2488 visit_ref_for_mod_analysis,
2489 visit_ref_for_mod_analysis);
5fe8e757 2490 }
8aab5218
MJ
2491 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2492 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2493 visit_ref_for_mod_analysis,
2494 visit_ref_for_mod_analysis,
2495 visit_ref_for_mod_analysis);
2496}
2497
/* Calculate controlled uses of parameters of NODE.  A use is "controlled"
   when it is an argument of a (non-debug) call; any other use makes the
   count unknown (IPA_UNDESCRIBED_USE).  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call uses; any non-call, non-debug use makes the
		 parameter's uses undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2542
8aab5218 2543/* Free stuff in BI. */
062c604f 2544
8aab5218
MJ
2545static void
2546free_ipa_bb_info (struct ipa_bb_info *bi)
2547{
2548 bi->cg_edges.release ();
2549 bi->param_aa_statuses.release ();
3e293154
MJ
2550}
2551
/* Dominator walker driving the analysis.  For each basic block it analyzes
   parameter uses and computes jump functions, using the shared
   ipa_func_body_info passed to the constructor.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Per-function analysis state shared across all visited blocks.  */
  struct ipa_func_body_info *m_fbi;
};

/* Analyze one basic block BB before descending to its dominated children.
   Always returns NULL (no edge taken).  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2573
c3431191
ML
2574/* Release body info FBI. */
2575
2576void
2577ipa_release_body_info (struct ipa_func_body_info *fbi)
2578{
2579 int i;
2580 struct ipa_bb_info *bi;
2581
2582 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2583 free_ipa_bb_info (bi);
2584 fbi->bb_infos.release ();
2585}
2586
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* Analyze each node only once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When function specialization options forbid the analysis, mark all
     parameters as used with undescribable uses and stop.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  /* Switch to the function's body and make dominators available for the
     dominator walk below; undone by pop_cfun/free_dominance_info at the
     end.  */
  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  /* Distribute both direct and indirect outgoing edges into the per-BB
     info records so they can be processed block by block.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2646
be95e2b9 2647/* Update the jump functions associated with call graph edge E when the call
3e293154 2648 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2649 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2650
3e293154
MJ
2651static void
2652update_jump_functions_after_inlining (struct cgraph_edge *cs,
2653 struct cgraph_edge *e)
2654{
99b1c316
MS
2655 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2656 class ipa_edge_args *args = IPA_EDGE_REF (e);
3e293154
MJ
2657 int count = ipa_get_cs_argument_count (args);
2658 int i;
2659
2660 for (i = 0; i < count; i++)
2661 {
b258210c 2662 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
99b1c316 2663 class ipa_polymorphic_call_context *dst_ctx
5ce97055 2664 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2665
685b0d13
MJ
2666 if (dst->type == IPA_JF_ANCESTOR)
2667 {
b258210c 2668 struct ipa_jump_func *src;
8b7773a4 2669 int dst_fid = dst->value.ancestor.formal_id;
99b1c316 2670 class ipa_polymorphic_call_context *src_ctx
5ce97055 2671 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2672
b258210c
MJ
2673 /* Variable number of arguments can cause havoc if we try to access
2674 one that does not exist in the inlined edge. So make sure we
2675 don't. */
8b7773a4 2676 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2677 {
04be694e 2678 ipa_set_jf_unknown (dst);
b258210c
MJ
2679 continue;
2680 }
2681
8b7773a4
MJ
2682 src = ipa_get_ith_jump_func (top, dst_fid);
2683
5ce97055
JH
2684 if (src_ctx && !src_ctx->useless_p ())
2685 {
99b1c316 2686 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
2687
2688 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2689 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2690 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2691 ctx.offset_by (dst->value.ancestor.offset);
2692 if (!ctx.useless_p ())
2693 {
a7d1f3fe
ML
2694 if (!dst_ctx)
2695 {
2696 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2697 count);
2698 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2699 }
2700
2701 dst_ctx->combine_with (ctx);
5ce97055
JH
2702 }
2703 }
2704
8b7773a4
MJ
2705 if (src->agg.items
2706 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2707 {
2708 struct ipa_agg_jf_item *item;
2709 int j;
2710
2711 /* Currently we do not produce clobber aggregate jump functions,
2712 replace with merging when we do. */
2713 gcc_assert (!dst->agg.items);
2714
9771b263 2715 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2716 dst->agg.by_ref = src->agg.by_ref;
9771b263 2717 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2718 item->offset -= dst->value.ancestor.offset;
2719 }
2720
3b97a5c7
MJ
2721 if (src->type == IPA_JF_PASS_THROUGH
2722 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2723 {
2724 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2725 dst->value.ancestor.agg_preserved &=
2726 src->value.pass_through.agg_preserved;
2727 }
a2b4c188
KV
2728 else if (src->type == IPA_JF_PASS_THROUGH
2729 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2730 {
2731 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2732 dst->value.ancestor.agg_preserved = false;
2733 }
b258210c
MJ
2734 else if (src->type == IPA_JF_ANCESTOR)
2735 {
2736 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2737 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2738 dst->value.ancestor.agg_preserved &=
2739 src->value.ancestor.agg_preserved;
b258210c
MJ
2740 }
2741 else
04be694e 2742 ipa_set_jf_unknown (dst);
b258210c
MJ
2743 }
2744 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2745 {
b258210c
MJ
2746 struct ipa_jump_func *src;
2747 /* We must check range due to calls with variable number of arguments
2748 and we cannot combine jump functions with operations. */
2749 if (dst->value.pass_through.operation == NOP_EXPR
2750 && (dst->value.pass_through.formal_id
2751 < ipa_get_cs_argument_count (top)))
2752 {
8b7773a4
MJ
2753 int dst_fid = dst->value.pass_through.formal_id;
2754 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2755 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
99b1c316 2756 class ipa_polymorphic_call_context *src_ctx
5ce97055 2757 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2758
5ce97055
JH
2759 if (src_ctx && !src_ctx->useless_p ())
2760 {
99b1c316 2761 class ipa_polymorphic_call_context ctx = *src_ctx;
5ce97055
JH
2762
2763 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2764 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2765 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2766 if (!ctx.useless_p ())
2767 {
2768 if (!dst_ctx)
2769 {
2770 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2771 count);
2772 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2773 }
2774 dst_ctx->combine_with (ctx);
2775 }
2776 }
b8f6e610
MJ
2777 switch (src->type)
2778 {
2779 case IPA_JF_UNKNOWN:
04be694e 2780 ipa_set_jf_unknown (dst);
b8f6e610 2781 break;
b8f6e610
MJ
2782 case IPA_JF_CONST:
2783 ipa_set_jf_cst_copy (dst, src);
2784 break;
2785
2786 case IPA_JF_PASS_THROUGH:
2787 {
2788 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2789 enum tree_code operation;
2790 operation = ipa_get_jf_pass_through_operation (src);
2791
2792 if (operation == NOP_EXPR)
2793 {
3b97a5c7 2794 bool agg_p;
b8f6e610
MJ
2795 agg_p = dst_agg_p
2796 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2797 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 2798 }
a2b4c188
KV
2799 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2800 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
2801 else
2802 {
2803 tree operand = ipa_get_jf_pass_through_operand (src);
2804 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2805 operation);
2806 }
2807 break;
2808 }
2809 case IPA_JF_ANCESTOR:
2810 {
3b97a5c7 2811 bool agg_p;
b8f6e610
MJ
2812 agg_p = dst_agg_p
2813 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2814 ipa_set_ancestor_jf (dst,
2815 ipa_get_jf_ancestor_offset (src),
b8f6e610 2816 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2817 agg_p);
b8f6e610
MJ
2818 break;
2819 }
2820 default:
2821 gcc_unreachable ();
2822 }
8b7773a4
MJ
2823
2824 if (src->agg.items
b8f6e610 2825 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2826 {
2827 /* Currently we do not produce clobber aggregate jump
2828 functions, replace with merging when we do. */
2829 gcc_assert (!dst->agg.items);
2830
2831 dst->agg.by_ref = src->agg.by_ref;
9771b263 2832 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2833 }
b258210c
MJ
2834 }
2835 else
04be694e 2836 ipa_set_jf_unknown (dst);
3e293154 2837 }
b258210c
MJ
2838 }
2839}
2840
5ce97055
JH
2841/* If TARGET is an addr_expr of a function declaration, make it the
2842 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2843 Otherwise, return NULL. */
b258210c 2844
3949c4a7 2845struct cgraph_edge *
5ce97055
JH
2846ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2847 bool speculative)
b258210c
MJ
2848{
2849 struct cgraph_node *callee;
48b1474e 2850 bool unreachable = false;
b258210c 2851
ceeffab0
MJ
2852 if (TREE_CODE (target) == ADDR_EXPR)
2853 target = TREE_OPERAND (target, 0);
b258210c 2854 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2855 {
2856 target = canonicalize_constructor_val (target, NULL);
2857 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2858 {
db66bf68
JH
2859 /* Member pointer call that goes through a VMT lookup. */
2860 if (ie->indirect_info->member_ptr
2861 /* Or if target is not an invariant expression and we do not
2862 know if it will evaulate to function at runtime.
2863 This can happen when folding through &VAR, where &VAR
2864 is IP invariant, but VAR itself is not.
2865
2866 TODO: Revisit this when GCC 5 is branched. It seems that
2867 member_ptr check is not needed and that we may try to fold
2868 the expression and see if VAR is readonly. */
2869 || !is_gimple_ip_invariant (target))
2870 {
2871 if (dump_enabled_p ())
2872 {
4f5b9c80 2873 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118
ML
2874 "discovered direct call non-invariant %s\n",
2875 ie->caller->dump_name ());
db66bf68
JH
2876 }
2877 return NULL;
2878 }
2879
c13bc3d9 2880
2b5f0895
XDL
2881 if (dump_enabled_p ())
2882 {
4f5b9c80 2883 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
464d0118 2884 "discovered direct call to non-function in %s, "
807b7d62 2885 "making it __builtin_unreachable\n",
464d0118 2886 ie->caller->dump_name ());
2b5f0895 2887 }
3c9e6fca 2888
48b1474e 2889 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2890 callee = cgraph_node::get_create (target);
48b1474e 2891 unreachable = true;
a0a7b611 2892 }
48b1474e 2893 else
d52f5295 2894 callee = cgraph_node::get (target);
a0a7b611 2895 }
48b1474e 2896 else
d52f5295 2897 callee = cgraph_node::get (target);
a0a7b611
JH
2898
2899 /* Because may-edges are not explicitely represented and vtable may be external,
2900 we may create the first reference to the object in the unit. */
2901 if (!callee || callee->global.inlined_to)
2902 {
a0a7b611
JH
2903
2904 /* We are better to ensure we can refer to it.
2905 In the case of static functions we are out of luck, since we already
2906 removed its body. In the case of public functions we may or may
2907 not introduce the reference. */
2908 if (!canonicalize_constructor_val (target, NULL)
2909 || !TREE_PUBLIC (target))
2910 {
2911 if (dump_file)
2912 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
67914693 2913 "(%s -> %s) but cannot refer to it. Giving up.\n",
464d0118
ML
2914 ie->caller->dump_name (),
2915 ie->callee->dump_name ());
a0a7b611
JH
2916 return NULL;
2917 }
d52f5295 2918 callee = cgraph_node::get_create (target);
a0a7b611 2919 }
2b5f0895 2920
0127c169
JH
2921 /* If the edge is already speculated. */
2922 if (speculative && ie->speculative)
2923 {
2924 struct cgraph_edge *e2;
2925 struct ipa_ref *ref;
2926 ie->speculative_call_info (e2, ie, ref);
2927 if (e2->callee->ultimate_alias_target ()
2928 != callee->ultimate_alias_target ())
2929 {
2930 if (dump_file)
464d0118
ML
2931 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2932 "target (%s -> %s) but the call is already "
2933 "speculated to %s. Giving up.\n",
2934 ie->caller->dump_name (), callee->dump_name (),
2935 e2->callee->dump_name ());
0127c169
JH
2936 }
2937 else
2938 {
2939 if (dump_file)
2940 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
464d0118
ML
2941 "(%s -> %s) this agree with previous speculation.\n",
2942 ie->caller->dump_name (), callee->dump_name ());
0127c169
JH
2943 }
2944 return NULL;
2945 }
2946
2b5f0895
XDL
2947 if (!dbg_cnt (devirt))
2948 return NULL;
2949
1dbee8c9 2950 ipa_check_create_node_params ();
ceeffab0 2951
67914693 2952 /* We cannot make edges to inline clones. It is bug that someone removed
81fa35bd 2953 the cgraph node too early. */
17afc0fe
JH
2954 gcc_assert (!callee->global.inlined_to);
2955
48b1474e 2956 if (dump_file && !unreachable)
b258210c 2957 {
5ce97055 2958 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 2959 "(%s -> %s), for stmt ",
b258210c 2960 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2961 speculative ? "speculative" : "known",
464d0118
ML
2962 ie->caller->dump_name (),
2963 callee->dump_name ());
b258210c
MJ
2964 if (ie->call_stmt)
2965 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2966 else
2967 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2968 }
2b5f0895
XDL
2969 if (dump_enabled_p ())
2970 {
4f5b9c80 2971 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
807b7d62
ML
2972 "converting indirect call in %s to direct call to %s\n",
2973 ie->caller->name (), callee->name ());
2b5f0895 2974 }
5ce97055 2975 if (!speculative)
d8d5aef1
JH
2976 {
2977 struct cgraph_edge *orig = ie;
2978 ie = ie->make_direct (callee);
2979 /* If we resolved speculative edge the cost is already up to date
2980 for direct call (adjusted by inline_edge_duplication_hook). */
2981 if (ie == orig)
2982 {
56f62793 2983 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
2984 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2985 - eni_size_weights.call_cost);
2986 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2987 - eni_time_weights.call_cost);
2988 }
2989 }
5ce97055
JH
2990 else
2991 {
2992 if (!callee->can_be_discarded_p ())
2993 {
2994 cgraph_node *alias;
2995 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2996 if (alias)
2997 callee = alias;
2998 }
d8d5aef1 2999 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 3000 ie = ie->make_speculative
1bad9c18 3001 (callee, ie->count.apply_scale (8, 10));
5ce97055 3002 }
749aa96d 3003
b258210c 3004 return ie;
3e293154
MJ
3005}
3006
91bb9f80
MJ
3007/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3008 CONSTRUCTOR and return it. Return NULL if the search fails for some
3009 reason. */
3010
3011static tree
3012find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3013{
3014 tree type = TREE_TYPE (constructor);
3015 if (TREE_CODE (type) != ARRAY_TYPE
3016 && TREE_CODE (type) != RECORD_TYPE)
3017 return NULL;
3018
3019 unsigned ix;
3020 tree index, val;
3021 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3022 {
3023 HOST_WIDE_INT elt_offset;
3024 if (TREE_CODE (type) == ARRAY_TYPE)
3025 {
3026 offset_int off;
3027 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3028 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3029
3030 if (index)
3031 {
db9bbdec
RB
3032 if (TREE_CODE (index) == RANGE_EXPR)
3033 off = wi::to_offset (TREE_OPERAND (index, 0));
3034 else
3035 off = wi::to_offset (index);
91bb9f80
MJ
3036 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3037 {
3038 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3039 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3040 off = wi::sext (off - wi::to_offset (low_bound),
3041 TYPE_PRECISION (TREE_TYPE (index)));
3042 }
3043 off *= wi::to_offset (unit_size);
db9bbdec
RB
3044 /* ??? Handle more than just the first index of a
3045 RANGE_EXPR. */
91bb9f80
MJ
3046 }
3047 else
3048 off = wi::to_offset (unit_size) * ix;
3049
3050 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3051 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3052 continue;
3053 elt_offset = off.to_shwi ();
3054 }
3055 else if (TREE_CODE (type) == RECORD_TYPE)
3056 {
3057 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3058 if (DECL_BIT_FIELD (index))
3059 continue;
3060 elt_offset = int_bit_position (index);
3061 }
3062 else
3063 gcc_unreachable ();
3064
3065 if (elt_offset > req_offset)
3066 return NULL;
3067
3068 if (TREE_CODE (val) == CONSTRUCTOR)
3069 return find_constructor_constant_at_offset (val,
3070 req_offset - elt_offset);
3071
3072 if (elt_offset == req_offset
3073 && is_gimple_reg_type (TREE_TYPE (val))
3074 && is_gimple_ip_invariant (val))
3075 return val;
3076 }
3077 return NULL;
3078}
3079
3080/* Check whether SCALAR could be used to look up an aggregate interprocedural
3081 invariant from a static constructor and if so, return it. Otherwise return
3082 NULL. */
3083
3084static tree
3085ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3086{
3087 if (by_ref)
3088 {
3089 if (TREE_CODE (scalar) != ADDR_EXPR)
3090 return NULL;
3091 scalar = TREE_OPERAND (scalar, 0);
3092 }
3093
8813a647 3094 if (!VAR_P (scalar)
91bb9f80
MJ
3095 || !is_global_var (scalar)
3096 || !TREE_READONLY (scalar)
3097 || !DECL_INITIAL (scalar)
3098 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3099 return NULL;
3100
3101 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3102}
3103
3104/* Retrieve value from aggregate jump function AGG or static initializer of
3105 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3106 none. BY_REF specifies whether the value has to be passed by reference or
3107 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3108 to is set to true if the value comes from an initializer of a constant. */
8b7773a4
MJ
3109
3110tree
91bb9f80
MJ
3111ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3112 HOST_WIDE_INT offset, bool by_ref,
3113 bool *from_global_constant)
8b7773a4
MJ
3114{
3115 struct ipa_agg_jf_item *item;
3116 int i;
3117
91bb9f80
MJ
3118 if (scalar)
3119 {
3120 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3121 if (res)
3122 {
3123 if (from_global_constant)
3124 *from_global_constant = true;
3125 return res;
3126 }
3127 }
3128
3129 if (!agg
3130 || by_ref != agg->by_ref)
8b7773a4
MJ
3131 return NULL;
3132
9771b263 3133 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
3134 if (item->offset == offset)
3135 {
3136 /* Currently we do not have clobber values, return NULL for them once
3137 we do. */
3138 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3139 if (from_global_constant)
3140 *from_global_constant = false;
2c9561b5
MJ
3141 return item->value;
3142 }
8b7773a4
MJ
3143 return NULL;
3144}
3145
4502fe8d 3146/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3147 reference description RDESC. Return true if the reference has been
3148 successfully found and removed. */
4502fe8d 3149
568cda29 3150static bool
5e20cdc9 3151remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3152{
3153 struct ipa_ref *to_del;
3154 struct cgraph_edge *origin;
3155
3156 origin = rdesc->cs;
a854f856
MJ
3157 if (!origin)
3158 return false;
d122681a
ML
3159 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3160 origin->lto_stmt_uid);
568cda29
MJ
3161 if (!to_del)
3162 return false;
3163
d122681a 3164 to_del->remove_reference ();
4502fe8d 3165 if (dump_file)
464d0118
ML
3166 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3167 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
568cda29 3168 return true;
4502fe8d
MJ
3169}
3170
3171/* If JFUNC has a reference description with refcount different from
3172 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3173 NULL. JFUNC must be a constant jump function. */
3174
3175static struct ipa_cst_ref_desc *
3176jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3177{
3178 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3179 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3180 return rdesc;
3181 else
3182 return NULL;
3183}
3184
568cda29
MJ
3185/* If the value of constant jump function JFUNC is an address of a function
3186 declaration, return the associated call graph node. Otherwise return
3187 NULL. */
3188
3189static cgraph_node *
3190cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3191{
3192 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3193 tree cst = ipa_get_jf_constant (jfunc);
3194 if (TREE_CODE (cst) != ADDR_EXPR
3195 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3196 return NULL;
3197
d52f5295 3198 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3199}
3200
3201
3202/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3203 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3204 the edge specified in the rdesc. Return false if either the symbol or the
3205 reference could not be found, otherwise return true. */
3206
3207static bool
3208try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3209{
3210 struct ipa_cst_ref_desc *rdesc;
3211 if (jfunc->type == IPA_JF_CONST
3212 && (rdesc = jfunc_rdesc_usable (jfunc))
3213 && --rdesc->refcount == 0)
3214 {
5e20cdc9 3215 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3216 if (!symbol)
3217 return false;
3218
3219 return remove_described_reference (symbol, rdesc);
3220 }
3221 return true;
3222}
3223
b258210c
MJ
3224/* Try to find a destination for indirect edge IE that corresponds to a simple
3225 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3226 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3227 the type of the parameter to which the result of JFUNC is passed. If it can
3228 be determined, return the newly direct edge, otherwise return NULL.
d250540a 3229 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 3230
b258210c
MJ
3231static struct cgraph_edge *
3232try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3233 struct ipa_jump_func *jfunc, tree target_type,
99b1c316 3234 class ipa_node_params *new_root_info)
b258210c 3235{
4502fe8d 3236 struct cgraph_edge *cs;
b258210c 3237 tree target;
042ae7d2 3238 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3239 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3240 if (agg_contents)
3241 {
3242 bool from_global_constant;
3243 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3244 ie->indirect_info->offset,
3245 ie->indirect_info->by_ref,
3246 &from_global_constant);
3247 if (target
3248 && !from_global_constant
3249 && !ie->indirect_info->guaranteed_unmodified)
3250 return NULL;
3251 }
b258210c 3252 else
91bb9f80 3253 target = scalar;
d250540a
MJ
3254 if (!target)
3255 return NULL;
4502fe8d
MJ
3256 cs = ipa_make_edge_direct_to_target (ie, target);
3257
a12cd2db 3258 if (cs && !agg_contents)
568cda29
MJ
3259 {
3260 bool ok;
3261 gcc_checking_assert (cs->callee
ae6d0907
MJ
3262 && (cs != ie
3263 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3264 || !cgraph_node_for_jfunc (jfunc)
3265 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3266 ok = try_decrement_rdesc_refcount (jfunc);
3267 gcc_checking_assert (ok);
3268 }
4502fe8d
MJ
3269
3270 return cs;
b258210c
MJ
3271}
3272
bec81025
MJ
3273/* Return the target to be used in cases of impossible devirtualization. IE
3274 and target (the latter can be NULL) are dumped when dumping is enabled. */
3275
72972c22
MJ
3276tree
3277ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3278{
3279 if (dump_file)
3280 {
3281 if (target)
3282 fprintf (dump_file,
464d0118
ML
3283 "Type inconsistent devirtualization: %s->%s\n",
3284 ie->caller->dump_name (),
bec81025
MJ
3285 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3286 else
3287 fprintf (dump_file,
464d0118
ML
3288 "No devirtualization target in %s\n",
3289 ie->caller->dump_name ());
bec81025
MJ
3290 }
3291 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3292 cgraph_node::get_create (new_target);
bec81025
MJ
3293 return new_target;
3294}
3295
d250540a
MJ
3296/* Try to find a destination for indirect edge IE that corresponds to a virtual
3297 call based on a formal parameter which is described by jump function JFUNC
3298 and if it can be determined, make it direct and return the direct edge.
44210a96
MJ
3299 Otherwise, return NULL. CTX describes the polymorphic context that the
3300 parameter the call is based on brings along with it. */
b258210c
MJ
3301
3302static struct cgraph_edge *
3303try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a 3304 struct ipa_jump_func *jfunc,
99b1c316 3305 class ipa_polymorphic_call_context ctx)
3e293154 3306{
44210a96 3307 tree target = NULL;
5ce97055 3308 bool speculative = false;
85942f45 3309
2bf86c84 3310 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
85942f45 3311 return NULL;
b258210c 3312
44210a96 3313 gcc_assert (!ie->indirect_info->by_ref);
5ce97055
JH
3314
3315 /* Try to do lookup via known virtual table pointer value. */
2bf86c84
JH
3316 if (!ie->indirect_info->vptr_changed
3317 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
85942f45 3318 {
9de2f554
JH
3319 tree vtable;
3320 unsigned HOST_WIDE_INT offset;
91bb9f80
MJ
3321 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3322 : NULL;
3323 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
85942f45
JH
3324 ie->indirect_info->offset,
3325 true);
9de2f554
JH
3326 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3327 {
2994ab20 3328 bool can_refer;
0127c169 3329 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2994ab20
JH
3330 vtable, offset, &can_refer);
3331 if (can_refer)
9de2f554 3332 {
2994ab20 3333 if (!t
cb1180d5 3334 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
9de2f554 3335 || !possible_polymorphic_call_target_p
0127c169
JH
3336 (ie, cgraph_node::get (t)))
3337 {
33c3b6be 3338 /* Do not speculate builtin_unreachable, it is stupid! */
0127c169
JH
3339 if (!ie->indirect_info->vptr_changed)
3340 target = ipa_impossible_devirt_target (ie, target);
2994ab20
JH
3341 else
3342 target = NULL;
0127c169
JH
3343 }
3344 else
3345 {
3346 target = t;
3347 speculative = ie->indirect_info->vptr_changed;
3348 }
9de2f554
JH
3349 }
3350 }
85942f45
JH
3351 }
3352
44210a96
MJ
3353 ipa_polymorphic_call_context ie_context (ie);
3354 vec <cgraph_node *>targets;
3355 bool final;
d250540a 3356
44210a96
MJ
3357 ctx.offset_by (ie->indirect_info->offset);
3358 if (ie->indirect_info->vptr_changed)
3359 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3360 ie->indirect_info->otr_type);
3361 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3362 targets = possible_polymorphic_call_targets
3363 (ie->indirect_info->otr_type,
3364 ie->indirect_info->otr_token,
3365 ctx, &final);
3366 if (final && targets.length () <= 1)
5ce97055 3367 {
33c3b6be 3368 speculative = false;
44210a96
MJ
3369 if (targets.length () == 1)
3370 target = targets[0]->decl;
3371 else
3372 target = ipa_impossible_devirt_target (ie, NULL_TREE);
5ce97055 3373 }
2bf86c84 3374 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
44210a96 3375 && !ie->speculative && ie->maybe_hot_p ())
5bccb77a 3376 {
44210a96
MJ
3377 cgraph_node *n;
3378 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3379 ie->indirect_info->otr_token,
3380 ie->indirect_info->context);
3381 if (n)
5ce97055 3382 {
44210a96
MJ
3383 target = n->decl;
3384 speculative = true;
5ce97055 3385 }
5bccb77a 3386 }
b258210c
MJ
3387
3388 if (target)
450ad0cd 3389 {
44210a96
MJ
3390 if (!possible_polymorphic_call_target_p
3391 (ie, cgraph_node::get_create (target)))
0127c169 3392 {
29c43c83 3393 if (speculative)
0127c169
JH
3394 return NULL;
3395 target = ipa_impossible_devirt_target (ie, target);
3396 }
5ce97055 3397 return ipa_make_edge_direct_to_target (ie, target, speculative);
450ad0cd 3398 }
b258210c
MJ
3399 else
3400 return NULL;
3e293154
MJ
3401}
3402
3403/* Update the param called notes associated with NODE when CS is being inlined,
3404 assuming NODE is (potentially indirectly) inlined into CS->callee.
3405 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 3406 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 3407 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 3408
f8e2a1ed 3409static bool
e33c6cd6
MJ
3410update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3411 struct cgraph_node *node,
d52f5295 3412 vec<cgraph_edge *> *new_edges)
3e293154 3413{
99b1c316 3414 class ipa_edge_args *top;
b258210c 3415 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
99b1c316 3416 class ipa_node_params *new_root_info, *inlined_node_info;
f8e2a1ed 3417 bool res = false;
3e293154 3418
e33c6cd6 3419 ipa_check_create_edge_args ();
9e97ff61 3420 top = IPA_EDGE_REF (cs);
d250540a
MJ
3421 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3422 ? cs->caller->global.inlined_to
3423 : cs->caller);
e5cf5e11 3424 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
e33c6cd6
MJ
3425
3426 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 3427 {
99b1c316 3428 class cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 3429 struct ipa_jump_func *jfunc;
8b7773a4 3430 int param_index;
3ff29913 3431 cgraph_node *spec_target = NULL;
3e293154 3432
e33c6cd6 3433 next_ie = ie->next_callee;
3e293154 3434
5f902d76
JH
3435 if (ici->param_index == -1)
3436 continue;
e33c6cd6 3437
3e293154 3438 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 3439 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 3440 {
5ee53a06 3441 ici->param_index = -1;
3e293154
MJ
3442 continue;
3443 }
3444
8b7773a4
MJ
3445 param_index = ici->param_index;
3446 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06 3447
3ff29913
JH
3448 if (ie->speculative)
3449 {
3450 struct cgraph_edge *de;
3451 struct ipa_ref *ref;
3452 ie->speculative_call_info (de, ie, ref);
3453 spec_target = de->callee;
3454 }
3455
2bf86c84 3456 if (!opt_for_fn (node->decl, flag_indirect_inlining))
36b72910
JH
3457 new_direct_edge = NULL;
3458 else if (ici->polymorphic)
5ce97055 3459 {
44210a96
MJ
3460 ipa_polymorphic_call_context ctx;
3461 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3462 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
5ce97055 3463 }
b258210c 3464 else
e5cf5e11
PK
3465 {
3466 tree target_type = ipa_get_type (inlined_node_info, param_index);
3467 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3468 target_type,
3469 new_root_info);
3470 }
3471
042ae7d2 3472 /* If speculation was removed, then we need to do nothing. */
3ff29913
JH
3473 if (new_direct_edge && new_direct_edge != ie
3474 && new_direct_edge->callee == spec_target)
042ae7d2
JH
3475 {
3476 new_direct_edge->indirect_inlining_edge = 1;
3477 top = IPA_EDGE_REF (cs);
3478 res = true;
73d098df
JH
3479 if (!new_direct_edge->speculative)
3480 continue;
042ae7d2
JH
3481 }
3482 else if (new_direct_edge)
685b0d13 3483 {
b258210c 3484 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
3485 if (new_direct_edge->call_stmt)
3486 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
3487 = !gimple_check_call_matching_types (
3488 new_direct_edge->call_stmt,
67348ccc 3489 new_direct_edge->callee->decl, false);
b258210c
MJ
3490 if (new_edges)
3491 {
9771b263 3492 new_edges->safe_push (new_direct_edge);
b258210c
MJ
3493 res = true;
3494 }
042ae7d2 3495 top = IPA_EDGE_REF (cs);
3ff29913
JH
3496 /* If speculative edge was introduced we still need to update
3497 call info of the indirect edge. */
3498 if (!new_direct_edge->speculative)
3499 continue;
685b0d13 3500 }
3ff29913
JH
3501 if (jfunc->type == IPA_JF_PASS_THROUGH
3502 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
36b72910 3503 {
d0502276
JH
3504 if (ici->agg_contents
3505 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3506 && !ici->polymorphic)
36b72910
JH
3507 ici->param_index = -1;
3508 else
d0502276
JH
3509 {
3510 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3511 if (ici->polymorphic
3512 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3513 ici->vptr_changed = true;
3514 }
36b72910
JH
3515 }
3516 else if (jfunc->type == IPA_JF_ANCESTOR)
3517 {
d0502276
JH
3518 if (ici->agg_contents
3519 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3520 && !ici->polymorphic)
36b72910
JH
3521 ici->param_index = -1;
3522 else
3523 {
3524 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3525 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
d0502276
JH
3526 if (ici->polymorphic
3527 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3528 ici->vptr_changed = true;
36b72910
JH
3529 }
3530 }
3531 else
3532 /* Either we can find a destination for this edge now or never. */
3533 ici->param_index = -1;
3e293154 3534 }
e33c6cd6 3535
f8e2a1ed 3536 return res;
3e293154
MJ
3537}
3538
3539/* Recursively traverse subtree of NODE (including node) made of inlined
3540 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3541 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3542 update_jump_functions_after_inlining on all non-inlined edges that lead out
3543 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3544 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3545 created. */
be95e2b9 3546
f8e2a1ed 3547static bool
3e293154
MJ
3548propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3549 struct cgraph_node *node,
d52f5295 3550 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3551{
3552 struct cgraph_edge *e;
f8e2a1ed 3553 bool res;
3e293154 3554
e33c6cd6 3555 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3556
3557 for (e = node->callees; e; e = e->next_callee)
3558 if (!e->inline_failed)
f8e2a1ed 3559 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3560 else
3561 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3562 for (e = node->indirect_calls; e; e = e->next_callee)
3563 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3564
3565 return res;
3e293154
MJ
3566}
3567
4502fe8d
MJ
3568/* Combine two controlled uses counts as done during inlining. */
3569
3570static int
3571combine_controlled_uses_counters (int c, int d)
3572{
3573 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3574 return IPA_UNDESCRIBED_USE;
3575 else
3576 return c + d - 1;
3577}
3578
3579/* Propagate number of controlled users from CS->caleee to the new root of the
3580 tree of inlined nodes. */
3581
static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  /* After inlining, counters must be attributed to the root of the tree of
     inline clones rather than to the (now inlined) caller itself.  */
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only arguments that actually map onto callee formal parameters carry
     per-parameter use information.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  /* Only simple (NOP_EXPR) pass-throughs may carry a described
	     counter; anything else must already be undescribed.  */
	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All uses are gone in an IPA-CP clone; if the known constant
		 was a function address, drop the cloning-created reference
		 from the new root to that function.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  /* Fold the callee's remaining-use count into the constant's
	     reference descriptor.  */
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline clones between CS->caller
		     and the edge recorded in the descriptor, removing any
		     cloning-created references to N along the way.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameter count cannot be tracked;
     mark anything they touch as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3693
3e293154
MJ
3694/* Update jump functions and call note functions on inlining the call site CS.
3695 CS is expected to lead to a node already cloned by
3696 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
3698 created. */
be95e2b9 3699
f8e2a1ed 3700bool
3e293154 3701ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3702 vec<cgraph_edge *> *new_edges)
3e293154 3703{
5ee53a06 3704 bool changed;
f8e2a1ed
MJ
3705 /* Do nothing if the preparation phase has not been carried out yet
3706 (i.e. during early inlining). */
dd912cb8 3707 if (!ipa_node_params_sum)
f8e2a1ed 3708 return false;
6fe906a3 3709 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3710
4502fe8d 3711 propagate_controlled_uses (cs);
5ee53a06
JH
3712 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3713
5ee53a06 3714 return changed;
518dc859
RL
3715}
3716
86cd0334
MJ
3717/* Ensure that array of edge arguments infos is big enough to accommodate a
3718 structure for all edges and reallocates it if not. Also, allocate
   associated hash tables if they do not already exist.  */
3720
void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  /* The bits and value-range hash tables are created lazily; 37 is just the
     initial bucket count.  */
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
3733
771578a0 3734/* Free all ipa_edge structures. */
be95e2b9 3735
518dc859 3736void
771578a0 3737ipa_free_all_edge_args (void)
518dc859 3738{
6fe906a3 3739 if (!ipa_edge_args_sum)
9771b263
DN
3740 return;
3741
6fe906a3
MJ
3742 ipa_edge_args_sum->release ();
3743 ipa_edge_args_sum = NULL;
518dc859
RL
3744}
3745
771578a0 3746/* Free all ipa_node_params structures. */
be95e2b9 3747
518dc859 3748void
771578a0 3749ipa_free_all_node_params (void)
518dc859 3750{
a0a348b1 3751 ipa_node_params_sum->release ();
dd912cb8 3752 ipa_node_params_sum = NULL;
771578a0
MJ
3753}
3754
9d3e0adc 3755/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 3756 tables if they do not already exist. */
04be694e
MJ
3757
void
ipcp_transformation_initialize (void)
{
  /* The bits and value-range hash tables are shared with other IPA
     summaries; create them lazily if nothing has done so yet.  */
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
}
3768
2c9561b5
MJ
3769/* Set the aggregate replacements of NODE to be AGGVALS. */
3770
void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* Make sure the transformation summary (and its hash tables) exist before
     attaching the aggregate replacement chain to NODE's entry.  */
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
3779
6fe906a3
MJ
3780/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3781 count data structures accordingly. */
be95e2b9 3782
6fe906a3
MJ
void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* If a constant jump function's descriptor still points at the
	     edge being removed, clear the back-pointer so it does not
	     dangle.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
3801
6fe906a3
MJ
3802/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */
be95e2b9 3804
6fe906a3
MJ
void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The vec_safe_copy above shared the aggregate item vector; give the
	 new jump function its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Same caller (e.g. a duplicated call statement): clone the
		 IPA reference and give the new edge a fresh descriptor.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* The descriptor belongs to the edge being duplicated; chain
		 the new descriptor onto the duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A duplicated call site in the same caller adds one more
	     controlled use of the passed-through parameter.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
3895
dd912cb8 3896/* Analyze newly added function into callgraph. */
be95e2b9 3897
771578a0 3898static void
dd912cb8 3899ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3900{
dd912cb8
ML
3901 if (node->has_gimple_body_p ())
3902 ipa_analyze_node (node);
3903}
771578a0 3904
dd912cb8
ML
3905/* Hook that is called by summary when a node is duplicated. */
3906
void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  /* Copy the per-parameter data; lattices are scratch data used only during
     propagation and are not carried over to the clone.  */
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  /* Deep-copy the chain of aggregate replacement values, if any.  Note that
     the copied chain ends up in reverse order of the original.  */
  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_transformation_initialize ();
      /* Re-fetch SRC's summary: get_create may have reallocated the
	 underlying storage.  */
      src_trans = ipcp_transformation_sum->get_create (src);
      ipcp_transformation *dst_trans
	= ipcp_transformation_sum->get_create (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
	= ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
	{
	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
	  for (unsigned i = 0; i < src_vr->length (); ++i)
	    dst_vr->quick_push ((*src_vr)[i]);
	}
    }
}
3963
3964/* Register our cgraph hooks if they are not already there. */
be95e2b9 3965
518dc859 3966void
771578a0 3967ipa_register_cgraph_hooks (void)
518dc859 3968{
dd912cb8 3969 ipa_check_create_node_params ();
6fe906a3 3970 ipa_check_create_edge_args ();
dd912cb8 3971
dd912cb8 3972 function_insertion_hook_holder =
3dafb85c 3973 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3974}
518dc859 3975
771578a0 3976/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3977
771578a0
MJ
static void
ipa_unregister_cgraph_hooks (void)
{
  /* Drop the insertion hook and clear the holder so a later re-registration
     starts from a clean state.  */
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
3984
3985/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3986 longer needed after ipa-cp. */
be95e2b9 3987
771578a0 3988void
e33c6cd6 3989ipa_free_all_structures_after_ipa_cp (void)
3e293154 3990{
2bf86c84 3991 if (!optimize && !in_lto_p)
3e293154
MJ
3992 {
3993 ipa_free_all_edge_args ();
3994 ipa_free_all_node_params ();
2651e637
ML
3995 ipcp_sources_pool.release ();
3996 ipcp_cst_values_pool.release ();
3997 ipcp_poly_ctx_values_pool.release ();
3998 ipcp_agg_lattice_pool.release ();
3e293154 3999 ipa_unregister_cgraph_hooks ();
601f3293 4000 ipa_refdesc_pool.release ();
3e293154
MJ
4001 }
4002}
4003
4004/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4005 longer needed after indirect inlining. */
be95e2b9 4006
3e293154 4007void
e33c6cd6 4008ipa_free_all_structures_after_iinln (void)
771578a0
MJ
4009{
4010 ipa_free_all_edge_args ();
4011 ipa_free_all_node_params ();
4012 ipa_unregister_cgraph_hooks ();
2651e637
ML
4013 ipcp_sources_pool.release ();
4014 ipcp_cst_values_pool.release ();
4015 ipcp_poly_ctx_values_pool.release ();
4016 ipcp_agg_lattice_pool.release ();
601f3293 4017 ipa_refdesc_pool.release ();
518dc859
RL
4018}
4019
dcd416e3 4020/* Print ipa_tree_map data structures of all functions in the
518dc859 4021 callgraph to F. */
be95e2b9 4022
518dc859 4023void
2c9561b5 4024ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4025{
4026 int i, count;
99b1c316 4027 class ipa_node_params *info;
518dc859 4028
67348ccc 4029 if (!node->definition)
3e293154
MJ
4030 return;
4031 info = IPA_NODE_REF (node);
464d0118 4032 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3e293154
MJ
4033 count = ipa_get_param_count (info);
4034 for (i = 0; i < count; i++)
518dc859 4035 {
4502fe8d
MJ
4036 int c;
4037
a4e33812 4038 fprintf (f, " ");
e067bd43 4039 ipa_dump_param (f, info, i);
339f49ec
JH
4040 if (ipa_is_param_used (info, i))
4041 fprintf (f, " used");
4502fe8d
MJ
4042 c = ipa_get_controlled_uses (info, i);
4043 if (c == IPA_UNDESCRIBED_USE)
4044 fprintf (f, " undescribed_use");
4045 else
4046 fprintf (f, " controlled_uses=%i", c);
3e293154 4047 fprintf (f, "\n");
518dc859
RL
4048 }
4049}
dcd416e3 4050
ca30a539 4051/* Print ipa_tree_map data structures of all functions in the
3e293154 4052 callgraph to F. */
be95e2b9 4053
3e293154 4054void
ca30a539 4055ipa_print_all_params (FILE * f)
3e293154
MJ
4056{
4057 struct cgraph_node *node;
4058
ca30a539 4059 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4060 FOR_EACH_FUNCTION (node)
ca30a539 4061 ipa_print_node_params (f, node);
3e293154 4062}
3f84bf08 4063
2c9561b5
MJ
4064/* Dump the AV linked list. */
4065
void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  /* COMMA suppresses the separator before the first element only.  */
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
4080
fb3f88cc
JH
4081/* Stream out jump function JUMP_FUNC to OB. */
4082
static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* Type and the ADDR_EXPR flag are packed into a single uhwi; the reader
     decodes them with val / 2 and val & 1.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate part: element count, by_ref flag, then the items.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-bits information, guarded by a presence bit.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Value-range information, likewise guarded by a presence bit.  */
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4171
4172/* Read in jump function JUMP_FUNC from IB. */
4173
static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  /* The writer packed type * 2 + ADDR_EXPR-flag into one uhwi.  */
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	/* FLAG means the writer stripped an outer ADDR_EXPR; restore it,
	   but only when this node prevails.  */
	if (flag && prevails)
	  t = build_fold_addr_expr (t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate part.  When the node does not prevail, the data must still be
     consumed from the stream but is discarded.  */
  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      if (prevails)
        jump_func->agg.items->quick_push (item);
    }

  /* Known-bits information, guarded by a presence bit.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
        ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Value-range information, likewise guarded.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
        ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4279
e33c6cd6
MJ
4280/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4281 relevant to indirect inlining to OB. */
661e7330
MJ
4282
static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful for aggregate or polymorphic calls;
     otherwise it must be zero and is not streamed.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4311
e33c6cd6
MJ
4312/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4313 relevant to indirect inlining from IB. */
661e7330
MJ
4314
static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  /* Must mirror ipa_write_indirect_edge_info exactly.  */
  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4342
fb3f88cc
JH
4343/* Stream out NODE info to OB. */
4344
static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  /* For each outgoing edge, the argument count is doubled and the low bit
     records whether polymorphic call contexts follow each jump function.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
4405
f714ecf5 4406/* Stream in edge E from IB. */
7b377b22 4407
f714ecf5 4408static void
99b1c316
MS
4409ipa_read_edge_info (class lto_input_block *ib,
4410 class data_in *data_in,
f714ecf5 4411 struct cgraph_edge *e, bool prevails)
7b377b22 4412{
f714ecf5
JH
4413 int count = streamer_read_uhwi (ib);
4414 bool contexts_computed = count & 1;
4415
4416 count /= 2;
4417 if (!count)
4418 return;
4419 if (prevails && e->possibly_call_in_translation_unit_p ())
4420 {
99b1c316 4421 class ipa_edge_args *args = IPA_EDGE_REF (e);
f714ecf5
JH
4422 vec_safe_grow_cleared (args->jump_functions, count);
4423 if (contexts_computed)
4424 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4425 for (int k = 0; k < count; k++)
4426 {
4427 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4428 data_in, prevails);
4429 if (contexts_computed)
4430 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4431 (ib, data_in);
4432 }
4433 }
4434 else
4435 {
4436 for (int k = 0; k < count; k++)
4437 {
4438 struct ipa_jump_func dummy;
4439 ipa_read_jump_function (ib, &dummy, e,
4440 data_in, prevails);
4441 if (contexts_computed)
4442 {
99b1c316 4443 class ipa_polymorphic_call_context ctx;
f714ecf5
JH
4444 ctx.stream_in (ib, data_in);
4445 }
4446 }
4447 }
7b377b22
JH
4448}
4449
61502ca8 4450/* Stream in NODE info from IB. */
fb3f88cc
JH
4451
static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  /* For non-prevailing nodes the stream must still be consumed, but nothing
     is stored.  */
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4503
4504/* Write jump functions for nodes in SET. */
4505
void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if the IPA summaries were never built.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes that will be streamed, so the reader knows
     how many records to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4545
4546/* Read section in file FILE_DATA of length LEN with data DATA. */
4547
4548static void
4549ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4550 size_t len)
4551{
4552 const struct lto_function_header *header =
4553 (const struct lto_function_header *) data;
4ad9a9de
EB
4554 const int cfg_offset = sizeof (struct lto_function_header);
4555 const int main_offset = cfg_offset + header->cfg_size;
4556 const int string_offset = main_offset + header->main_size;
99b1c316 4557 class data_in *data_in;
fb3f88cc
JH
4558 unsigned int i;
4559 unsigned int count;
4560
207c68cd 4561 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4562 header->main_size, file_data->mode_table);
fb3f88cc
JH
4563
4564 data_in =
4565 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4566 header->string_size, vNULL);
412288f1 4567 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4568
4569 for (i = 0; i < count; i++)
4570 {
4571 unsigned int index;
4572 struct cgraph_node *node;
7380e6ef 4573 lto_symtab_encoder_t encoder;
fb3f88cc 4574
412288f1 4575 index = streamer_read_uhwi (&ib_main);
7380e6ef 4576 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4577 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4578 index));
67348ccc 4579 gcc_assert (node->definition);
fb3f88cc
JH
4580 ipa_read_node_info (&ib_main, node, data_in);
4581 }
4582 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4583 len);
4584 lto_data_in_delete (data_in);
4585}
4586
4587/* Read ipcp jump functions. */
4588
4589void
4590ipa_prop_read_jump_functions (void)
4591{
4592 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4593 struct lto_file_decl_data *file_data;
4594 unsigned int j = 0;
4595
4596 ipa_check_create_node_params ();
4597 ipa_check_create_edge_args ();
4598 ipa_register_cgraph_hooks ();
4599
4600 while ((file_data = file_data_vec[j++]))
4601 {
4602 size_t len;
4603 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4604
4605 if (data)
4606 ipa_prop_read_section (file_data, data, len);
4607 }
4608}
4609
2c9561b5 4610void
04be694e 4611write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
4612{
4613 int node_ref;
4614 unsigned int count = 0;
4615 lto_symtab_encoder_t encoder;
4616 struct ipa_agg_replacement_value *aggvals, *av;
4617
4618 aggvals = ipa_get_agg_replacements_for_node (node);
4619 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4620 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4621 streamer_write_uhwi (ob, node_ref);
4622
4623 for (av = aggvals; av; av = av->next)
4624 count++;
4625 streamer_write_uhwi (ob, count);
4626
4627 for (av = aggvals; av; av = av->next)
4628 {
7b920a9a
MJ
4629 struct bitpack_d bp;
4630
2c9561b5
MJ
4631 streamer_write_uhwi (ob, av->offset);
4632 streamer_write_uhwi (ob, av->index);
4633 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4634
4635 bp = bitpack_create (ob->main_stream);
4636 bp_pack_value (&bp, av->by_ref, 1);
4637 streamer_write_bitpack (&bp);
2c9561b5 4638 }
04be694e 4639
9d3e0adc 4640 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
4641 if (ts && vec_safe_length (ts->m_vr) > 0)
4642 {
4643 count = ts->m_vr->length ();
4644 streamer_write_uhwi (ob, count);
4645 for (unsigned i = 0; i < count; ++i)
4646 {
4647 struct bitpack_d bp;
4648 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4649 bp = bitpack_create (ob->main_stream);
4650 bp_pack_value (&bp, parm_vr->known, 1);
4651 streamer_write_bitpack (&bp);
4652 if (parm_vr->known)
4653 {
4654 streamer_write_enum (ob->main_stream, value_rang_type,
4655 VR_LAST, parm_vr->type);
4656 streamer_write_wide_int (ob, parm_vr->min);
4657 streamer_write_wide_int (ob, parm_vr->max);
4658 }
4659 }
4660 }
4661 else
4662 streamer_write_uhwi (ob, 0);
4663
209ca542
PK
4664 if (ts && vec_safe_length (ts->bits) > 0)
4665 {
4666 count = ts->bits->length ();
4667 streamer_write_uhwi (ob, count);
4668
4669 for (unsigned i = 0; i < count; ++i)
4670 {
86cd0334 4671 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 4672 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 4673 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 4674 streamer_write_bitpack (&bp);
86cd0334 4675 if (bits_jfunc)
209ca542 4676 {
86cd0334
MJ
4677 streamer_write_widest_int (ob, bits_jfunc->value);
4678 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
4679 }
4680 }
4681 }
4682 else
4683 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
4684}
4685
4686/* Stream in the aggregate value replacement chain for NODE from IB. */
4687
4688static void
04be694e
MJ
4689read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4690 data_in *data_in)
2c9561b5
MJ
4691{
4692 struct ipa_agg_replacement_value *aggvals = NULL;
4693 unsigned int count, i;
4694
4695 count = streamer_read_uhwi (ib);
4696 for (i = 0; i <count; i++)
4697 {
4698 struct ipa_agg_replacement_value *av;
7b920a9a 4699 struct bitpack_d bp;
2c9561b5 4700
766090c2 4701 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
4702 av->offset = streamer_read_uhwi (ib);
4703 av->index = streamer_read_uhwi (ib);
4704 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4705 bp = streamer_read_bitpack (ib);
4706 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4707 av->next = aggvals;
4708 aggvals = av;
4709 }
4710 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 4711
209ca542
PK
4712 count = streamer_read_uhwi (ib);
4713 if (count > 0)
4714 {
9d3e0adc
ML
4715 ipcp_transformation_initialize ();
4716 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
8bc5448f
KV
4717 vec_safe_grow_cleared (ts->m_vr, count);
4718 for (i = 0; i < count; i++)
4719 {
4720 ipa_vr *parm_vr;
4721 parm_vr = &(*ts->m_vr)[i];
4722 struct bitpack_d bp;
4723 bp = streamer_read_bitpack (ib);
4724 parm_vr->known = bp_unpack_value (&bp, 1);
4725 if (parm_vr->known)
4726 {
54994253 4727 parm_vr->type = streamer_read_enum (ib, value_range_kind,
8bc5448f
KV
4728 VR_LAST);
4729 parm_vr->min = streamer_read_wide_int (ib);
4730 parm_vr->max = streamer_read_wide_int (ib);
4731 }
4732 }
4733 }
4734 count = streamer_read_uhwi (ib);
4735 if (count > 0)
4736 {
9d3e0adc
ML
4737 ipcp_transformation_initialize ();
4738 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
209ca542
PK
4739 vec_safe_grow_cleared (ts->bits, count);
4740
4741 for (i = 0; i < count; i++)
4742 {
209ca542 4743 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
4744 bool known = bp_unpack_value (&bp, 1);
4745 if (known)
209ca542 4746 {
86cd0334
MJ
4747 ipa_bits *bits
4748 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4749 streamer_read_widest_int (ib));
4750 (*ts->bits)[i] = bits;
209ca542
PK
4751 }
4752 }
4753 }
2c9561b5
MJ
4754}
4755
4756/* Write all aggregate replacement for nodes in set. */
4757
4758void
04be694e 4759ipcp_write_transformation_summaries (void)
2c9561b5
MJ
4760{
4761 struct cgraph_node *node;
4762 struct output_block *ob;
4763 unsigned int count = 0;
4764 lto_symtab_encoder_iterator lsei;
4765 lto_symtab_encoder_t encoder;
4766
2c9561b5
MJ
4767 ob = create_output_block (LTO_section_ipcp_transform);
4768 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 4769 ob->symbol = NULL;
2c9561b5
MJ
4770 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4771 lsei_next_function_in_partition (&lsei))
4772 {
4773 node = lsei_cgraph_node (lsei);
04be694e 4774 if (node->has_gimple_body_p ())
2c9561b5
MJ
4775 count++;
4776 }
4777
4778 streamer_write_uhwi (ob, count);
4779
4780 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4781 lsei_next_function_in_partition (&lsei))
4782 {
4783 node = lsei_cgraph_node (lsei);
04be694e
MJ
4784 if (node->has_gimple_body_p ())
4785 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
4786 }
4787 streamer_write_char_stream (ob->main_stream, 0);
4788 produce_asm (ob, NULL);
4789 destroy_output_block (ob);
4790}
4791
4792/* Read replacements section in file FILE_DATA of length LEN with data
4793 DATA. */
4794
4795static void
4796read_replacements_section (struct lto_file_decl_data *file_data,
4797 const char *data,
4798 size_t len)
4799{
4800 const struct lto_function_header *header =
4801 (const struct lto_function_header *) data;
4802 const int cfg_offset = sizeof (struct lto_function_header);
4803 const int main_offset = cfg_offset + header->cfg_size;
4804 const int string_offset = main_offset + header->main_size;
99b1c316 4805 class data_in *data_in;
2c9561b5
MJ
4806 unsigned int i;
4807 unsigned int count;
4808
207c68cd 4809 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4810 header->main_size, file_data->mode_table);
2c9561b5
MJ
4811
4812 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4813 header->string_size, vNULL);
2c9561b5
MJ
4814 count = streamer_read_uhwi (&ib_main);
4815
4816 for (i = 0; i < count; i++)
4817 {
4818 unsigned int index;
4819 struct cgraph_node *node;
4820 lto_symtab_encoder_t encoder;
4821
4822 index = streamer_read_uhwi (&ib_main);
4823 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4824 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4825 index));
67348ccc 4826 gcc_assert (node->definition);
04be694e 4827 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
4828 }
4829 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4830 len);
4831 lto_data_in_delete (data_in);
4832}
4833
4834/* Read IPA-CP aggregate replacements. */
4835
4836void
04be694e 4837ipcp_read_transformation_summaries (void)
2c9561b5
MJ
4838{
4839 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4840 struct lto_file_decl_data *file_data;
4841 unsigned int j = 0;
4842
4843 while ((file_data = file_data_vec[j++]))
4844 {
4845 size_t len;
4846 const char *data = lto_get_section_data (file_data,
4847 LTO_section_ipcp_transform,
4848 NULL, &len);
4849 if (data)
4850 read_replacements_section (file_data, data, len);
4851 }
4852}
4853
4854/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4855 NODE. */
4856
4857static void
4858adjust_agg_replacement_values (struct cgraph_node *node,
4859 struct ipa_agg_replacement_value *aggval)
4860{
4861 struct ipa_agg_replacement_value *v;
4862 int i, c = 0, d = 0, *adj;
4863
4864 if (!node->clone.combined_args_to_skip)
4865 return;
4866
4867 for (v = aggval; v; v = v->next)
4868 {
4869 gcc_assert (v->index >= 0);
4870 if (c < v->index)
4871 c = v->index;
4872 }
4873 c++;
4874
4875 adj = XALLOCAVEC (int, c);
4876 for (i = 0; i < c; i++)
4877 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4878 {
4879 adj[i] = -1;
4880 d++;
4881 }
4882 else
4883 adj[i] = i - d;
4884
4885 for (v = aggval; v; v = v->next)
4886 v->index = adj[v->index];
4887}
4888
8aab5218
MJ
4889/* Dominator walker driving the ipcp modification phase. */
4890
4891class ipcp_modif_dom_walker : public dom_walker
4892{
4893public:
56b40062 4894 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
f65f1ae3 4895 vec<ipa_param_descriptor, va_gc> *descs,
8aab5218
MJ
4896 struct ipa_agg_replacement_value *av,
4897 bool *sc, bool *cc)
4898 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4899 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4900
3daacdcd 4901 virtual edge before_dom_children (basic_block);
8aab5218
MJ
4902
4903private:
56b40062 4904 struct ipa_func_body_info *m_fbi;
f65f1ae3 4905 vec<ipa_param_descriptor, va_gc> *m_descriptors;
8aab5218
MJ
4906 struct ipa_agg_replacement_value *m_aggval;
4907 bool *m_something_changed, *m_cfg_changed;
4908};
4909
3daacdcd 4910edge
8aab5218
MJ
4911ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4912{
4913 gimple_stmt_iterator gsi;
4914 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4915 {
4916 struct ipa_agg_replacement_value *v;
355fe088 4917 gimple *stmt = gsi_stmt (gsi);
8aab5218 4918 tree rhs, val, t;
86003645
RS
4919 HOST_WIDE_INT offset;
4920 poly_int64 size;
8aab5218
MJ
4921 int index;
4922 bool by_ref, vce;
4923
4924 if (!gimple_assign_load_p (stmt))
4925 continue;
4926 rhs = gimple_assign_rhs1 (stmt);
4927 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4928 continue;
2c9561b5 4929
8aab5218
MJ
4930 vce = false;
4931 t = rhs;
4932 while (handled_component_p (t))
4933 {
4934 /* V_C_E can do things like convert an array of integers to one
4935 bigger integer and similar things we do not handle below. */
4936 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4937 {
4938 vce = true;
4939 break;
4940 }
4941 t = TREE_OPERAND (t, 0);
4942 }
4943 if (vce)
4944 continue;
4945
ff302741
PB
4946 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4947 &offset, &size, &by_ref))
8aab5218
MJ
4948 continue;
4949 for (v = m_aggval; v; v = v->next)
4950 if (v->index == index
4951 && v->offset == offset)
4952 break;
4953 if (!v
4954 || v->by_ref != by_ref
86003645
RS
4955 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
4956 size))
8aab5218
MJ
4957 continue;
4958
4959 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4960 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4961 {
4962 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4963 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4964 else if (TYPE_SIZE (TREE_TYPE (rhs))
4965 == TYPE_SIZE (TREE_TYPE (v->value)))
4966 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4967 else
4968 {
4969 if (dump_file)
4970 {
4971 fprintf (dump_file, " const ");
ef6cb4c7 4972 print_generic_expr (dump_file, v->value);
8aab5218 4973 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 4974 print_generic_expr (dump_file, rhs);
8aab5218
MJ
4975 fprintf (dump_file, "\n");
4976 }
4977 continue;
4978 }
4979 }
4980 else
4981 val = v->value;
4982
4983 if (dump_file && (dump_flags & TDF_DETAILS))
4984 {
4985 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 4986 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4987 }
4988 gimple_assign_set_rhs_from_tree (&gsi, val);
4989 update_stmt (stmt);
4990
4991 if (dump_file && (dump_flags & TDF_DETAILS))
4992 {
4993 fprintf (dump_file, "into:\n ");
ef6cb4c7 4994 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4995 fprintf (dump_file, "\n");
4996 }
4997
4998 *m_something_changed = true;
4999 if (maybe_clean_eh_stmt (stmt)
5000 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5001 *m_cfg_changed = true;
5002 }
3daacdcd 5003 return NULL;
8aab5218
MJ
5004}
5005
209ca542 5006/* Update bits info of formal parameters as described in
9d3e0adc 5007 ipcp_transformation. */
209ca542
PK
5008
5009static void
5010ipcp_update_bits (struct cgraph_node *node)
5011{
5012 tree parm = DECL_ARGUMENTS (node->decl);
5013 tree next_parm = parm;
9d3e0adc 5014 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
209ca542
PK
5015
5016 if (!ts || vec_safe_length (ts->bits) == 0)
5017 return;
5018
86cd0334 5019 vec<ipa_bits *, va_gc> &bits = *ts->bits;
209ca542
PK
5020 unsigned count = bits.length ();
5021
5022 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5023 {
5024 if (node->clone.combined_args_to_skip
5025 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5026 continue;
5027
5028 gcc_checking_assert (parm);
5029 next_parm = DECL_CHAIN (parm);
5030
86cd0334
MJ
5031 if (!bits[i]
5032 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5033 || POINTER_TYPE_P (TREE_TYPE (parm)))
209ca542 5034 || !is_gimple_reg (parm))
86cd0334 5035 continue;
209ca542
PK
5036
5037 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5038 if (!ddef)
5039 continue;
5040
5041 if (dump_file)
5042 {
86cd0334
MJ
5043 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5044 print_hex (bits[i]->mask, dump_file);
209ca542
PK
5045 fprintf (dump_file, "\n");
5046 }
5047
67b97478
PK
5048 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5049 {
5050 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5051 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5052
86cd0334
MJ
5053 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5054 | wide_int::from (bits[i]->value, prec, sgn);
67b97478
PK
5055 set_nonzero_bits (ddef, nonzero_bits);
5056 }
5057 else
5058 {
86cd0334
MJ
5059 unsigned tem = bits[i]->mask.to_uhwi ();
5060 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
67b97478
PK
5061 unsigned align = tem & -tem;
5062 unsigned misalign = bitpos & (align - 1);
209ca542 5063
67b97478
PK
5064 if (align > 1)
5065 {
5066 if (dump_file)
5067 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5068
5069 unsigned old_align, old_misalign;
5070 struct ptr_info_def *pi = get_ptr_info (ddef);
5071 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5072
5073 if (old_known
5074 && old_align > align)
5075 {
5076 if (dump_file)
5077 {
5078 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5079 if ((old_misalign & (align - 1)) != misalign)
5080 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5081 old_misalign, misalign);
5082 }
5083 continue;
5084 }
5085
5086 if (old_known
5087 && ((misalign & (old_align - 1)) != old_misalign)
5088 && dump_file)
5089 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5090 old_misalign, misalign);
5091
5092 set_ptr_info_alignment (pi, align, misalign);
5093 }
5094 }
209ca542
PK
5095 }
5096}
5097
8bc5448f 5098/* Update value range of formal parameters as described in
9d3e0adc 5099 ipcp_transformation. */
8bc5448f
KV
5100
5101static void
5102ipcp_update_vr (struct cgraph_node *node)
5103{
5104 tree fndecl = node->decl;
5105 tree parm = DECL_ARGUMENTS (fndecl);
5106 tree next_parm = parm;
9d3e0adc 5107 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5108 if (!ts || vec_safe_length (ts->m_vr) == 0)
5109 return;
5110 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5111 unsigned count = vr.length ();
5112
5113 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5114 {
5115 if (node->clone.combined_args_to_skip
5116 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5117 continue;
5118 gcc_checking_assert (parm);
5119 next_parm = DECL_CHAIN (parm);
5120 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5121
5122 if (!ddef || !is_gimple_reg (parm))
5123 continue;
5124
5125 if (vr[i].known
8bc5448f
KV
5126 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5127 {
5128 tree type = TREE_TYPE (ddef);
5129 unsigned prec = TYPE_PRECISION (type);
718625ad
KV
5130 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5131 {
5132 if (dump_file)
5133 {
5134 fprintf (dump_file, "Setting value range of param %u ", i);
5135 fprintf (dump_file, "%s[",
5136 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5137 print_decs (vr[i].min, dump_file);
5138 fprintf (dump_file, ", ");
5139 print_decs (vr[i].max, dump_file);
5140 fprintf (dump_file, "]\n");
5141 }
5142 set_range_info (ddef, vr[i].type,
5143 wide_int_storage::from (vr[i].min, prec,
5144 TYPE_SIGN (type)),
5145 wide_int_storage::from (vr[i].max, prec,
5146 TYPE_SIGN (type)));
5147 }
5148 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5149 && vr[i].type == VR_ANTI_RANGE
5150 && wi::eq_p (vr[i].min, 0)
5151 && wi::eq_p (vr[i].max, 0))
8bc5448f 5152 {
718625ad
KV
5153 if (dump_file)
5154 fprintf (dump_file, "Setting nonnull for %u\n", i);
5155 set_ptr_nonnull (ddef);
8bc5448f 5156 }
8bc5448f
KV
5157 }
5158 }
5159}
5160
8aab5218 5161/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5162
5163unsigned int
5164ipcp_transform_function (struct cgraph_node *node)
5165{
f65f1ae3 5166 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
56b40062 5167 struct ipa_func_body_info fbi;
2c9561b5 5168 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5169 int param_count;
5170 bool cfg_changed = false, something_changed = false;
5171
5172 gcc_checking_assert (cfun);
5173 gcc_checking_assert (current_function_decl);
5174
5175 if (dump_file)
464d0118
ML
5176 fprintf (dump_file, "Modification phase of node %s\n",
5177 node->dump_name ());
2c9561b5 5178
209ca542 5179 ipcp_update_bits (node);
8bc5448f 5180 ipcp_update_vr (node);
2c9561b5
MJ
5181 aggval = ipa_get_agg_replacements_for_node (node);
5182 if (!aggval)
5183 return 0;
67348ccc 5184 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5185 if (param_count == 0)
5186 return 0;
5187 adjust_agg_replacement_values (node, aggval);
5188 if (dump_file)
5189 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5190
8aab5218
MJ
5191 fbi.node = node;
5192 fbi.info = NULL;
5193 fbi.bb_infos = vNULL;
5194 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5195 fbi.param_count = param_count;
c628d1c3 5196 fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2c9561b5 5197
f65f1ae3
MJ
5198 vec_safe_grow_cleared (descriptors, param_count);
5199 ipa_populate_param_decls (node, *descriptors);
8aab5218
MJ
5200 calculate_dominance_info (CDI_DOMINATORS);
5201 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5202 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5203
8aab5218
MJ
5204 int i;
5205 struct ipa_bb_info *bi;
5206 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5207 free_ipa_bb_info (bi);
5208 fbi.bb_infos.release ();
5209 free_dominance_info (CDI_DOMINATORS);
9d3e0adc
ML
5210
5211 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5212 s->agg_values = NULL;
5213 s->bits = NULL;
5214 s->m_vr = NULL;
676b4899 5215
f65f1ae3 5216 vec_free (descriptors);
2c9561b5
MJ
5217
5218 if (!something_changed)
5219 return 0;
95a2ed03
MJ
5220
5221 if (cfg_changed)
5222 delete_unreachable_blocks_update_callgraph (node, false);
5223
5224 return TODO_update_ssa_only_virtuals;
2c9561b5 5225}
86cd0334
MJ
5226
5227#include "gt-ipa-prop.h"