]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Add missing noexcept on definition to match declaration
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
85ec4feb 2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Function summary holding ipcp_transformation structures (per-node results
   that the IPA-CP transformation phase applies to function bodies).  */
function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information. */
ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 63
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  NULL is
   the empty marker and the pointer value 1 is the deleted marker; hashing
   and equality look at the pointed-to value/mask pair so structurally equal
   ipa_bits share a single GC-allocated instance.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  /* Hash the value/mask pair of P.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  /* Two entries are equal iff both value and mask match.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table for avoid repeated allocations of equal ipa_bits. */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
105
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  As with the
   ipa_bits traits above, NULL marks empty slots and pointer value 1 marks
   deleted slots.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  /* Hash the range kind and the min/max tree pointers (pointer identity is
     sufficient because the bounds are shared tree nodes).  */
  static hashval_t
  hash (const value_range *p)
    {
      gcc_checking_assert (!p->equiv);
      inchash::hash hstate (p->type);
      hstate.add_ptr (p->min);
      hstate.add_ptr (p->max);
      return hstate.end ();
    }
  /* Equality uses the same three fields the hash does.  */
  static bool
  equal (const value_range *a, const value_range *b)
    {
      return a->type == b->type && a->min == b->min && a->max == b->max;
    }
  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};

/* Hash table for avoid repeated allocations of equal value_ranges. */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
151
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  Tracks how many IPA
   structures still describe a reference taken by an edge so that the
   underlying cgraph reference can be removed once the count drops.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference. */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned. */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions. */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 171
5fe8e757
MJ
172/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
174
175static bool
176ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
177{
67348ccc 178 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
179
180 if (!fs_opts)
181 return false;
2bf86c84 182 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
183}
184
be95e2b9
MJ
185/* Return index of the formal whose tree is PTREE in function which corresponds
186 to INFO. */
187
d044dd17 188static int
f65f1ae3
MJ
189ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
190 tree ptree)
518dc859
RL
191{
192 int i, count;
193
f65f1ae3 194 count = vec_safe_length (descriptors);
518dc859 195 for (i = 0; i < count; i++)
f65f1ae3 196 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
197 return i;
198
199 return -1;
200}
201
d044dd17
MJ
202/* Return index of the formal whose tree is PTREE in function which corresponds
203 to INFO. */
204
205int
206ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
207{
208 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
209}
210
211/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
212 NODE. */
be95e2b9 213
f8e2a1ed
MJ
214static void
215ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 216 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
217{
218 tree fndecl;
219 tree fnargs;
220 tree parm;
221 int param_num;
3e293154 222
67348ccc 223 fndecl = node->decl;
0e8853ee 224 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
225 fnargs = DECL_ARGUMENTS (fndecl);
226 param_num = 0;
910ad8de 227 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 228 {
209ca542 229 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
230 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
231 true);
518dc859
RL
232 param_num++;
233 }
234}
235
3f84bf08
MJ
236/* Return how many formal parameters FNDECL has. */
237
fd29c024 238int
310bc633 239count_formal_params (tree fndecl)
3f84bf08
MJ
240{
241 tree parm;
242 int count = 0;
0e8853ee 243 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 244
910ad8de 245 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
246 count++;
247
248 return count;
249}
250
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE: its index, followed by its declaration or type when one is recorded
   in the descriptor vector.  (The original comment here was a stale copy
   describing a getter; this function only prints.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
265
159f01f8
MJ
266/* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
0e8853ee 268
159f01f8 269static bool
0e8853ee
JH
270ipa_alloc_node_params (struct cgraph_node *node, int param_count)
271{
272 struct ipa_node_params *info = IPA_NODE_REF (node);
273
f65f1ae3 274 if (!info->descriptors && param_count)
159f01f8
MJ
275 {
276 vec_safe_grow_cleared (info->descriptors, param_count);
277 return true;
278 }
279 else
280 return false;
0e8853ee
JH
281}
282
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  Idempotent: if the descriptors already exist (or the
   function has no parameters so none are allocated), nothing happens.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
296
/* Print the jump functions associated with call graph edge CS to file F.
   For every actual argument of the call this dumps the jump function kind
   and its payload, any aggregate contents passed, the polymorphic call
   context, known bits and the value range.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  /* For an address of a CONST_DECL also show the initializer it
	     points to.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* A non-NOP operation carries a second operand to print.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Dump any known aggregate contents passed in this argument.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value denotes a clobber of that many bits rather
		 than a known constant.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (dump_file);
	}

      /* Known bits (value/mask) information, if any.  */
      if (jump_func->bits)
	{
	  fprintf (f, " value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown bits\n");

      /* Value range information, if any; a '~' prefix marks an
	 anti-range.  */
      if (jump_func->m_vr)
	{
	  fprintf (f, " VR ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}
411
412
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are printed first, then indirect call
   sites (including the indirect-call metadata describing how the target is
   obtained).  Edges without computed argument information are skipped.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* Calls loading the target from an aggregate (including member
	 pointers) are described differently from plain/polymorphic ones.  */
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
464
465/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 466
3e293154
MJ
467void
468ipa_print_all_jump_functions (FILE *f)
469{
470 struct cgraph_node *node;
471
ca30a539 472 fprintf (f, "\nJump functions:\n");
65c70e6b 473 FOR_EACH_FUNCTION (node)
3e293154
MJ
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477}
478
04be694e
MJ
479/* Set jfunc to be a know-really nothing jump function. */
480
481static void
482ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483{
484 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
04be694e
MJ
487}
488
b8f6e610
MJ
489/* Set JFUNC to be a copy of another jmp (to be used by jump function
490 combination code). The two functions will share their rdesc. */
491
492static void
493ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496{
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500}
501
/* Set JFUNC to be a constant jmp function whose value is an unshared copy of
   CONSTANT.  If CONSTANT is the address of a function, also allocate a
   reference description (with refcount 1, attributed to edge CS) so the
   reference can be tracked and removed when no longer used; otherwise the
   rdesc is NULL.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
525
526/* Set JFUNC to be a simple pass-through jump function. */
527static void
8b7773a4 528ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 529 bool agg_preserved)
7b872d9e
MJ
530{
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 535 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
536}
537
a2b4c188
KV
538/* Set JFUNC to be an unary pass through jump function. */
539
540static void
541ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543{
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549}
7b872d9e
MJ
550/* Set JFUNC to be an arithmetic pass through jump function. */
551
552static void
553ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555{
556 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
8b7773a4 560 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
561}
562
563/* Set JFUNC to be an ancestor jump function. */
564
565static void
566ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 567 int formal_id, bool agg_preserved)
7b872d9e
MJ
568{
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
8b7773a4 572 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
573}
574
8aab5218
MJ
575/* Get IPA BB information about the given BB. FBI is the context of analyzis
576 of this function body. */
577
578static struct ipa_bb_info *
56b40062 579ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
580{
581 gcc_checking_assert (fbi);
582 return &fbi->bb_infos[bb->index];
583}
584
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  It describes the object whose virtual method
   table pointer is being watched and accumulates the verdict.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for. */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change. */
  tree object;
  /* Set to true if dynamic type change has been detected. */
  bool type_maybe_changed;
};
599
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
   */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Calls are ignored per the reasoning above; clobbers cannot store a VMT
     pointer either.  */
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores must be assumed to possibly overwrite a contained
	 VMT pointer; for scalar stores we can rule some cases out.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* Under strict aliasing only pointer-typed stores can alias the
	     VMT pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot change the VMT pointer.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume everything else may store the VMT pointer.  */
  return true;
}
664
3b97a5c7
MJ
665/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointerIt stores its result into DATA, which points to a
11478306 668 prop_type_change_info structure. */
f65cf2b7
MJ
669
670static bool
671check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672{
355fe088 673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683}
684
/* See if ARG is PARM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  /* Only default definitions of PARM_DECLs describe the incoming instance
     directly.  */
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 738
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  Conservatively return true (type may have changed) when
     devirtualization is off or the expected type is not polymorphic.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Build a reference describing the pointer-sized slot holding the VMT
     pointer at OFFSET within BASE.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL looking for a statement
     that may store to the VMT pointer.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  /* A possible type change was found; all bets on the argument value are
     off.  */
  ipa_set_jf_unknown (jfunc);
  return true;
}
792
058d0a90
JH
793/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
794 If it is, return true and fill in the jump function JFUNC with relevant type
795 information or set it to unknown. ARG is the object itself (not a pointer
796 to it, unless dereferenced). BASE is the base of the memory access as
797 returned by get_ref_base_and_extent, as is the offset. */
798
799static bool
538dd0b7 800detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
801 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
802{
803 if (!flag_devirtualize)
804 return false;
805
806 if (TREE_CODE (base) == MEM_REF
807 && !param_type_may_change_p (current_function_decl,
808 TREE_OPERAND (base, 0),
809 call))
810 return false;
811 return detect_type_change_from_memory_writes (arg, base, comp_type,
812 call, jfunc, offset);
813}
814
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Synthesize a zero-offset dereference of ARG to serve as both the
     reference and its base.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
837
fdb0e1b4
MJ
838/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
839 boolean variable pointed to by DATA. */
840
841static bool
842mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
843 void *data)
844{
845 bool *b = (bool *) data;
846 *b = true;
847 return true;
848}
849
8aab5218
MJ
850/* Return true if we have already walked so many statements in AA that we
851 should really just start giving up. */
852
853static bool
56b40062 854aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
855{
856 gcc_checking_assert (fbi);
857 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
858}
859
860/* Find the nearest valid aa status for parameter specified by INDEX that
861 dominates BB. */
862
56b40062
MJ
863static struct ipa_param_aa_status *
864find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
865 int index)
866{
867 while (true)
868 {
869 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
870 if (!bb)
871 return NULL;
872 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
873 if (!bi->param_aa_statuses.is_empty ()
874 && bi->param_aa_statuses[index].valid)
875 return &bi->param_aa_statuses[index];
876 }
877}
878
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate per-parameter status slots for this BB.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating BB that already has a valid status;
	 otherwise start from a clean (all-unmodified) state.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
907
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* A TREE_READONLY parameter can never be modified.  */
  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Give up if the AA walk budget is exhausted, and reuse a cached
	 negative result if one exists for this BB/parameter.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a positive "modified" verdict so later queries in this BB can
     return early.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
952
a2b4c188
KV
953/* If STMT is an assignment that loads a value from an parameter declaration,
954 return the index of the parameter in ipa_node_params which has not been
955 modified. Otherwise return -1. */
956
957static int
958load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 959 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
960 gimple *stmt)
961{
bda2bc48
MJ
962 int index;
963 tree op1;
964
a2b4c188
KV
965 if (!gimple_assign_single_p (stmt))
966 return -1;
967
bda2bc48
MJ
968 op1 = gimple_assign_rhs1 (stmt);
969 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
970 return -1;
971
bda2bc48
MJ
972 index = ipa_get_param_decl_index_1 (descriptors, op1);
973 if (index < 0
974 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
975 return -1;
976
bda2bc48 977 return index;
a2b4c188
KV
978}
979
8aab5218
MJ
980/* Return true if memory reference REF (which must be a load through parameter
981 with INDEX) loads data that are known to be unmodified in this function
982 before reaching statement STMT. */
8b7773a4
MJ
983
984static bool
56b40062 985parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 986 int index, gimple *stmt, tree ref)
8b7773a4 987{
56b40062 988 struct ipa_param_aa_status *paa;
8b7773a4
MJ
989 bool modified = false;
990 ao_ref refd;
991
8aab5218
MJ
992 /* FIXME: FBI can be NULL if we are being called from outside
993 ipa_node_analysis or ipcp_transform_function, which currently happens
994 during inlining analysis. It would be great to extend fbi's lifetime and
995 always have it. Currently, we are just not afraid of too much walking in
996 that case. */
997 if (fbi)
998 {
999 if (aa_overwalked (fbi))
1000 return false;
1001 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1002 if (paa->ref_modified)
1003 return false;
1004 }
1005 else
1006 paa = NULL;
8b7773a4 1007
8aab5218 1008 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1009 ao_ref_init (&refd, ref);
8aab5218
MJ
1010 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1011 &modified, NULL);
1012 if (fbi)
1013 fbi->aa_walked += walked;
1014 if (paa && modified)
1015 paa->ref_modified = true;
8b7773a4
MJ
1016 return !modified;
1017}
1018
8aab5218
MJ
1019/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1020 is known to be unmodified in this function before reaching call statement
1021 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1022
1023static bool
56b40062 1024parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1025 gimple *call, tree parm)
8b7773a4
MJ
1026{
1027 bool modified = false;
1028 ao_ref refd;
1029
1030 /* It's unnecessary to calculate anything about memory contnets for a const
1031 function because it is not goin to use it. But do not cache the result
1032 either. Also, no such calculations for non-pointers. */
1033 if (!gimple_vuse (call)
8aab5218
MJ
1034 || !POINTER_TYPE_P (TREE_TYPE (parm))
1035 || aa_overwalked (fbi))
8b7773a4
MJ
1036 return false;
1037
56b40062
MJ
1038 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1039 gimple_bb (call),
1040 index);
8aab5218 1041 if (paa->pt_modified)
8b7773a4
MJ
1042 return false;
1043
1044 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
1045 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1046 &modified, NULL);
1047 fbi->aa_walked += walked;
8b7773a4 1048 if (modified)
8aab5218 1049 paa->pt_modified = true;
8b7773a4
MJ
1050 return !modified;
1051}
1052
91bb9f80
MJ
1053/* Return true if we can prove that OP is a memory reference loading
1054 data from an aggregate passed as a parameter.
1055
1056 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1057 false if it cannot prove that the value has not been modified before the
1058 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1059 if it cannot prove the value has not been modified, in that case it will
1060 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1061
8b7773a4
MJ
1062 INFO and PARMS_AINFO describe parameters of the current function (but the
1063 latter can be NULL), STMT is the load statement. If function returns true,
1064 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1065 within the aggregate and whether it is a load from a value passed by
1066 reference respectively. */
1067
ff302741 1068bool
56b40062 1069ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1070 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1071 gimple *stmt, tree op, int *index_p,
ff302741 1072 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
91bb9f80 1073 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1074{
1075 int index;
588db50c 1076 HOST_WIDE_INT size;
ee45a32d 1077 bool reverse;
588db50c 1078 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1079
588db50c 1080 if (!base)
8b7773a4
MJ
1081 return false;
1082
1083 if (DECL_P (base))
1084 {
d044dd17 1085 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1086 if (index >= 0
8aab5218 1087 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1088 {
1089 *index_p = index;
1090 *by_ref_p = false;
3ff2ca23
JJ
1091 if (size_p)
1092 *size_p = size;
91bb9f80
MJ
1093 if (guaranteed_unmodified)
1094 *guaranteed_unmodified = true;
8b7773a4
MJ
1095 return true;
1096 }
1097 return false;
1098 }
1099
1100 if (TREE_CODE (base) != MEM_REF
1101 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1102 || !integer_zerop (TREE_OPERAND (base, 1)))
1103 return false;
1104
1105 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1106 {
1107 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1108 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1109 }
1110 else
1111 {
1112 /* This branch catches situations where a pointer parameter is not a
1113 gimple register, for example:
1114
1115 void hip7(S*) (struct S * p)
1116 {
1117 void (*<T2e4>) (struct S *) D.1867;
1118 struct S * p.1;
1119
1120 <bb 2>:
1121 p.1_1 = p;
1122 D.1867_2 = p.1_1->f;
1123 D.1867_2 ();
1124 gdp = &p;
1125 */
1126
355fe088 1127 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1128 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1129 }
1130
91bb9f80 1131 if (index >= 0)
8b7773a4 1132 {
91bb9f80
MJ
1133 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1134 if (!data_preserved && !guaranteed_unmodified)
1135 return false;
1136
8b7773a4
MJ
1137 *index_p = index;
1138 *by_ref_p = true;
3ff2ca23
JJ
1139 if (size_p)
1140 *size_p = size;
91bb9f80
MJ
1141 if (guaranteed_unmodified)
1142 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1143 return true;
1144 }
1145 return false;
1146}
1147
b258210c 1148/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1149 of an assignment statement STMT, try to determine whether we are actually
1150 handling any of the following cases and construct an appropriate jump
1151 function into JFUNC if so:
1152
1153 1) The passed value is loaded from a formal parameter which is not a gimple
1154 register (most probably because it is addressable, the value has to be
1155 scalar) and we can guarantee the value has not changed. This case can
1156 therefore be described by a simple pass-through jump function. For example:
1157
1158 foo (int a)
1159 {
1160 int a.0;
1161
1162 a.0_2 = a;
1163 bar (a.0_2);
1164
1165 2) The passed value can be described by a simple arithmetic pass-through
1166 jump function. E.g.
1167
1168 foo (int a)
1169 {
1170 int D.2064;
1171
1172 D.2064_4 = a.1(D) + 4;
1173 bar (D.2064_4);
1174
1175 This case can also occur in combination of the previous one, e.g.:
1176
1177 foo (int a, int z)
1178 {
1179 int a.0;
1180 int D.2064;
1181
1182 a.0_3 = a;
1183 D.2064_4 = a.0_3 + 4;
1184 foo (D.2064_4);
1185
1186 3) The passed value is an address of an object within another one (which
1187 also passed by reference). Such situations are described by an ancestor
1188 jump function and describe situations such as:
1189
1190 B::foo() (struct B * const this)
1191 {
1192 struct A * D.1845;
1193
1194 D.1845_2 = &this_1(D)->D.1748;
1195 A::bar (D.1845_2);
1196
1197 INFO is the structure describing individual parameters access different
1198 stages of IPA optimizations. PARMS_AINFO contains the information that is
1199 only needed for intraprocedural analysis. */
685b0d13
MJ
1200
1201static void
56b40062 1202compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1203 struct ipa_node_params *info,
b258210c 1204 struct ipa_jump_func *jfunc,
355fe088 1205 gcall *call, gimple *stmt, tree name,
06d65050 1206 tree param_type)
685b0d13 1207{
588db50c 1208 HOST_WIDE_INT offset, size;
fdb0e1b4 1209 tree op1, tc_ssa, base, ssa;
ee45a32d 1210 bool reverse;
685b0d13 1211 int index;
685b0d13 1212
685b0d13 1213 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1214
fdb0e1b4 1215 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1216 {
fdb0e1b4
MJ
1217 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1218 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1219 else
bda2bc48
MJ
1220 index = load_from_unmodified_param (fbi, info->descriptors,
1221 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1222 tc_ssa = op1;
1223 }
1224 else
1225 {
bda2bc48 1226 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1227 tc_ssa = gimple_assign_lhs (stmt);
1228 }
1229
1230 if (index >= 0)
1231 {
a77af182 1232 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1233 {
a77af182
RB
1234 case GIMPLE_BINARY_RHS:
1235 {
1236 tree op2 = gimple_assign_rhs2 (stmt);
1237 if (!is_gimple_ip_invariant (op2)
1238 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1239 != tcc_comparison)
1240 && !useless_type_conversion_p (TREE_TYPE (name),
1241 TREE_TYPE (op1))))
1242 return;
1243
1244 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1245 gimple_assign_rhs_code (stmt));
1246 break;
1247 }
1248 case GIMPLE_SINGLE_RHS:
1249 {
1250 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1251 tc_ssa);
1252 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1253 break;
1254 }
1255 case GIMPLE_UNARY_RHS:
bda2bc48
MJ
1256 if (is_gimple_assign (stmt)
1257 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1258 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1259 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1260 gimple_assign_rhs_code (stmt));
a77af182 1261 default:;
8b7773a4 1262 }
685b0d13
MJ
1263 return;
1264 }
1265
1266 if (TREE_CODE (op1) != ADDR_EXPR)
1267 return;
1268 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1269 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1270 return;
588db50c 1271 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1272 offset_int mem_offset;
1273 if (!base
1274 || TREE_CODE (base) != MEM_REF
1275 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1276 return;
aca52e6f 1277 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1278 ssa = TREE_OPERAND (base, 0);
1279 if (TREE_CODE (ssa) != SSA_NAME
1280 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1281 || offset < 0)
685b0d13
MJ
1282 return;
1283
b8f6e610 1284 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1285 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1286 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1287 ipa_set_ancestor_jf (jfunc, offset, index,
1288 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1289}
1290
40591473
MJ
1291/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1292 it looks like:
1293
1294 iftmp.1_3 = &obj_2(D)->D.1762;
1295
1296 The base of the MEM_REF must be a default definition SSA NAME of a
1297 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1298 whole MEM_REF expression is returned and the offset calculated from any
1299 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1300 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1301
1302static tree
355fe088 1303get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1304{
588db50c 1305 HOST_WIDE_INT size;
40591473 1306 tree expr, parm, obj;
ee45a32d 1307 bool reverse;
40591473
MJ
1308
1309 if (!gimple_assign_single_p (assign))
1310 return NULL_TREE;
1311 expr = gimple_assign_rhs1 (assign);
1312
1313 if (TREE_CODE (expr) != ADDR_EXPR)
1314 return NULL_TREE;
1315 expr = TREE_OPERAND (expr, 0);
1316 obj = expr;
588db50c 1317 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1318
aca52e6f
RS
1319 offset_int mem_offset;
1320 if (!expr
1321 || TREE_CODE (expr) != MEM_REF
1322 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1323 return NULL_TREE;
1324 parm = TREE_OPERAND (expr, 0);
1325 if (TREE_CODE (parm) != SSA_NAME
1326 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1327 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1328 return NULL_TREE;
1329
aca52e6f 1330 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1331 *obj_p = obj;
1332 return expr;
1333}
1334
685b0d13 1335
b258210c
MJ
1336/* Given that an actual argument is an SSA_NAME that is a result of a phi
1337 statement PHI, try to find out whether NAME is in fact a
1338 multiple-inheritance typecast from a descendant into an ancestor of a formal
1339 parameter and thus can be described by an ancestor jump function and if so,
1340 write the appropriate function into JFUNC.
1341
1342 Essentially we want to match the following pattern:
1343
1344 if (obj_2(D) != 0B)
1345 goto <bb 3>;
1346 else
1347 goto <bb 4>;
1348
1349 <bb 3>:
1350 iftmp.1_3 = &obj_2(D)->D.1762;
1351
1352 <bb 4>:
1353 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1354 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1355 return D.1879_6; */
1356
1357static void
56b40062 1358compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1359 struct ipa_node_params *info,
b258210c 1360 struct ipa_jump_func *jfunc,
538dd0b7 1361 gcall *call, gphi *phi)
b258210c 1362{
40591473 1363 HOST_WIDE_INT offset;
355fe088 1364 gimple *assign, *cond;
b258210c 1365 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1366 tree tmp, parm, expr, obj;
b258210c
MJ
1367 int index, i;
1368
54e348cb 1369 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1370 return;
1371
54e348cb
MJ
1372 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1373 tmp = PHI_ARG_DEF (phi, 0);
1374 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1375 tmp = PHI_ARG_DEF (phi, 1);
1376 else
1377 return;
b258210c
MJ
1378 if (TREE_CODE (tmp) != SSA_NAME
1379 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1380 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1381 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1382 return;
1383
1384 assign = SSA_NAME_DEF_STMT (tmp);
1385 assign_bb = gimple_bb (assign);
40591473 1386 if (!single_pred_p (assign_bb))
b258210c 1387 return;
40591473
MJ
1388 expr = get_ancestor_addr_info (assign, &obj, &offset);
1389 if (!expr)
b258210c
MJ
1390 return;
1391 parm = TREE_OPERAND (expr, 0);
b258210c 1392 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1393 if (index < 0)
1394 return;
b258210c
MJ
1395
1396 cond_bb = single_pred (assign_bb);
1397 cond = last_stmt (cond_bb);
69610617
SB
1398 if (!cond
1399 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1400 || gimple_cond_code (cond) != NE_EXPR
1401 || gimple_cond_lhs (cond) != parm
1402 || !integer_zerop (gimple_cond_rhs (cond)))
1403 return;
1404
b258210c
MJ
1405 phi_bb = gimple_bb (phi);
1406 for (i = 0; i < 2; i++)
1407 {
1408 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1409 if (pred != assign_bb && pred != cond_bb)
1410 return;
1411 }
1412
3b97a5c7
MJ
1413 ipa_set_ancestor_jf (jfunc, offset, index,
1414 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1415}
1416
be95e2b9
MJ
1417/* Inspect the given TYPE and return true iff it has the same structure (the
1418 same number of fields of the same types) as a C++ member pointer. If
1419 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1420 corresponding fields there. */
1421
3e293154
MJ
1422static bool
1423type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1424{
1425 tree fld;
1426
1427 if (TREE_CODE (type) != RECORD_TYPE)
1428 return false;
1429
1430 fld = TYPE_FIELDS (type);
1431 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1432 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1433 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1434 return false;
1435
1436 if (method_ptr)
1437 *method_ptr = fld;
1438
910ad8de 1439 fld = DECL_CHAIN (fld);
8b7773a4 1440 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1441 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1442 return false;
1443 if (delta)
1444 *delta = fld;
1445
910ad8de 1446 if (DECL_CHAIN (fld))
3e293154
MJ
1447 return false;
1448
1449 return true;
1450}
1451
61502ca8 1452/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1453 return the rhs of its defining statement. Otherwise return RHS as it
1454 is. */
7ec49257
MJ
1455
1456static inline tree
1457get_ssa_def_if_simple_copy (tree rhs)
1458{
1459 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1460 {
355fe088 1461 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1462
1463 if (gimple_assign_single_p (def_stmt))
1464 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1465 else
1466 break;
7ec49257
MJ
1467 }
1468 return rhs;
1469}
1470
8b7773a4
MJ
1471/* Simple linked list, describing known contents of an aggregate beforere
1472 call. */
1473
1474struct ipa_known_agg_contents_list
1475{
1476 /* Offset and size of the described part of the aggregate. */
1477 HOST_WIDE_INT offset, size;
1478 /* Known constant value or NULL if the contents is known to be unknown. */
1479 tree constant;
1480 /* Pointer to the next structure in the list. */
1481 struct ipa_known_agg_contents_list *next;
1482};
3e293154 1483
0d48ee34
MJ
1484/* Find the proper place in linked list of ipa_known_agg_contents_list
1485 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1486 unless there is a partial overlap, in which case return NULL, or such
1487 element is already there, in which case set *ALREADY_THERE to true. */
1488
1489static struct ipa_known_agg_contents_list **
1490get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1491 HOST_WIDE_INT lhs_offset,
1492 HOST_WIDE_INT lhs_size,
1493 bool *already_there)
1494{
1495 struct ipa_known_agg_contents_list **p = list;
1496 while (*p && (*p)->offset < lhs_offset)
1497 {
1498 if ((*p)->offset + (*p)->size > lhs_offset)
1499 return NULL;
1500 p = &(*p)->next;
1501 }
1502
1503 if (*p && (*p)->offset < lhs_offset + lhs_size)
1504 {
1505 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1506 /* We already know this value is subsequently overwritten with
1507 something else. */
1508 *already_there = true;
1509 else
1510 /* Otherwise this is a partial overlap which we cannot
1511 represent. */
1512 return NULL;
1513 }
1514 return p;
1515}
1516
1517/* Build aggregate jump function from LIST, assuming there are exactly
1518 CONST_COUNT constant entries there and that th offset of the passed argument
1519 is ARG_OFFSET and store it into JFUNC. */
1520
1521static void
1522build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1523 int const_count, HOST_WIDE_INT arg_offset,
1524 struct ipa_jump_func *jfunc)
1525{
1526 vec_alloc (jfunc->agg.items, const_count);
1527 while (list)
1528 {
1529 if (list->constant)
1530 {
1531 struct ipa_agg_jf_item item;
1532 item.offset = list->offset - arg_offset;
1533 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1534 item.value = unshare_expr_without_location (list->constant);
1535 jfunc->agg.items->quick_push (item);
1536 }
1537 list = list->next;
1538 }
1539}
1540
8b7773a4
MJ
1541/* Traverse statements from CALL backwards, scanning whether an aggregate given
1542 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1543 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1544 aggregate. JFUNC is the jump function into which the constants are
1545 subsequently stored. */
be95e2b9 1546
3e293154 1547static void
538dd0b7
DM
1548determine_locally_known_aggregate_parts (gcall *call, tree arg,
1549 tree arg_type,
0d48ee34 1550 struct ipa_jump_func *jfunc)
3e293154 1551{
8b7773a4
MJ
1552 struct ipa_known_agg_contents_list *list = NULL;
1553 int item_count = 0, const_count = 0;
1554 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1555 gimple_stmt_iterator gsi;
8b7773a4
MJ
1556 tree arg_base;
1557 bool check_ref, by_ref;
1558 ao_ref r;
3e293154 1559
29799e9d
MJ
1560 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1561 return;
1562
8b7773a4
MJ
1563 /* The function operates in three stages. First, we prepare check_ref, r,
1564 arg_base and arg_offset based on what is actually passed as an actual
1565 argument. */
3e293154 1566
85942f45 1567 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1568 {
1569 by_ref = true;
1570 if (TREE_CODE (arg) == SSA_NAME)
1571 {
1572 tree type_size;
85942f45 1573 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1574 return;
1575 check_ref = true;
1576 arg_base = arg;
1577 arg_offset = 0;
85942f45 1578 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1579 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1580 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1581 }
1582 else if (TREE_CODE (arg) == ADDR_EXPR)
1583 {
ee45a32d 1584 bool reverse;
8b7773a4
MJ
1585
1586 arg = TREE_OPERAND (arg, 0);
588db50c
RS
1587 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1588 &arg_size, &reverse);
1589 if (!arg_base)
8b7773a4
MJ
1590 return;
1591 if (DECL_P (arg_base))
1592 {
8b7773a4 1593 check_ref = false;
0d48ee34 1594 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1595 }
1596 else
1597 return;
1598 }
1599 else
1600 return;
1601 }
1602 else
1603 {
ee45a32d 1604 bool reverse;
8b7773a4
MJ
1605
1606 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1607
1608 by_ref = false;
1609 check_ref = false;
588db50c
RS
1610 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1611 &arg_size, &reverse);
1612 if (!arg_base)
8b7773a4
MJ
1613 return;
1614
1615 ao_ref_init (&r, arg);
1616 }
1617
1618 /* Second stage walks back the BB, looks at individual statements and as long
1619 as it is confident of how the statements affect contents of the
1620 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1621 describing it. */
1622 gsi = gsi_for_stmt (call);
726a989a
RB
1623 gsi_prev (&gsi);
1624 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1625 {
8b7773a4 1626 struct ipa_known_agg_contents_list *n, **p;
355fe088 1627 gimple *stmt = gsi_stmt (gsi);
588db50c 1628 HOST_WIDE_INT lhs_offset, lhs_size;
8b7773a4 1629 tree lhs, rhs, lhs_base;
ee45a32d 1630 bool reverse;
3e293154 1631
8b7773a4 1632 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1633 continue;
8b75fc9b 1634 if (!gimple_assign_single_p (stmt))
8b7773a4 1635 break;
3e293154 1636
726a989a
RB
1637 lhs = gimple_assign_lhs (stmt);
1638 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1639 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1640 || TREE_CODE (lhs) == BIT_FIELD_REF
1641 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1642 break;
3e293154 1643
588db50c
RS
1644 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1645 &lhs_size, &reverse);
1646 if (!lhs_base)
8b7773a4 1647 break;
3e293154 1648
8b7773a4 1649 if (check_ref)
518dc859 1650 {
8b7773a4
MJ
1651 if (TREE_CODE (lhs_base) != MEM_REF
1652 || TREE_OPERAND (lhs_base, 0) != arg_base
1653 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1654 break;
3e293154 1655 }
8b7773a4 1656 else if (lhs_base != arg_base)
774b8a55
MJ
1657 {
1658 if (DECL_P (lhs_base))
1659 continue;
1660 else
1661 break;
1662 }
3e293154 1663
0d48ee34
MJ
1664 bool already_there = false;
1665 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1666 &already_there);
1667 if (!p)
8b7773a4 1668 break;
0d48ee34
MJ
1669 if (already_there)
1670 continue;
3e293154 1671
8b7773a4
MJ
1672 rhs = get_ssa_def_if_simple_copy (rhs);
1673 n = XALLOCA (struct ipa_known_agg_contents_list);
1674 n->size = lhs_size;
1675 n->offset = lhs_offset;
1676 if (is_gimple_ip_invariant (rhs))
1677 {
1678 n->constant = rhs;
1679 const_count++;
1680 }
1681 else
1682 n->constant = NULL_TREE;
1683 n->next = *p;
1684 *p = n;
3e293154 1685
8b7773a4 1686 item_count++;
dfea20f1
MJ
1687 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1688 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1689 break;
1690 }
be95e2b9 1691
8b7773a4
MJ
1692 /* Third stage just goes over the list and creates an appropriate vector of
1693 ipa_agg_jf_item structures out of it, of sourse only if there are
1694 any known constants to begin with. */
3e293154 1695
8b7773a4 1696 if (const_count)
3e293154 1697 {
8b7773a4 1698 jfunc->agg.by_ref = by_ref;
0d48ee34 1699 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1700 }
1701}
1702
5d5f1e95
KV
1703/* Return the Ith param type of callee associated with call graph
1704 edge E. */
1705
1706tree
06d65050
JH
1707ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1708{
1709 int n;
1710 tree type = (e->callee
67348ccc 1711 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1712 : gimple_call_fntype (e->call_stmt));
1713 tree t = TYPE_ARG_TYPES (type);
1714
1715 for (n = 0; n < i; n++)
1716 {
1717 if (!t)
1718 break;
1719 t = TREE_CHAIN (t);
1720 }
1721 if (t)
1722 return TREE_VALUE (t);
1723 if (!e->callee)
1724 return NULL;
67348ccc 1725 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1726 for (n = 0; n < i; n++)
1727 {
1728 if (!t)
1729 return NULL;
1730 t = TREE_CHAIN (t);
1731 }
1732 if (t)
1733 return TREE_TYPE (t);
1734 return NULL;
1735}
1736
86cd0334
MJ
1737/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1738 allocated structure or a previously existing one shared with other jump
1739 functions and/or transformation summaries. */
1740
1741ipa_bits *
1742ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1743{
1744 ipa_bits tmp;
1745 tmp.value = value;
1746 tmp.mask = mask;
1747
1748 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1749 if (*slot)
1750 return *slot;
1751
1752 ipa_bits *res = ggc_alloc<ipa_bits> ();
1753 res->value = value;
1754 res->mask = mask;
1755 *slot = res;
1756
1757 return res;
1758}
1759
1760/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1761 table in order to avoid creating multiple same ipa_bits structures. */
1762
1763static void
1764ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1765 const widest_int &mask)
1766{
1767 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1768}
1769
1770/* Return a pointer to a value_range just like *TMP, but either find it in
1771 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1772
1773static value_range *
1774ipa_get_value_range (value_range *tmp)
1775{
1776 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1777 if (*slot)
1778 return *slot;
1779
1780 value_range *vr = ggc_alloc<value_range> ();
1781 *vr = *tmp;
1782 *slot = vr;
1783
1784 return vr;
1785}
1786
1787/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1788 equiv set. Use hash table in order to avoid creating multiple same copies of
1789 value_ranges. */
1790
1791static value_range *
1792ipa_get_value_range (enum value_range_type type, tree min, tree max)
1793{
1794 value_range tmp;
1795 tmp.type = type;
1796 tmp.min = min;
1797 tmp.max = max;
1798 tmp.equiv = NULL;
1799 return ipa_get_value_range (&tmp);
1800}
1801
1802/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1803 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1804 same value_range structures. */
1805
1806static void
1807ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1808 tree min, tree max)
1809{
1810 jf->m_vr = ipa_get_value_range (type, min, max);
1811}
1812
1813/* Assign to JF a pointer to a value_range just liek TMP but either fetch a
1814 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1815
1816static void
1817ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1818{
1819 jf->m_vr = ipa_get_value_range (tmp);
1820}
1821
3e293154
MJ
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if the jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  /* Allocate the vectors before the early exits below so their size always
     matches the argument count of the call.  */
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* For pointer arguments under -fdevirtualize, derive and store a
	 polymorphic call context.  NOTE(review): the boolean result of
	 get_dynamic_type is ignored here; the call appears to refine
	 CONTEXT in place — confirm against its definition.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Value-range part: pointers that are provably non-NULL get a
	 ~[0, 0] anti-range; integral SSA names get their recorded range
	 converted to the type of the callee parameter.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range tmpvr,resvr;

	      tmpvr.type = type;
	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      tmpvr.equiv = NULL;
	      memset (&resvr, 0, sizeof (resvr));
	      /* Model the implicit conversion of the argument to the
		 formal parameter's type.  */
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Known-bits part: nonzero-bit info for integral SSA names, the exact
	 value for integer constants, alignment-derived bits for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  /* Bits covered by the alignment are known; the mask marks the
	     unknown (low) bits as zero-extended from the misalignment.  */
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Classify the jump function itself: IP-invariant constants (and
	 read-only globals), aggregate pass-through, simple SSA pass-through,
	 or the more complex assign/ancestor forms.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Unless the jump function already guarantees that the aggregate is
	 preserved, try to describe the aggregate contents known at this
	 call site.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  /* Drop the context vector again if no argument produced anything useful.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2001
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  /* NOTE(review): the return value of ultimate_alias_target is
	     discarded; presumably the call is made for its side effects on
	     alias resolution — confirm, otherwise it is dead code.  */
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
2027
8b7773a4
MJ
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only plain single-RHS assignments can be member-pointer loads.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load is either a COMPONENT_REF of the field (rec.__pfn form) or a
     bare MEM_REF at the field's byte offset (MEM[(struct *)&rec + N] form);
     remember which, since the final check differs.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type has the member-pointer shape
     (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  /* Pick the field the caller is interested in.  */
  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  /* Note: *OFFSET_P is written even if the match fails below, as the
     function comment warns.  */
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we want.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Bare MEM_REF form: the constant offset must equal the field's byte
       position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2079
2080/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2081
3e293154
MJ
2082static bool
2083ipa_is_ssa_with_stmt_def (tree t)
2084{
2085 if (TREE_CODE (t) == SSA_NAME
2086 && !SSA_NAME_IS_DEFAULT_DEF (t))
2087 return true;
2088 else
2089 return false;
2090}
2091
40591473
MJ
2092/* Find the indirect call graph edge corresponding to STMT and mark it as a
2093 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2094 indirect call graph edge. */
be95e2b9 2095
40591473 2096static struct cgraph_edge *
538dd0b7
DM
2097ipa_note_param_call (struct cgraph_node *node, int param_index,
2098 gcall *stmt)
3e293154 2099{
e33c6cd6 2100 struct cgraph_edge *cs;
3e293154 2101
d52f5295 2102 cs = node->get_edge (stmt);
b258210c 2103 cs->indirect_info->param_index = param_index;
8b7773a4 2104 cs->indirect_info->agg_contents = 0;
c13bc3d9 2105 cs->indirect_info->member_ptr = 0;
91bb9f80 2106 cs->indirect_info->guaranteed_unmodified = 0;
40591473 2107 return cs;
3e293154
MJ
2108}
2109
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Easy case: the called pointer is a parameter itself (its SSA default
     definition).  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Next case: the called pointer is loaded from an aggregate that is a
     parameter (passed by value or reference).  */
  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The called value must be a two-argument PHI of method-pointer
     type (the merge of the virtual and non-virtual paths).  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Exactly one PHI operand may be the direct pfn load; the other block is
     the virtual-dispatch path.  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* Walk back from the condition: it must be (x & 1) != 0 where x, possibly
     through a conversion, comes from the member-pointer field load.  */
  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over one optional conversion statement.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The virtual-bit test must read the same parameter as the pfn load; which
     field carries the bit depends on the target's ptrmemfunc ABI.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2304
b258210c
MJ
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the incoming value of a parameter.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed since function entry.  */
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* The object may be an ancestor (sub-object at a known offset) of a
	 parameter; dig out the base and the offset.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic indirect call on the edge.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2362
2363/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2364 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2365 containing intermediate information about each formal parameter. */
b258210c
MJ
2366
2367static void
56b40062 2368ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2369{
2370 tree target = gimple_call_fn (call);
b786d31f
JH
2371
2372 if (!target
2373 || (TREE_CODE (target) != SSA_NAME
2374 && !virtual_method_call_p (target)))
2375 return;
b258210c 2376
7d0aa05b 2377 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2378 /* If we previously turned the call into a direct call, there is
2379 no need to analyze. */
b786d31f 2380 if (cs && !cs->indirect_unknown_callee)
25583c4f 2381 return;
7d0aa05b 2382
a5b58b28 2383 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2384 {
7d0aa05b
JH
2385 tree instance;
2386 tree target = gimple_call_fn (call);
6f8091fc
JH
2387 ipa_polymorphic_call_context context (current_function_decl,
2388 target, call, &instance);
7d0aa05b 2389
ba392339
JH
2390 gcc_checking_assert (cs->indirect_info->otr_type
2391 == obj_type_ref_class (target));
2392 gcc_checking_assert (cs->indirect_info->otr_token
2393 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2394
29c43c83
JH
2395 cs->indirect_info->vptr_changed
2396 = !context.get_dynamic_type (instance,
2397 OBJ_TYPE_REF_OBJECT (target),
2398 obj_type_ref_class (target), call);
0127c169 2399 cs->indirect_info->context = context;
7d0aa05b
JH
2400 }
2401
b258210c 2402 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2403 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2404 else if (virtual_method_call_p (target))
8aab5218 2405 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2406}
2407
2408
e33c6cd6 2409/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2410 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2411 formal parameters are called. */
be95e2b9 2412
3e293154 2413static void
355fe088 2414ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2415{
726a989a 2416 if (is_gimple_call (stmt))
538dd0b7 2417 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2418}
2419
2420/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2421 If OP is a parameter declaration, mark it as used in the info structure
2422 passed in DATA. */
2423
2424static bool
355fe088 2425visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2426{
2427 struct ipa_node_params *info = (struct ipa_node_params *) data;
2428
2429 op = get_base_address (op);
2430 if (op
2431 && TREE_CODE (op) == PARM_DECL)
2432 {
2433 int index = ipa_get_param_decl_index (info, op);
2434 gcc_assert (index >= 0);
310bc633 2435 ipa_set_param_used (info, index, true);
062c604f
MJ
2436 }
2437
2438 return false;
3e293154
MJ
2439}
2440
8aab5218
MJ
2441/* Scan the statements in BB and inspect the uses of formal parameters. Store
2442 the findings in various structures of the associated ipa_node_params
2443 structure, such as parameter flags, notes etc. FBI holds various data about
2444 the function being analyzed. */
be95e2b9 2445
062c604f 2446static void
56b40062 2447ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2448{
726a989a 2449 gimple_stmt_iterator gsi;
8aab5218
MJ
2450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2451 {
355fe088 2452 gimple *stmt = gsi_stmt (gsi);
3e293154 2453
8aab5218
MJ
2454 if (is_gimple_debug (stmt))
2455 continue;
3e293154 2456
8aab5218
MJ
2457 ipa_analyze_stmt_uses (fbi, stmt);
2458 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2459 visit_ref_for_mod_analysis,
2460 visit_ref_for_mod_analysis,
2461 visit_ref_for_mod_analysis);
5fe8e757 2462 }
8aab5218
MJ
2463 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2464 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2465 visit_ref_for_mod_analysis,
2466 visit_ref_for_mod_analysis,
2467 visit_ref_for_mod_analysis);
2468}
2469
/* Calculate controlled uses of parameters of NODE.  A use count is recorded
   per parameter: the number of uses that are all calls, or
   IPA_UNDESCRIBED_USE when any non-call, non-debug use exists (or when the
   parameter is not an SSA register).  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count uses that are calls; any other real (non-debug) use
		 makes the parameter's uses undescribable.  Note that the
		 `else' below pairs with the outer `if' inside the
		 FOR_EACH_IMM_USE_FAST body.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2514
8aab5218 2515/* Free stuff in BI. */
062c604f 2516
8aab5218
MJ
2517static void
2518free_ipa_bb_info (struct ipa_bb_info *bi)
2519{
2520 bi->cg_edges.release ();
2521 bi->param_aa_statuses.release ();
3e293154
MJ
2522}
2523
/* Dominator walker driving the analysis.  Visits basic blocks in
   CDI_DOMINATORS order and runs the per-BB parameter-use and jump-function
   analyses (see before_dom_children).  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Shared per-function analysis state passed to every visited block.  */
  struct ipa_func_body_info *m_fbi;
};
/* Visit BB before its dominator-tree children: first record parameter uses
   in the block, then compute jump functions for all call sites in it.
   Returns NULL, i.e. no particular outgoing edge is taken.  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2545
c3431191
ML
2546/* Release body info FBI. */
2547
2548void
2549ipa_release_body_info (struct ipa_func_body_info *fbi)
2550{
2551 int i;
2552 struct ipa_bb_info *bi;
2553
2554 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2555 free_ipa_bb_info (bi);
2556 fbi->bb_infos.release ();
2557}
2558
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* Analyze each node at most once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When function-specific options forbid the analysis, conservatively mark
     every parameter as used with undescribed uses and stop.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  /* Enter the function's context; dominance info is required by the walker
     below and is freed again before pop_cfun.  */
  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Bucket all outgoing edges (direct and indirect) by the basic block of
     their call statement so the dominator walk can process them per-BB.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2618
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.
   For each argument of E, its jump function (which may refer to formal
   parameters of E->caller) is composed with the corresponding jump function
   of CS, so that it refers to CS->caller's parameters instead.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Combine the polymorphic context of the referenced argument of CS
	     (shifted by the ancestor offset) into DST's context.  */
	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Propagate known aggregate contents, rebasing offsets by the
	     ancestor offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  /* Compose the ancestor jump function with SRC: a simple
	     pass-through redirects to its formal, an ancestor adds its
	     offset, anything else becomes unknown.  */
	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      /* A simple pass-through composes directly with SRC: DST simply
		 becomes a copy of SRC (with aggregate preservation ANDed
		 in where applicable).  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
2812
5ce97055
JH
2813/* If TARGET is an addr_expr of a function declaration, make it the
2814 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2815 Otherwise, return NULL. */
b258210c 2816
3949c4a7 2817struct cgraph_edge *
5ce97055
JH
2818ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2819 bool speculative)
b258210c
MJ
2820{
2821 struct cgraph_node *callee;
48b1474e 2822 bool unreachable = false;
b258210c 2823
ceeffab0
MJ
2824 if (TREE_CODE (target) == ADDR_EXPR)
2825 target = TREE_OPERAND (target, 0);
b258210c 2826 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2827 {
2828 target = canonicalize_constructor_val (target, NULL);
2829 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2830 {
db66bf68
JH
2831 /* Member pointer call that goes through a VMT lookup. */
2832 if (ie->indirect_info->member_ptr
2833 /* Or if target is not an invariant expression and we do not
2834 know if it will evaulate to function at runtime.
2835 This can happen when folding through &VAR, where &VAR
2836 is IP invariant, but VAR itself is not.
2837
2838 TODO: Revisit this when GCC 5 is branched. It seems that
2839 member_ptr check is not needed and that we may try to fold
2840 the expression and see if VAR is readonly. */
2841 || !is_gimple_ip_invariant (target))
2842 {
2843 if (dump_enabled_p ())
2844 {
2845 location_t loc = gimple_location_safe (ie->call_stmt);
2846 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
464d0118
ML
2847 "discovered direct call non-invariant %s\n",
2848 ie->caller->dump_name ());
db66bf68
JH
2849 }
2850 return NULL;
2851 }
2852
c13bc3d9 2853
2b5f0895
XDL
2854 if (dump_enabled_p ())
2855 {
807b7d62
ML
2856 location_t loc = gimple_location_safe (ie->call_stmt);
2857 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
464d0118 2858 "discovered direct call to non-function in %s, "
807b7d62 2859 "making it __builtin_unreachable\n",
464d0118 2860 ie->caller->dump_name ());
2b5f0895 2861 }
3c9e6fca 2862
48b1474e 2863 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2864 callee = cgraph_node::get_create (target);
48b1474e 2865 unreachable = true;
a0a7b611 2866 }
48b1474e 2867 else
d52f5295 2868 callee = cgraph_node::get (target);
a0a7b611 2869 }
48b1474e 2870 else
d52f5295 2871 callee = cgraph_node::get (target);
a0a7b611
JH
2872
2873 /* Because may-edges are not explicitely represented and vtable may be external,
2874 we may create the first reference to the object in the unit. */
2875 if (!callee || callee->global.inlined_to)
2876 {
a0a7b611
JH
2877
2878 /* We are better to ensure we can refer to it.
2879 In the case of static functions we are out of luck, since we already
2880 removed its body. In the case of public functions we may or may
2881 not introduce the reference. */
2882 if (!canonicalize_constructor_val (target, NULL)
2883 || !TREE_PUBLIC (target))
2884 {
2885 if (dump_file)
2886 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
464d0118
ML
2887 "(%s -> %s) but can not refer to it. Giving up.\n",
2888 ie->caller->dump_name (),
2889 ie->callee->dump_name ());
a0a7b611
JH
2890 return NULL;
2891 }
d52f5295 2892 callee = cgraph_node::get_create (target);
a0a7b611 2893 }
2b5f0895 2894
0127c169
JH
2895 /* If the edge is already speculated. */
2896 if (speculative && ie->speculative)
2897 {
2898 struct cgraph_edge *e2;
2899 struct ipa_ref *ref;
2900 ie->speculative_call_info (e2, ie, ref);
2901 if (e2->callee->ultimate_alias_target ()
2902 != callee->ultimate_alias_target ())
2903 {
2904 if (dump_file)
464d0118
ML
2905 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2906 "target (%s -> %s) but the call is already "
2907 "speculated to %s. Giving up.\n",
2908 ie->caller->dump_name (), callee->dump_name (),
2909 e2->callee->dump_name ());
0127c169
JH
2910 }
2911 else
2912 {
2913 if (dump_file)
2914 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
464d0118
ML
2915 "(%s -> %s) this agree with previous speculation.\n",
2916 ie->caller->dump_name (), callee->dump_name ());
0127c169
JH
2917 }
2918 return NULL;
2919 }
2920
2b5f0895
XDL
2921 if (!dbg_cnt (devirt))
2922 return NULL;
2923
1dbee8c9 2924 ipa_check_create_node_params ();
ceeffab0 2925
81fa35bd
MJ
2926 /* We can not make edges to inline clones. It is bug that someone removed
2927 the cgraph node too early. */
17afc0fe
JH
2928 gcc_assert (!callee->global.inlined_to);
2929
48b1474e 2930 if (dump_file && !unreachable)
b258210c 2931 {
5ce97055 2932 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 2933 "(%s -> %s), for stmt ",
b258210c 2934 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2935 speculative ? "speculative" : "known",
464d0118
ML
2936 ie->caller->dump_name (),
2937 callee->dump_name ());
b258210c
MJ
2938 if (ie->call_stmt)
2939 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2940 else
2941 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2942 }
2b5f0895
XDL
2943 if (dump_enabled_p ())
2944 {
807b7d62 2945 location_t loc = gimple_location_safe (ie->call_stmt);
3c9e6fca 2946
807b7d62
ML
2947 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2948 "converting indirect call in %s to direct call to %s\n",
2949 ie->caller->name (), callee->name ());
2b5f0895 2950 }
5ce97055 2951 if (!speculative)
d8d5aef1
JH
2952 {
2953 struct cgraph_edge *orig = ie;
2954 ie = ie->make_direct (callee);
2955 /* If we resolved speculative edge the cost is already up to date
2956 for direct call (adjusted by inline_edge_duplication_hook). */
2957 if (ie == orig)
2958 {
56f62793 2959 ipa_call_summary *es = ipa_call_summaries->get (ie);
d8d5aef1
JH
2960 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2961 - eni_size_weights.call_cost);
2962 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2963 - eni_time_weights.call_cost);
2964 }
2965 }
5ce97055
JH
2966 else
2967 {
2968 if (!callee->can_be_discarded_p ())
2969 {
2970 cgraph_node *alias;
2971 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2972 if (alias)
2973 callee = alias;
2974 }
d8d5aef1 2975 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 2976 ie = ie->make_speculative
1bad9c18 2977 (callee, ie->count.apply_scale (8, 10));
5ce97055 2978 }
749aa96d 2979
b258210c 2980 return ie;
3e293154
MJ
2981}
2982
91bb9f80
MJ
2983/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2984 CONSTRUCTOR and return it. Return NULL if the search fails for some
2985 reason. */
2986
2987static tree
2988find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2989{
2990 tree type = TREE_TYPE (constructor);
2991 if (TREE_CODE (type) != ARRAY_TYPE
2992 && TREE_CODE (type) != RECORD_TYPE)
2993 return NULL;
2994
2995 unsigned ix;
2996 tree index, val;
2997 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2998 {
2999 HOST_WIDE_INT elt_offset;
3000 if (TREE_CODE (type) == ARRAY_TYPE)
3001 {
3002 offset_int off;
3003 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3004 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3005
3006 if (index)
3007 {
db9bbdec
RB
3008 if (TREE_CODE (index) == RANGE_EXPR)
3009 off = wi::to_offset (TREE_OPERAND (index, 0));
3010 else
3011 off = wi::to_offset (index);
91bb9f80
MJ
3012 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3013 {
3014 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3015 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3016 off = wi::sext (off - wi::to_offset (low_bound),
3017 TYPE_PRECISION (TREE_TYPE (index)));
3018 }
3019 off *= wi::to_offset (unit_size);
db9bbdec
RB
3020 /* ??? Handle more than just the first index of a
3021 RANGE_EXPR. */
91bb9f80
MJ
3022 }
3023 else
3024 off = wi::to_offset (unit_size) * ix;
3025
3026 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3027 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3028 continue;
3029 elt_offset = off.to_shwi ();
3030 }
3031 else if (TREE_CODE (type) == RECORD_TYPE)
3032 {
3033 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3034 if (DECL_BIT_FIELD (index))
3035 continue;
3036 elt_offset = int_bit_position (index);
3037 }
3038 else
3039 gcc_unreachable ();
3040
3041 if (elt_offset > req_offset)
3042 return NULL;
3043
3044 if (TREE_CODE (val) == CONSTRUCTOR)
3045 return find_constructor_constant_at_offset (val,
3046 req_offset - elt_offset);
3047
3048 if (elt_offset == req_offset
3049 && is_gimple_reg_type (TREE_TYPE (val))
3050 && is_gimple_ip_invariant (val))
3051 return val;
3052 }
3053 return NULL;
3054}
3055
3056/* Check whether SCALAR could be used to look up an aggregate interprocedural
3057 invariant from a static constructor and if so, return it. Otherwise return
3058 NULL. */
3059
3060static tree
3061ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3062{
3063 if (by_ref)
3064 {
3065 if (TREE_CODE (scalar) != ADDR_EXPR)
3066 return NULL;
3067 scalar = TREE_OPERAND (scalar, 0);
3068 }
3069
8813a647 3070 if (!VAR_P (scalar)
91bb9f80
MJ
3071 || !is_global_var (scalar)
3072 || !TREE_READONLY (scalar)
3073 || !DECL_INITIAL (scalar)
3074 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3075 return NULL;
3076
3077 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3078}
3079
3080/* Retrieve value from aggregate jump function AGG or static initializer of
3081 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3082 none. BY_REF specifies whether the value has to be passed by reference or
3083 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3084 to is set to true if the value comes from an initializer of a constant. */
8b7773a4
MJ
3085
3086tree
91bb9f80
MJ
3087ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3088 HOST_WIDE_INT offset, bool by_ref,
3089 bool *from_global_constant)
8b7773a4
MJ
3090{
3091 struct ipa_agg_jf_item *item;
3092 int i;
3093
91bb9f80
MJ
3094 if (scalar)
3095 {
3096 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3097 if (res)
3098 {
3099 if (from_global_constant)
3100 *from_global_constant = true;
3101 return res;
3102 }
3103 }
3104
3105 if (!agg
3106 || by_ref != agg->by_ref)
8b7773a4
MJ
3107 return NULL;
3108
9771b263 3109 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
3110 if (item->offset == offset)
3111 {
3112 /* Currently we do not have clobber values, return NULL for them once
3113 we do. */
3114 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3115 if (from_global_constant)
3116 *from_global_constant = false;
2c9561b5
MJ
3117 return item->value;
3118 }
8b7773a4
MJ
3119 return NULL;
3120}
3121
4502fe8d 3122/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3123 reference description RDESC. Return true if the reference has been
3124 successfully found and removed. */
4502fe8d 3125
568cda29 3126static bool
5e20cdc9 3127remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3128{
3129 struct ipa_ref *to_del;
3130 struct cgraph_edge *origin;
3131
3132 origin = rdesc->cs;
a854f856
MJ
3133 if (!origin)
3134 return false;
d122681a
ML
3135 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3136 origin->lto_stmt_uid);
568cda29
MJ
3137 if (!to_del)
3138 return false;
3139
d122681a 3140 to_del->remove_reference ();
4502fe8d 3141 if (dump_file)
464d0118
ML
3142 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3143 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
568cda29 3144 return true;
4502fe8d
MJ
3145}
3146
3147/* If JFUNC has a reference description with refcount different from
3148 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3149 NULL. JFUNC must be a constant jump function. */
3150
3151static struct ipa_cst_ref_desc *
3152jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3153{
3154 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3155 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3156 return rdesc;
3157 else
3158 return NULL;
3159}
3160
568cda29
MJ
3161/* If the value of constant jump function JFUNC is an address of a function
3162 declaration, return the associated call graph node. Otherwise return
3163 NULL. */
3164
3165static cgraph_node *
3166cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3167{
3168 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3169 tree cst = ipa_get_jf_constant (jfunc);
3170 if (TREE_CODE (cst) != ADDR_EXPR
3171 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3172 return NULL;
3173
d52f5295 3174 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3175}
3176
3177
3178/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3179 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3180 the edge specified in the rdesc. Return false if either the symbol or the
3181 reference could not be found, otherwise return true. */
3182
3183static bool
3184try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3185{
3186 struct ipa_cst_ref_desc *rdesc;
3187 if (jfunc->type == IPA_JF_CONST
3188 && (rdesc = jfunc_rdesc_usable (jfunc))
3189 && --rdesc->refcount == 0)
3190 {
5e20cdc9 3191 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3192 if (!symbol)
3193 return false;
3194
3195 return remove_described_reference (symbol, rdesc);
3196 }
3197 return true;
3198}
3199
b258210c
MJ
3200/* Try to find a destination for indirect edge IE that corresponds to a simple
3201 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3202 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3203 the type of the parameter to which the result of JFUNC is passed. If it can
3204 be determined, return the newly direct edge, otherwise return NULL.
d250540a 3205 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 3206
b258210c
MJ
3207static struct cgraph_edge *
3208try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3209 struct ipa_jump_func *jfunc, tree target_type,
d250540a 3210 struct ipa_node_params *new_root_info)
b258210c 3211{
4502fe8d 3212 struct cgraph_edge *cs;
b258210c 3213 tree target;
042ae7d2 3214 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3215 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3216 if (agg_contents)
3217 {
3218 bool from_global_constant;
3219 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3220 ie->indirect_info->offset,
3221 ie->indirect_info->by_ref,
3222 &from_global_constant);
3223 if (target
3224 && !from_global_constant
3225 && !ie->indirect_info->guaranteed_unmodified)
3226 return NULL;
3227 }
b258210c 3228 else
91bb9f80 3229 target = scalar;
d250540a
MJ
3230 if (!target)
3231 return NULL;
4502fe8d
MJ
3232 cs = ipa_make_edge_direct_to_target (ie, target);
3233
a12cd2db 3234 if (cs && !agg_contents)
568cda29
MJ
3235 {
3236 bool ok;
3237 gcc_checking_assert (cs->callee
ae6d0907
MJ
3238 && (cs != ie
3239 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3240 || !cgraph_node_for_jfunc (jfunc)
3241 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3242 ok = try_decrement_rdesc_refcount (jfunc);
3243 gcc_checking_assert (ok);
3244 }
4502fe8d
MJ
3245
3246 return cs;
b258210c
MJ
3247}
3248
bec81025
MJ
3249/* Return the target to be used in cases of impossible devirtualization. IE
3250 and target (the latter can be NULL) are dumped when dumping is enabled. */
3251
72972c22
MJ
3252tree
3253ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3254{
3255 if (dump_file)
3256 {
3257 if (target)
3258 fprintf (dump_file,
464d0118
ML
3259 "Type inconsistent devirtualization: %s->%s\n",
3260 ie->caller->dump_name (),
bec81025
MJ
3261 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3262 else
3263 fprintf (dump_file,
464d0118
ML
3264 "No devirtualization target in %s\n",
3265 ie->caller->dump_name ());
bec81025
MJ
3266 }
3267 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3268 cgraph_node::get_create (new_target);
bec81025
MJ
3269 return new_target;
3270}
3271
d250540a
MJ
3272/* Try to find a destination for indirect edge IE that corresponds to a virtual
3273 call based on a formal parameter which is described by jump function JFUNC
3274 and if it can be determined, make it direct and return the direct edge.
44210a96
MJ
3275 Otherwise, return NULL. CTX describes the polymorphic context that the
3276 parameter the call is based on brings along with it. */
b258210c
MJ
3277
3278static struct cgraph_edge *
3279try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a 3280 struct ipa_jump_func *jfunc,
44210a96 3281 struct ipa_polymorphic_call_context ctx)
3e293154 3282{
44210a96 3283 tree target = NULL;
5ce97055 3284 bool speculative = false;
85942f45 3285
2bf86c84 3286 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
85942f45 3287 return NULL;
b258210c 3288
44210a96 3289 gcc_assert (!ie->indirect_info->by_ref);
5ce97055
JH
3290
3291 /* Try to do lookup via known virtual table pointer value. */
2bf86c84
JH
3292 if (!ie->indirect_info->vptr_changed
3293 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
85942f45 3294 {
9de2f554
JH
3295 tree vtable;
3296 unsigned HOST_WIDE_INT offset;
91bb9f80
MJ
3297 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3298 : NULL;
3299 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
85942f45
JH
3300 ie->indirect_info->offset,
3301 true);
9de2f554
JH
3302 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3303 {
2994ab20 3304 bool can_refer;
0127c169 3305 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2994ab20
JH
3306 vtable, offset, &can_refer);
3307 if (can_refer)
9de2f554 3308 {
2994ab20
JH
3309 if (!t
3310 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3311 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
9de2f554 3312 || !possible_polymorphic_call_target_p
0127c169
JH
3313 (ie, cgraph_node::get (t)))
3314 {
33c3b6be 3315 /* Do not speculate builtin_unreachable, it is stupid! */
0127c169
JH
3316 if (!ie->indirect_info->vptr_changed)
3317 target = ipa_impossible_devirt_target (ie, target);
2994ab20
JH
3318 else
3319 target = NULL;
0127c169
JH
3320 }
3321 else
3322 {
3323 target = t;
3324 speculative = ie->indirect_info->vptr_changed;
3325 }
9de2f554
JH
3326 }
3327 }
85942f45
JH
3328 }
3329
44210a96
MJ
3330 ipa_polymorphic_call_context ie_context (ie);
3331 vec <cgraph_node *>targets;
3332 bool final;
d250540a 3333
44210a96
MJ
3334 ctx.offset_by (ie->indirect_info->offset);
3335 if (ie->indirect_info->vptr_changed)
3336 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3337 ie->indirect_info->otr_type);
3338 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3339 targets = possible_polymorphic_call_targets
3340 (ie->indirect_info->otr_type,
3341 ie->indirect_info->otr_token,
3342 ctx, &final);
3343 if (final && targets.length () <= 1)
5ce97055 3344 {
33c3b6be 3345 speculative = false;
44210a96
MJ
3346 if (targets.length () == 1)
3347 target = targets[0]->decl;
3348 else
3349 target = ipa_impossible_devirt_target (ie, NULL_TREE);
5ce97055 3350 }
2bf86c84 3351 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
44210a96 3352 && !ie->speculative && ie->maybe_hot_p ())
5bccb77a 3353 {
44210a96
MJ
3354 cgraph_node *n;
3355 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3356 ie->indirect_info->otr_token,
3357 ie->indirect_info->context);
3358 if (n)
5ce97055 3359 {
44210a96
MJ
3360 target = n->decl;
3361 speculative = true;
5ce97055 3362 }
5bccb77a 3363 }
b258210c
MJ
3364
3365 if (target)
450ad0cd 3366 {
44210a96
MJ
3367 if (!possible_polymorphic_call_target_p
3368 (ie, cgraph_node::get_create (target)))
0127c169 3369 {
29c43c83 3370 if (speculative)
0127c169
JH
3371 return NULL;
3372 target = ipa_impossible_devirt_target (ie, target);
3373 }
5ce97055 3374 return ipa_make_edge_direct_to_target (ie, target, speculative);
450ad0cd 3375 }
b258210c
MJ
3376 else
3377 return NULL;
3e293154
MJ
3378}
3379
3380/* Update the param called notes associated with NODE when CS is being inlined,
3381 assuming NODE is (potentially indirectly) inlined into CS->callee.
3382 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 3383 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 3384 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 3385
f8e2a1ed 3386static bool
e33c6cd6
MJ
3387update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3388 struct cgraph_node *node,
d52f5295 3389 vec<cgraph_edge *> *new_edges)
3e293154 3390{
9e97ff61 3391 struct ipa_edge_args *top;
b258210c 3392 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
e5cf5e11 3393 struct ipa_node_params *new_root_info, *inlined_node_info;
f8e2a1ed 3394 bool res = false;
3e293154 3395
e33c6cd6 3396 ipa_check_create_edge_args ();
9e97ff61 3397 top = IPA_EDGE_REF (cs);
d250540a
MJ
3398 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3399 ? cs->caller->global.inlined_to
3400 : cs->caller);
e5cf5e11 3401 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
e33c6cd6
MJ
3402
3403 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 3404 {
e33c6cd6 3405 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 3406 struct ipa_jump_func *jfunc;
8b7773a4 3407 int param_index;
3ff29913 3408 cgraph_node *spec_target = NULL;
3e293154 3409
e33c6cd6 3410 next_ie = ie->next_callee;
3e293154 3411
5f902d76
JH
3412 if (ici->param_index == -1)
3413 continue;
e33c6cd6 3414
3e293154 3415 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 3416 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 3417 {
5ee53a06 3418 ici->param_index = -1;
3e293154
MJ
3419 continue;
3420 }
3421
8b7773a4
MJ
3422 param_index = ici->param_index;
3423 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06 3424
3ff29913
JH
3425 if (ie->speculative)
3426 {
3427 struct cgraph_edge *de;
3428 struct ipa_ref *ref;
3429 ie->speculative_call_info (de, ie, ref);
3430 spec_target = de->callee;
3431 }
3432
2bf86c84 3433 if (!opt_for_fn (node->decl, flag_indirect_inlining))
36b72910
JH
3434 new_direct_edge = NULL;
3435 else if (ici->polymorphic)
5ce97055 3436 {
44210a96
MJ
3437 ipa_polymorphic_call_context ctx;
3438 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3439 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
5ce97055 3440 }
b258210c 3441 else
e5cf5e11
PK
3442 {
3443 tree target_type = ipa_get_type (inlined_node_info, param_index);
3444 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3445 target_type,
3446 new_root_info);
3447 }
3448
042ae7d2 3449 /* If speculation was removed, then we need to do nothing. */
3ff29913
JH
3450 if (new_direct_edge && new_direct_edge != ie
3451 && new_direct_edge->callee == spec_target)
042ae7d2
JH
3452 {
3453 new_direct_edge->indirect_inlining_edge = 1;
3454 top = IPA_EDGE_REF (cs);
3455 res = true;
73d098df
JH
3456 if (!new_direct_edge->speculative)
3457 continue;
042ae7d2
JH
3458 }
3459 else if (new_direct_edge)
685b0d13 3460 {
b258210c 3461 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
3462 if (new_direct_edge->call_stmt)
3463 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
3464 = !gimple_check_call_matching_types (
3465 new_direct_edge->call_stmt,
67348ccc 3466 new_direct_edge->callee->decl, false);
b258210c
MJ
3467 if (new_edges)
3468 {
9771b263 3469 new_edges->safe_push (new_direct_edge);
b258210c
MJ
3470 res = true;
3471 }
042ae7d2 3472 top = IPA_EDGE_REF (cs);
3ff29913
JH
3473 /* If speculative edge was introduced we still need to update
3474 call info of the indirect edge. */
3475 if (!new_direct_edge->speculative)
3476 continue;
685b0d13 3477 }
3ff29913
JH
3478 if (jfunc->type == IPA_JF_PASS_THROUGH
3479 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
36b72910 3480 {
d0502276
JH
3481 if (ici->agg_contents
3482 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3483 && !ici->polymorphic)
36b72910
JH
3484 ici->param_index = -1;
3485 else
d0502276
JH
3486 {
3487 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3488 if (ici->polymorphic
3489 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3490 ici->vptr_changed = true;
3491 }
36b72910
JH
3492 }
3493 else if (jfunc->type == IPA_JF_ANCESTOR)
3494 {
d0502276
JH
3495 if (ici->agg_contents
3496 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3497 && !ici->polymorphic)
36b72910
JH
3498 ici->param_index = -1;
3499 else
3500 {
3501 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3502 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
d0502276
JH
3503 if (ici->polymorphic
3504 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3505 ici->vptr_changed = true;
36b72910
JH
3506 }
3507 }
3508 else
3509 /* Either we can find a destination for this edge now or never. */
3510 ici->param_index = -1;
3e293154 3511 }
e33c6cd6 3512
f8e2a1ed 3513 return res;
3e293154
MJ
3514}
3515
3516/* Recursively traverse subtree of NODE (including node) made of inlined
3517 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3518 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3519 update_jump_functions_after_inlining on all non-inlined edges that lead out
3520 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3521 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3522 created. */
be95e2b9 3523
f8e2a1ed 3524static bool
3e293154
MJ
3525propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3526 struct cgraph_node *node,
d52f5295 3527 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3528{
3529 struct cgraph_edge *e;
f8e2a1ed 3530 bool res;
3e293154 3531
e33c6cd6 3532 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3533
3534 for (e = node->callees; e; e = e->next_callee)
3535 if (!e->inline_failed)
f8e2a1ed 3536 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3537 else
3538 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3539 for (e = node->indirect_calls; e; e = e->next_callee)
3540 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3541
3542 return res;
3e293154
MJ
3543}
3544
4502fe8d
MJ
3545/* Combine two controlled uses counts as done during inlining. */
3546
3547static int
3548combine_controlled_uses_counters (int c, int d)
3549{
3550 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3551 return IPA_UNDESCRIBED_USE;
3552 else
3553 return c + d - 1;
3554}
3555
3556/* Propagate number of controlled users from CS->caleee to the new root of the
3557 tree of inlined nodes. */
3558
3559static void
3560propagate_controlled_uses (struct cgraph_edge *cs)
3561{
3562 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3563 struct cgraph_node *new_root = cs->caller->global.inlined_to
3564 ? cs->caller->global.inlined_to : cs->caller;
3565 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3566 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3567 int count, i;
3568
3569 count = MIN (ipa_get_cs_argument_count (args),
3570 ipa_get_param_count (old_root_info));
3571 for (i = 0; i < count; i++)
3572 {
3573 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3574 struct ipa_cst_ref_desc *rdesc;
3575
3576 if (jf->type == IPA_JF_PASS_THROUGH)
3577 {
3578 int src_idx, c, d;
3579 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3580 c = ipa_get_controlled_uses (new_root_info, src_idx);
3581 d = ipa_get_controlled_uses (old_root_info, i);
3582
3583 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3584 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3585 c = combine_controlled_uses_counters (c, d);
3586 ipa_set_controlled_uses (new_root_info, src_idx, c);
3587 if (c == 0 && new_root_info->ipcp_orig_node)
3588 {
3589 struct cgraph_node *n;
3590 struct ipa_ref *ref;
44210a96 3591 tree t = new_root_info->known_csts[src_idx];
4502fe8d
MJ
3592
3593 if (t && TREE_CODE (t) == ADDR_EXPR
3594 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
d52f5295 3595 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
d122681a 3596 && (ref = new_root->find_reference (n, NULL, 0)))
4502fe8d
MJ
3597 {
3598 if (dump_file)
3599 fprintf (dump_file, "ipa-prop: Removing cloning-created "
464d0118
ML
3600 "reference from %s to %s.\n",
3601 new_root->dump_name (),
3602 n->dump_name ());
d122681a 3603 ref->remove_reference ();
4502fe8d
MJ
3604 }
3605 }
3606 }
3607 else if (jf->type == IPA_JF_CONST
3608 && (rdesc = jfunc_rdesc_usable (jf)))
3609 {
3610 int d = ipa_get_controlled_uses (old_root_info, i);
3611 int c = rdesc->refcount;
3612 rdesc->refcount = combine_controlled_uses_counters (c, d);
3613 if (rdesc->refcount == 0)
3614 {
3615 tree cst = ipa_get_jf_constant (jf);
3616 struct cgraph_node *n;
3617 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3618 && TREE_CODE (TREE_OPERAND (cst, 0))
3619 == FUNCTION_DECL);
d52f5295 3620 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4502fe8d
MJ
3621 if (n)
3622 {
3623 struct cgraph_node *clone;
568cda29 3624 bool ok;
67348ccc 3625 ok = remove_described_reference (n, rdesc);
568cda29 3626 gcc_checking_assert (ok);
4502fe8d
MJ
3627
3628 clone = cs->caller;
3629 while (clone->global.inlined_to
3630 && clone != rdesc->cs->caller
3631 && IPA_NODE_REF (clone)->ipcp_orig_node)
3632 {
3633 struct ipa_ref *ref;
d122681a 3634 ref = clone->find_reference (n, NULL, 0);
4502fe8d
MJ
3635 if (ref)
3636 {
3637 if (dump_file)
3638 fprintf (dump_file, "ipa-prop: Removing "
3639 "cloning-created reference "
464d0118
ML
3640 "from %s to %s.\n",
3641 clone->dump_name (),
3642 n->dump_name ());
d122681a 3643 ref->remove_reference ();
4502fe8d
MJ
3644 }
3645 clone = clone->callers->caller;
3646 }
3647 }
3648 }
3649 }
3650 }
3651
3652 for (i = ipa_get_param_count (old_root_info);
3653 i < ipa_get_cs_argument_count (args);
3654 i++)
3655 {
3656 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3657
3658 if (jf->type == IPA_JF_CONST)
3659 {
3660 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3661 if (rdesc)
3662 rdesc->refcount = IPA_UNDESCRIBED_USE;
3663 }
3664 else if (jf->type == IPA_JF_PASS_THROUGH)
3665 ipa_set_controlled_uses (new_root_info,
3666 jf->value.pass_through.formal_id,
3667 IPA_UNDESCRIBED_USE);
3668 }
3669}
3670
3e293154
MJ
3671/* Update jump functions and call note functions on inlining the call site CS.
3672 CS is expected to lead to a node already cloned by
3673 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3674 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3675 created. */
be95e2b9 3676
f8e2a1ed 3677bool
3e293154 3678ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3679 vec<cgraph_edge *> *new_edges)
3e293154 3680{
5ee53a06 3681 bool changed;
f8e2a1ed
MJ
3682 /* Do nothing if the preparation phase has not been carried out yet
3683 (i.e. during early inlining). */
dd912cb8 3684 if (!ipa_node_params_sum)
f8e2a1ed 3685 return false;
6fe906a3 3686 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3687
4502fe8d 3688 propagate_controlled_uses (cs);
5ee53a06
JH
3689 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3690
5ee53a06 3691 return changed;
518dc859
RL
3692}
3693
86cd0334
MJ
3694/* Ensure that array of edge arguments infos is big enough to accommodate a
3695 structure for all edges and reallocates it if not. Also, allocate
3696 associated hash tables is they do not already exist. */
3697
3698void
3699ipa_check_create_edge_args (void)
3700{
6fe906a3
MJ
3701 if (!ipa_edge_args_sum)
3702 ipa_edge_args_sum
3703 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3704 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
3705 if (!ipa_bits_hash_table)
3706 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3707 if (!ipa_vr_hash_table)
3708 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3709}
3710
771578a0 3711/* Free all ipa_edge structures. */
be95e2b9 3712
518dc859 3713void
771578a0 3714ipa_free_all_edge_args (void)
518dc859 3715{
6fe906a3 3716 if (!ipa_edge_args_sum)
9771b263
DN
3717 return;
3718
6fe906a3
MJ
3719 ipa_edge_args_sum->release ();
3720 ipa_edge_args_sum = NULL;
518dc859
RL
3721}
3722
771578a0 3723/* Free all ipa_node_params structures. */
be95e2b9 3724
518dc859 3725void
771578a0 3726ipa_free_all_node_params (void)
518dc859 3727{
a0a348b1 3728 ipa_node_params_sum->release ();
dd912cb8 3729 ipa_node_params_sum = NULL;
771578a0
MJ
3730}
3731
9d3e0adc 3732/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 3733 tables if they do not already exist. */
04be694e
MJ
3734
3735void
9d3e0adc 3736ipcp_transformation_initialize (void)
04be694e 3737{
86cd0334
MJ
3738 if (!ipa_bits_hash_table)
3739 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3740 if (!ipa_vr_hash_table)
3741 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
9d3e0adc
ML
3742 if (ipcp_transformation_sum == NULL)
3743 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
04be694e
MJ
3744}
3745
2c9561b5
MJ
3746/* Set the aggregate replacements of NODE to be AGGVALS. */
3747
3748void
3749ipa_set_node_agg_value_chain (struct cgraph_node *node,
3750 struct ipa_agg_replacement_value *aggvals)
3751{
9d3e0adc
ML
3752 ipcp_transformation_initialize ();
3753 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3754 s->agg_values = aggvals;
2c9561b5
MJ
3755}
3756
6fe906a3
MJ
3757/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3758 count data structures accordingly. */
be95e2b9 3759
6fe906a3
MJ
3760void
3761ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 3762{
568cda29
MJ
3763 if (args->jump_functions)
3764 {
3765 struct ipa_jump_func *jf;
3766 int i;
3767 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3768 {
3769 struct ipa_cst_ref_desc *rdesc;
3770 try_decrement_rdesc_refcount (jf);
3771 if (jf->type == IPA_JF_CONST
3772 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3773 && rdesc->cs == cs)
3774 rdesc->cs = NULL;
3775 }
568cda29 3776 }
518dc859
RL
3777}
3778
6fe906a3
MJ
3779/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3780 reference count data strucutres accordingly. */
be95e2b9 3781
6fe906a3
MJ
3782void
3783ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3784 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 3785{
8b7773a4 3786 unsigned int i;
771578a0 3787
9771b263 3788 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3789 if (old_args->polymorphic_call_contexts)
3790 new_args->polymorphic_call_contexts
3791 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3792
9771b263 3793 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3794 {
3795 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3796 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3797
3798 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3799
3800 if (src_jf->type == IPA_JF_CONST)
3801 {
3802 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3803
3804 if (!src_rdesc)
3805 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3806 else if (src->caller == dst->caller)
3807 {
3808 struct ipa_ref *ref;
5e20cdc9 3809 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3810 gcc_checking_assert (n);
d122681a
ML
3811 ref = src->caller->find_reference (n, src->call_stmt,
3812 src->lto_stmt_uid);
568cda29 3813 gcc_checking_assert (ref);
d122681a 3814 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3815
601f3293 3816 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3817 dst_rdesc->cs = dst;
3818 dst_rdesc->refcount = src_rdesc->refcount;
3819 dst_rdesc->next_duplicate = NULL;
3820 dst_jf->value.constant.rdesc = dst_rdesc;
3821 }
4502fe8d
MJ
3822 else if (src_rdesc->cs == src)
3823 {
601f3293 3824 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3825 dst_rdesc->cs = dst;
4502fe8d 3826 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3827 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3828 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3829 dst_jf->value.constant.rdesc = dst_rdesc;
3830 }
3831 else
3832 {
3833 struct ipa_cst_ref_desc *dst_rdesc;
3834 /* This can happen during inlining, when a JFUNC can refer to a
3835 reference taken in a function up in the tree of inline clones.
3836 We need to find the duplicate that refers to our tree of
3837 inline clones. */
3838
3839 gcc_assert (dst->caller->global.inlined_to);
3840 for (dst_rdesc = src_rdesc->next_duplicate;
3841 dst_rdesc;
3842 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3843 {
3844 struct cgraph_node *top;
3845 top = dst_rdesc->cs->caller->global.inlined_to
3846 ? dst_rdesc->cs->caller->global.inlined_to
3847 : dst_rdesc->cs->caller;
3848 if (dst->caller->global.inlined_to == top)
3849 break;
3850 }
44a60244 3851 gcc_assert (dst_rdesc);
4502fe8d
MJ
3852 dst_jf->value.constant.rdesc = dst_rdesc;
3853 }
3854 }
6fe45955
MJ
3855 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3856 && src->caller == dst->caller)
3857 {
3858 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3859 ? dst->caller->global.inlined_to : dst->caller;
3860 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3861 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3862
3863 int c = ipa_get_controlled_uses (root_info, idx);
3864 if (c != IPA_UNDESCRIBED_USE)
3865 {
3866 c++;
3867 ipa_set_controlled_uses (root_info, idx, c);
3868 }
3869 }
4502fe8d 3870 }
771578a0
MJ
3871}
3872
dd912cb8 3873/* Analyze newly added function into callgraph. */
be95e2b9 3874
771578a0 3875static void
dd912cb8 3876ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3877{
dd912cb8
ML
3878 if (node->has_gimple_body_p ())
3879 ipa_analyze_node (node);
3880}
771578a0 3881
dd912cb8
ML
3882/* Hook that is called by summary when a node is duplicated. */
3883
3884void
3885ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3886 ipa_node_params *old_info,
3887 ipa_node_params *new_info)
3888{
3889 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3890
f65f1ae3 3891 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 3892 new_info->lattices = NULL;
771578a0 3893 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
3894 new_info->known_csts = old_info->known_csts.copy ();
3895 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 3896
8aab5218 3897 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3898 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3899 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3900
3901 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3902 if (old_av)
2c9561b5 3903 {
04be694e
MJ
3904 new_av = NULL;
3905 while (old_av)
3906 {
3907 struct ipa_agg_replacement_value *v;
2c9561b5 3908
04be694e
MJ
3909 v = ggc_alloc<ipa_agg_replacement_value> ();
3910 memcpy (v, old_av, sizeof (*v));
3911 v->next = new_av;
3912 new_av = v;
3913 old_av = old_av->next;
3914 }
3915 ipa_set_node_agg_value_chain (dst, new_av);
3916 }
3917
9d3e0adc 3918 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
04be694e 3919
8bc5448f 3920 if (src_trans)
04be694e 3921 {
9d3e0adc
ML
3922 ipcp_transformation_initialize ();
3923 src_trans = ipcp_transformation_sum->get_create (src);
3924 ipcp_transformation *dst_trans
3925 = ipcp_transformation_sum->get_create (dst);
86cd0334
MJ
3926
3927 dst_trans->bits = vec_safe_copy (src_trans->bits);
3928
8bc5448f 3929 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
8bc5448f
KV
3930 vec<ipa_vr, va_gc> *&dst_vr
3931 = ipcp_get_transformation_summary (dst)->m_vr;
8bc5448f
KV
3932 if (vec_safe_length (src_trans->m_vr) > 0)
3933 {
3934 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3935 for (unsigned i = 0; i < src_vr->length (); ++i)
3936 dst_vr->quick_push ((*src_vr)[i]);
3937 }
2c9561b5 3938 }
771578a0
MJ
3939}
3940
3941/* Register our cgraph hooks if they are not already there. */
be95e2b9 3942
518dc859 3943void
771578a0 3944ipa_register_cgraph_hooks (void)
518dc859 3945{
dd912cb8 3946 ipa_check_create_node_params ();
6fe906a3 3947 ipa_check_create_edge_args ();
dd912cb8 3948
dd912cb8 3949 function_insertion_hook_holder =
3dafb85c 3950 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3951}
518dc859 3952
771578a0 3953/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3954
771578a0
MJ
3955static void
3956ipa_unregister_cgraph_hooks (void)
3957{
3dafb85c 3958 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3959 function_insertion_hook_holder = NULL;
771578a0
MJ
3960}
3961
3962/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3963 longer needed after ipa-cp. */
be95e2b9 3964
771578a0 3965void
e33c6cd6 3966ipa_free_all_structures_after_ipa_cp (void)
3e293154 3967{
2bf86c84 3968 if (!optimize && !in_lto_p)
3e293154
MJ
3969 {
3970 ipa_free_all_edge_args ();
3971 ipa_free_all_node_params ();
2651e637
ML
3972 ipcp_sources_pool.release ();
3973 ipcp_cst_values_pool.release ();
3974 ipcp_poly_ctx_values_pool.release ();
3975 ipcp_agg_lattice_pool.release ();
3e293154 3976 ipa_unregister_cgraph_hooks ();
601f3293 3977 ipa_refdesc_pool.release ();
3e293154
MJ
3978 }
3979}
3980
3981/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3982 longer needed after indirect inlining. */
be95e2b9 3983
3e293154 3984void
e33c6cd6 3985ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3986{
3987 ipa_free_all_edge_args ();
3988 ipa_free_all_node_params ();
3989 ipa_unregister_cgraph_hooks ();
2651e637
ML
3990 ipcp_sources_pool.release ();
3991 ipcp_cst_values_pool.release ();
3992 ipcp_poly_ctx_values_pool.release ();
3993 ipcp_agg_lattice_pool.release ();
601f3293 3994 ipa_refdesc_pool.release ();
518dc859
RL
3995}
3996
dcd416e3 3997/* Print ipa_tree_map data structures of all functions in the
518dc859 3998 callgraph to F. */
be95e2b9 3999
518dc859 4000void
2c9561b5 4001ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4002{
4003 int i, count;
3e293154 4004 struct ipa_node_params *info;
518dc859 4005
67348ccc 4006 if (!node->definition)
3e293154
MJ
4007 return;
4008 info = IPA_NODE_REF (node);
464d0118 4009 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3e293154
MJ
4010 count = ipa_get_param_count (info);
4011 for (i = 0; i < count; i++)
518dc859 4012 {
4502fe8d
MJ
4013 int c;
4014
a4e33812 4015 fprintf (f, " ");
e067bd43 4016 ipa_dump_param (f, info, i);
339f49ec
JH
4017 if (ipa_is_param_used (info, i))
4018 fprintf (f, " used");
4502fe8d
MJ
4019 c = ipa_get_controlled_uses (info, i);
4020 if (c == IPA_UNDESCRIBED_USE)
4021 fprintf (f, " undescribed_use");
4022 else
4023 fprintf (f, " controlled_uses=%i", c);
3e293154 4024 fprintf (f, "\n");
518dc859
RL
4025 }
4026}
dcd416e3 4027
ca30a539 4028/* Print ipa_tree_map data structures of all functions in the
3e293154 4029 callgraph to F. */
be95e2b9 4030
3e293154 4031void
ca30a539 4032ipa_print_all_params (FILE * f)
3e293154
MJ
4033{
4034 struct cgraph_node *node;
4035
ca30a539 4036 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4037 FOR_EACH_FUNCTION (node)
ca30a539 4038 ipa_print_node_params (f, node);
3e293154 4039}
3f84bf08 4040
2c9561b5
MJ
4041/* Dump the AV linked list. */
4042
4043void
4044ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4045{
4046 bool comma = false;
4047 fprintf (f, " Aggregate replacements:");
4048 for (; av; av = av->next)
4049 {
4050 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4051 av->index, av->offset);
ef6cb4c7 4052 print_generic_expr (f, av->value);
2c9561b5
MJ
4053 comma = true;
4054 }
4055 fprintf (f, "\n");
4056}
4057
fb3f88cc
JH
4058/* Stream out jump function JUMP_FUNC to OB. */
4059
4060static void
4061ipa_write_jump_function (struct output_block *ob,
4062 struct ipa_jump_func *jump_func)
4063{
8b7773a4
MJ
4064 struct ipa_agg_jf_item *item;
4065 struct bitpack_d bp;
4066 int i, count;
fb3f88cc 4067
8b7773a4 4068 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
4069 switch (jump_func->type)
4070 {
4071 case IPA_JF_UNKNOWN:
4072 break;
4073 case IPA_JF_CONST:
5368224f 4074 gcc_assert (
4502fe8d
MJ
4075 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4076 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
4077 break;
4078 case IPA_JF_PASS_THROUGH:
412288f1 4079 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4080 if (jump_func->value.pass_through.operation == NOP_EXPR)
4081 {
4082 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4083 bp = bitpack_create (ob->main_stream);
4084 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4085 streamer_write_bitpack (&bp);
4086 }
a2b4c188
KV
4087 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4088 == tcc_unary)
4089 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4090 else
4091 {
4092 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4093 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4094 }
fb3f88cc
JH
4095 break;
4096 case IPA_JF_ANCESTOR:
412288f1 4097 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4098 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4099 bp = bitpack_create (ob->main_stream);
4100 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4101 streamer_write_bitpack (&bp);
fb3f88cc 4102 break;
8b7773a4
MJ
4103 }
4104
9771b263 4105 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4106 streamer_write_uhwi (ob, count);
4107 if (count)
4108 {
4109 bp = bitpack_create (ob->main_stream);
4110 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4111 streamer_write_bitpack (&bp);
4112 }
4113
9771b263 4114 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4115 {
4116 streamer_write_uhwi (ob, item->offset);
4117 stream_write_tree (ob, item->value, true);
fb3f88cc 4118 }
04be694e 4119
209ca542 4120 bp = bitpack_create (ob->main_stream);
86cd0334 4121 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4122 streamer_write_bitpack (&bp);
86cd0334 4123 if (jump_func->bits)
209ca542 4124 {
86cd0334
MJ
4125 streamer_write_widest_int (ob, jump_func->bits->value);
4126 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4127 }
86cd0334 4128 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4129 streamer_write_bitpack (&bp);
86cd0334 4130 if (jump_func->m_vr)
8bc5448f
KV
4131 {
4132 streamer_write_enum (ob->main_stream, value_rang_type,
86cd0334
MJ
4133 VR_LAST, jump_func->m_vr->type);
4134 stream_write_tree (ob, jump_func->m_vr->min, true);
4135 stream_write_tree (ob, jump_func->m_vr->max, true);
8bc5448f 4136 }
fb3f88cc
JH
4137}
4138
4139/* Read in jump function JUMP_FUNC from IB. */
4140
4141static void
4142ipa_read_jump_function (struct lto_input_block *ib,
4143 struct ipa_jump_func *jump_func,
4502fe8d 4144 struct cgraph_edge *cs,
fb3f88cc
JH
4145 struct data_in *data_in)
4146{
4a53743e
MJ
4147 enum jump_func_type jftype;
4148 enum tree_code operation;
8b7773a4 4149 int i, count;
fb3f88cc 4150
4a53743e
MJ
4151 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4152 switch (jftype)
fb3f88cc
JH
4153 {
4154 case IPA_JF_UNKNOWN:
04be694e 4155 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4156 break;
4157 case IPA_JF_CONST:
4502fe8d 4158 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4159 break;
4160 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4161 operation = (enum tree_code) streamer_read_uhwi (ib);
4162 if (operation == NOP_EXPR)
4163 {
4164 int formal_id = streamer_read_uhwi (ib);
4165 struct bitpack_d bp = streamer_read_bitpack (ib);
4166 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4167 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4168 }
a2b4c188
KV
4169 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4170 {
4171 int formal_id = streamer_read_uhwi (ib);
4172 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4173 }
4a53743e
MJ
4174 else
4175 {
4176 tree operand = stream_read_tree (ib, data_in);
4177 int formal_id = streamer_read_uhwi (ib);
4178 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4179 operation);
4180 }
fb3f88cc
JH
4181 break;
4182 case IPA_JF_ANCESTOR:
4a53743e
MJ
4183 {
4184 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4185 int formal_id = streamer_read_uhwi (ib);
4186 struct bitpack_d bp = streamer_read_bitpack (ib);
4187 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4188 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4189 break;
4190 }
8b7773a4
MJ
4191 }
4192
4193 count = streamer_read_uhwi (ib);
9771b263 4194 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4195 if (count)
4196 {
4a53743e 4197 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4198 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4199 }
4200 for (i = 0; i < count; i++)
4201 {
f32682ca
DN
4202 struct ipa_agg_jf_item item;
4203 item.offset = streamer_read_uhwi (ib);
4204 item.value = stream_read_tree (ib, data_in);
9771b263 4205 jump_func->agg.items->quick_push (item);
fb3f88cc 4206 }
04be694e
MJ
4207
4208 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4209 bool bits_known = bp_unpack_value (&bp, 1);
4210 if (bits_known)
4211 {
86cd0334
MJ
4212 widest_int value = streamer_read_widest_int (ib);
4213 widest_int mask = streamer_read_widest_int (ib);
4214 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4215 }
4216 else
86cd0334 4217 jump_func->bits = NULL;
8bc5448f
KV
4218
4219 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4220 bool vr_known = bp_unpack_value (&vr_bp, 1);
4221 if (vr_known)
4222 {
86cd0334
MJ
4223 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4224 VR_LAST);
4225 tree min = stream_read_tree (ib, data_in);
4226 tree max = stream_read_tree (ib, data_in);
4227 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4228 }
4229 else
86cd0334 4230 jump_func->m_vr = NULL;
fb3f88cc
JH
4231}
4232
e33c6cd6
MJ
4233/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4234 relevant to indirect inlining to OB. */
661e7330
MJ
4235
4236static void
e33c6cd6
MJ
4237ipa_write_indirect_edge_info (struct output_block *ob,
4238 struct cgraph_edge *cs)
661e7330 4239{
e33c6cd6 4240 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4241 struct bitpack_d bp;
e33c6cd6 4242
412288f1 4243 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4244 bp = bitpack_create (ob->main_stream);
4245 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4246 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4247 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4248 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4249 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4250 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 4251 streamer_write_bitpack (&bp);
ba392339
JH
4252 if (ii->agg_contents || ii->polymorphic)
4253 streamer_write_hwi (ob, ii->offset);
4254 else
4255 gcc_assert (ii->offset == 0);
b258210c
MJ
4256
4257 if (ii->polymorphic)
4258 {
412288f1 4259 streamer_write_hwi (ob, ii->otr_token);
b9393656 4260 stream_write_tree (ob, ii->otr_type, true);
ba392339 4261 ii->context.stream_out (ob);
b258210c 4262 }
661e7330
MJ
4263}
4264
e33c6cd6
MJ
4265/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4266 relevant to indirect inlining from IB. */
661e7330
MJ
4267
4268static void
e33c6cd6 4269ipa_read_indirect_edge_info (struct lto_input_block *ib,
ba392339 4270 struct data_in *data_in,
e33c6cd6 4271 struct cgraph_edge *cs)
661e7330 4272{
e33c6cd6 4273 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4274 struct bitpack_d bp;
661e7330 4275
412288f1 4276 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 4277 bp = streamer_read_bitpack (ib);
2465dcc2 4278 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4279 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4280 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4281 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 4282 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 4283 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
4284 if (ii->agg_contents || ii->polymorphic)
4285 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4286 else
4287 ii->offset = 0;
b258210c
MJ
4288 if (ii->polymorphic)
4289 {
412288f1 4290 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4291 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 4292 ii->context.stream_in (ib, data_in);
b258210c 4293 }
661e7330
MJ
4294}
4295
fb3f88cc
JH
4296/* Stream out NODE info to OB. */
4297
4298static void
4299ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4300{
4301 int node_ref;
7380e6ef 4302 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4303 struct ipa_node_params *info = IPA_NODE_REF (node);
4304 int j;
4305 struct cgraph_edge *e;
2465dcc2 4306 struct bitpack_d bp;
fb3f88cc 4307
7380e6ef 4308 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4309 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4310 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4311
0e8853ee
JH
4312 streamer_write_uhwi (ob, ipa_get_param_count (info));
4313 for (j = 0; j < ipa_get_param_count (info); j++)
4314 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4315 bp = bitpack_create (ob->main_stream);
8aab5218 4316 gcc_assert (info->analysis_done
661e7330 4317 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4318 gcc_assert (!info->node_enqueued);
4319 gcc_assert (!info->ipcp_orig_node);
4320 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4321 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4322 streamer_write_bitpack (&bp);
4502fe8d 4323 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
4324 {
4325 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4326 stream_write_tree (ob, ipa_get_type (info, j), true);
4327 }
fb3f88cc
JH
4328 for (e = node->callees; e; e = e->next_callee)
4329 {
4330 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4331
5ce97055
JH
4332 streamer_write_uhwi (ob,
4333 ipa_get_cs_argument_count (args) * 2
4334 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 4335 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4336 {
4337 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4338 if (args->polymorphic_call_contexts != NULL)
4339 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4340 }
fb3f88cc 4341 }
e33c6cd6 4342 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4343 {
4344 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4345
5ce97055
JH
4346 streamer_write_uhwi (ob,
4347 ipa_get_cs_argument_count (args) * 2
4348 + (args->polymorphic_call_contexts != NULL));
c8246dbe 4349 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4350 {
4351 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4352 if (args->polymorphic_call_contexts != NULL)
4353 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4354 }
c8246dbe
JH
4355 ipa_write_indirect_edge_info (ob, e);
4356 }
fb3f88cc
JH
4357}
4358
61502ca8 4359/* Stream in NODE info from IB. */
fb3f88cc
JH
4360
4361static void
4362ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4363 struct data_in *data_in)
4364{
4365 struct ipa_node_params *info = IPA_NODE_REF (node);
4366 int k;
4367 struct cgraph_edge *e;
2465dcc2 4368 struct bitpack_d bp;
fb3f88cc 4369
0e8853ee 4370 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4371
0e8853ee 4372 for (k = 0; k < ipa_get_param_count (info); k++)
f65f1ae3 4373 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
a5e14a42 4374
412288f1 4375 bp = streamer_read_bitpack (ib);
fb3f88cc 4376 if (ipa_get_param_count (info) != 0)
8aab5218 4377 info->analysis_done = true;
fb3f88cc
JH
4378 info->node_enqueued = false;
4379 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4380 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a 4381 for (k = 0; k < ipa_get_param_count (info); k++)
a5e14a42
MJ
4382 {
4383 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
f65f1ae3 4384 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
a5e14a42 4385 }
fb3f88cc
JH
4386 for (e = node->callees; e; e = e->next_callee)
4387 {
4388 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4389 int count = streamer_read_uhwi (ib);
5ce97055
JH
4390 bool contexts_computed = count & 1;
4391 count /= 2;
fb3f88cc 4392
fb3f88cc
JH
4393 if (!count)
4394 continue;
9771b263 4395 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
4396 if (contexts_computed)
4397 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
fb3f88cc 4398
fb3f88cc 4399 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
4400 {
4401 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4402 data_in);
4403 if (contexts_computed)
4404 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4405 }
fb3f88cc 4406 }
e33c6cd6 4407 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4408 {
4409 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4410 int count = streamer_read_uhwi (ib);
5ce97055
JH
4411 bool contexts_computed = count & 1;
4412 count /= 2;
c8246dbe 4413
c8246dbe
JH
4414 if (count)
4415 {
9771b263 4416 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
4417 if (contexts_computed)
4418 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
c8246dbe 4419 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
4420 {
4421 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4422 data_in);
4423 if (contexts_computed)
4424 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4425 }
c8246dbe
JH
4426 }
4427 ipa_read_indirect_edge_info (ib, data_in, e);
4428 }
fb3f88cc
JH
4429}
4430
4431/* Write jump functions for nodes in SET. */
4432
4433void
f27c1867 4434ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4435{
4436 struct cgraph_node *node;
93536c97 4437 struct output_block *ob;
fb3f88cc 4438 unsigned int count = 0;
f27c1867
JH
4439 lto_symtab_encoder_iterator lsei;
4440 lto_symtab_encoder_t encoder;
4441
6fe906a3 4442 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 4443 return;
fb3f88cc 4444
93536c97 4445 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4446 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 4447 ob->symbol = NULL;
f27c1867
JH
4448 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4449 lsei_next_function_in_partition (&lsei))
fb3f88cc 4450 {
f27c1867 4451 node = lsei_cgraph_node (lsei);
d52f5295 4452 if (node->has_gimple_body_p ()
c47d0034 4453 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4454 count++;
4455 }
4456
412288f1 4457 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4458
4459 /* Process all of the functions. */
f27c1867
JH
4460 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4461 lsei_next_function_in_partition (&lsei))
fb3f88cc 4462 {
f27c1867 4463 node = lsei_cgraph_node (lsei);
d52f5295 4464 if (node->has_gimple_body_p ()
c47d0034 4465 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4466 ipa_write_node_info (ob, node);
4467 }
412288f1 4468 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4469 produce_asm (ob, NULL);
4470 destroy_output_block (ob);
4471}
4472
4473/* Read section in file FILE_DATA of length LEN with data DATA. */
4474
4475static void
4476ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4477 size_t len)
4478{
4479 const struct lto_function_header *header =
4480 (const struct lto_function_header *) data;
4ad9a9de
EB
4481 const int cfg_offset = sizeof (struct lto_function_header);
4482 const int main_offset = cfg_offset + header->cfg_size;
4483 const int string_offset = main_offset + header->main_size;
fb3f88cc 4484 struct data_in *data_in;
fb3f88cc
JH
4485 unsigned int i;
4486 unsigned int count;
4487
207c68cd 4488 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4489 header->main_size, file_data->mode_table);
fb3f88cc
JH
4490
4491 data_in =
4492 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4493 header->string_size, vNULL);
412288f1 4494 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4495
4496 for (i = 0; i < count; i++)
4497 {
4498 unsigned int index;
4499 struct cgraph_node *node;
7380e6ef 4500 lto_symtab_encoder_t encoder;
fb3f88cc 4501
412288f1 4502 index = streamer_read_uhwi (&ib_main);
7380e6ef 4503 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4504 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4505 index));
67348ccc 4506 gcc_assert (node->definition);
fb3f88cc
JH
4507 ipa_read_node_info (&ib_main, node, data_in);
4508 }
4509 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4510 len);
4511 lto_data_in_delete (data_in);
4512}
4513
4514/* Read ipcp jump functions. */
4515
4516void
4517ipa_prop_read_jump_functions (void)
4518{
4519 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4520 struct lto_file_decl_data *file_data;
4521 unsigned int j = 0;
4522
4523 ipa_check_create_node_params ();
4524 ipa_check_create_edge_args ();
4525 ipa_register_cgraph_hooks ();
4526
4527 while ((file_data = file_data_vec[j++]))
4528 {
4529 size_t len;
4530 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4531
4532 if (data)
4533 ipa_prop_read_section (file_data, data, len);
4534 }
4535}
4536
2c9561b5 4537void
04be694e 4538write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
4539{
4540 int node_ref;
4541 unsigned int count = 0;
4542 lto_symtab_encoder_t encoder;
4543 struct ipa_agg_replacement_value *aggvals, *av;
4544
4545 aggvals = ipa_get_agg_replacements_for_node (node);
4546 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4547 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4548 streamer_write_uhwi (ob, node_ref);
4549
4550 for (av = aggvals; av; av = av->next)
4551 count++;
4552 streamer_write_uhwi (ob, count);
4553
4554 for (av = aggvals; av; av = av->next)
4555 {
7b920a9a
MJ
4556 struct bitpack_d bp;
4557
2c9561b5
MJ
4558 streamer_write_uhwi (ob, av->offset);
4559 streamer_write_uhwi (ob, av->index);
4560 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4561
4562 bp = bitpack_create (ob->main_stream);
4563 bp_pack_value (&bp, av->by_ref, 1);
4564 streamer_write_bitpack (&bp);
2c9561b5 4565 }
04be694e 4566
9d3e0adc 4567 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
4568 if (ts && vec_safe_length (ts->m_vr) > 0)
4569 {
4570 count = ts->m_vr->length ();
4571 streamer_write_uhwi (ob, count);
4572 for (unsigned i = 0; i < count; ++i)
4573 {
4574 struct bitpack_d bp;
4575 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4576 bp = bitpack_create (ob->main_stream);
4577 bp_pack_value (&bp, parm_vr->known, 1);
4578 streamer_write_bitpack (&bp);
4579 if (parm_vr->known)
4580 {
4581 streamer_write_enum (ob->main_stream, value_rang_type,
4582 VR_LAST, parm_vr->type);
4583 streamer_write_wide_int (ob, parm_vr->min);
4584 streamer_write_wide_int (ob, parm_vr->max);
4585 }
4586 }
4587 }
4588 else
4589 streamer_write_uhwi (ob, 0);
4590
209ca542
PK
4591 if (ts && vec_safe_length (ts->bits) > 0)
4592 {
4593 count = ts->bits->length ();
4594 streamer_write_uhwi (ob, count);
4595
4596 for (unsigned i = 0; i < count; ++i)
4597 {
86cd0334 4598 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 4599 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 4600 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 4601 streamer_write_bitpack (&bp);
86cd0334 4602 if (bits_jfunc)
209ca542 4603 {
86cd0334
MJ
4604 streamer_write_widest_int (ob, bits_jfunc->value);
4605 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
4606 }
4607 }
4608 }
4609 else
4610 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
4611}
4612
4613/* Stream in the aggregate value replacement chain for NODE from IB. */
4614
4615static void
04be694e
MJ
4616read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4617 data_in *data_in)
2c9561b5
MJ
4618{
4619 struct ipa_agg_replacement_value *aggvals = NULL;
4620 unsigned int count, i;
4621
4622 count = streamer_read_uhwi (ib);
4623 for (i = 0; i <count; i++)
4624 {
4625 struct ipa_agg_replacement_value *av;
7b920a9a 4626 struct bitpack_d bp;
2c9561b5 4627
766090c2 4628 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
4629 av->offset = streamer_read_uhwi (ib);
4630 av->index = streamer_read_uhwi (ib);
4631 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4632 bp = streamer_read_bitpack (ib);
4633 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4634 av->next = aggvals;
4635 aggvals = av;
4636 }
4637 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 4638
209ca542
PK
4639 count = streamer_read_uhwi (ib);
4640 if (count > 0)
4641 {
9d3e0adc
ML
4642 ipcp_transformation_initialize ();
4643 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
8bc5448f
KV
4644 vec_safe_grow_cleared (ts->m_vr, count);
4645 for (i = 0; i < count; i++)
4646 {
4647 ipa_vr *parm_vr;
4648 parm_vr = &(*ts->m_vr)[i];
4649 struct bitpack_d bp;
4650 bp = streamer_read_bitpack (ib);
4651 parm_vr->known = bp_unpack_value (&bp, 1);
4652 if (parm_vr->known)
4653 {
4654 parm_vr->type = streamer_read_enum (ib, value_range_type,
4655 VR_LAST);
4656 parm_vr->min = streamer_read_wide_int (ib);
4657 parm_vr->max = streamer_read_wide_int (ib);
4658 }
4659 }
4660 }
4661 count = streamer_read_uhwi (ib);
4662 if (count > 0)
4663 {
9d3e0adc
ML
4664 ipcp_transformation_initialize ();
4665 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
209ca542
PK
4666 vec_safe_grow_cleared (ts->bits, count);
4667
4668 for (i = 0; i < count; i++)
4669 {
209ca542 4670 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
4671 bool known = bp_unpack_value (&bp, 1);
4672 if (known)
209ca542 4673 {
86cd0334
MJ
4674 ipa_bits *bits
4675 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4676 streamer_read_widest_int (ib));
4677 (*ts->bits)[i] = bits;
209ca542
PK
4678 }
4679 }
4680 }
2c9561b5
MJ
4681}
4682
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count functions with gimple bodies so the record count can
     be streamed ahead of the records themselves.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second pass: stream the per-node transformation info, using the same
     iteration order as the counting pass.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4718
4719/* Read replacements section in file FILE_DATA of length LEN with data
4720 DATA. */
4721
4722static void
4723read_replacements_section (struct lto_file_decl_data *file_data,
4724 const char *data,
4725 size_t len)
4726{
4727 const struct lto_function_header *header =
4728 (const struct lto_function_header *) data;
4729 const int cfg_offset = sizeof (struct lto_function_header);
4730 const int main_offset = cfg_offset + header->cfg_size;
4731 const int string_offset = main_offset + header->main_size;
4732 struct data_in *data_in;
2c9561b5
MJ
4733 unsigned int i;
4734 unsigned int count;
4735
207c68cd 4736 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4737 header->main_size, file_data->mode_table);
2c9561b5
MJ
4738
4739 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4740 header->string_size, vNULL);
2c9561b5
MJ
4741 count = streamer_read_uhwi (&ib_main);
4742
4743 for (i = 0; i < count; i++)
4744 {
4745 unsigned int index;
4746 struct cgraph_node *node;
4747 lto_symtab_encoder_t encoder;
4748
4749 index = streamer_read_uhwi (&ib_main);
4750 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4751 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4752 index));
67348ccc 4753 gcc_assert (node->definition);
04be694e 4754 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
4755 }
4756 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4757 len);
4758 lto_data_in_delete (data_in);
4759}
4760
4761/* Read IPA-CP aggregate replacements. */
4762
4763void
04be694e 4764ipcp_read_transformation_summaries (void)
2c9561b5
MJ
4765{
4766 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4767 struct lto_file_decl_data *file_data;
4768 unsigned int j = 0;
4769
4770 while ((file_data = file_data_vec[j++]))
4771 {
4772 size_t len;
4773 const char *data = lto_get_section_data (file_data,
4774 LTO_section_ipcp_transform,
4775 NULL, &len);
4776 if (data)
4777 read_replacements_section (file_data, data, len);
4778 }
4779}
4780
4781/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4782 NODE. */
4783
4784static void
4785adjust_agg_replacement_values (struct cgraph_node *node,
4786 struct ipa_agg_replacement_value *aggval)
4787{
4788 struct ipa_agg_replacement_value *v;
4789 int i, c = 0, d = 0, *adj;
4790
4791 if (!node->clone.combined_args_to_skip)
4792 return;
4793
4794 for (v = aggval; v; v = v->next)
4795 {
4796 gcc_assert (v->index >= 0);
4797 if (c < v->index)
4798 c = v->index;
4799 }
4800 c++;
4801
4802 adj = XALLOCAVEC (int, c);
4803 for (i = 0; i < c; i++)
4804 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4805 {
4806 adj[i] = -1;
4807 d++;
4808 }
4809 else
4810 adj[i] = i - d;
4811
4812 for (v = aggval; v; v = v->next)
4813 v->index = adj[v->index];
4814}
4815
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being modified, DESCS holds its
     parameter descriptors and AV the chain of aggregate replacement
     values.  *SC is set when any statement is changed, *CC when the CFG
     is changed.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
4836
3daacdcd 4837edge
8aab5218
MJ
4838ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4839{
4840 gimple_stmt_iterator gsi;
4841 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4842 {
4843 struct ipa_agg_replacement_value *v;
355fe088 4844 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
4845 tree rhs, val, t;
4846 HOST_WIDE_INT offset, size;
4847 int index;
4848 bool by_ref, vce;
4849
4850 if (!gimple_assign_load_p (stmt))
4851 continue;
4852 rhs = gimple_assign_rhs1 (stmt);
4853 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4854 continue;
2c9561b5 4855
8aab5218
MJ
4856 vce = false;
4857 t = rhs;
4858 while (handled_component_p (t))
4859 {
4860 /* V_C_E can do things like convert an array of integers to one
4861 bigger integer and similar things we do not handle below. */
4862 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4863 {
4864 vce = true;
4865 break;
4866 }
4867 t = TREE_OPERAND (t, 0);
4868 }
4869 if (vce)
4870 continue;
4871
ff302741
PB
4872 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4873 &offset, &size, &by_ref))
8aab5218
MJ
4874 continue;
4875 for (v = m_aggval; v; v = v->next)
4876 if (v->index == index
4877 && v->offset == offset)
4878 break;
4879 if (!v
4880 || v->by_ref != by_ref
4881 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4882 continue;
4883
4884 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4885 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4886 {
4887 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4888 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4889 else if (TYPE_SIZE (TREE_TYPE (rhs))
4890 == TYPE_SIZE (TREE_TYPE (v->value)))
4891 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4892 else
4893 {
4894 if (dump_file)
4895 {
4896 fprintf (dump_file, " const ");
ef6cb4c7 4897 print_generic_expr (dump_file, v->value);
8aab5218 4898 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 4899 print_generic_expr (dump_file, rhs);
8aab5218
MJ
4900 fprintf (dump_file, "\n");
4901 }
4902 continue;
4903 }
4904 }
4905 else
4906 val = v->value;
4907
4908 if (dump_file && (dump_flags & TDF_DETAILS))
4909 {
4910 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 4911 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4912 }
4913 gimple_assign_set_rhs_from_tree (&gsi, val);
4914 update_stmt (stmt);
4915
4916 if (dump_file && (dump_flags & TDF_DETAILS))
4917 {
4918 fprintf (dump_file, "into:\n ");
ef6cb4c7 4919 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4920 fprintf (dump_file, "\n");
4921 }
4922
4923 *m_something_changed = true;
4924 if (maybe_clean_eh_stmt (stmt)
4925 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4926 *m_cfg_changed = true;
4927 }
3daacdcd 4928 return NULL;
8aab5218
MJ
4929}
4930
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Summary indices refer to the original parameter list; parameters
	 removed in this clone have no PARM_DECL, so do not advance PARM
	 for them.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only integral and pointer SSA parameters with bits info are
	 interesting.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Record mask | value as the possibly-nonzero bits of the
	     default definition.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* For pointers, derive alignment info: ALIGN is the lowest set
	     bit of the mask, MISALIGN the value bits below it.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Keep a stronger alignment that is already recorded.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5022
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Summary indices refer to the original parameter list; skip
	 entries for parameters removed in this clone without advancing
	 PARM.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  /* An anti-range of exactly [0, 0] on a pointer means the
	     pointer is known to be non-null.  */
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5085
/* IPCP transformation phase doing propagation of aggregate values.
   Returns the TODO flags for the pass manager: nothing if no statement
   changed, otherwise SSA (and possibly CFG-cleanup) updates.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Apply the streamed-in bits and value-range info first; these do not
     need the body walk below.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Renumber replacement indices for parameters removed in this clone.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  /* Walk the dominator tree substituting known constants for aggregate
     parameter loads.  */
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The transformation summary has been consumed; clear its fields.  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
86cd0334
MJ
5150
5151#include "gt-ipa-prop.h"