/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
56/* Function summary where the parameter infos are actually stored. */
57ipa_node_params_t *ipa_node_params_sum = NULL;
9d3e0adc
ML
58
59function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60
6fe906a3
MJ
61/* Edge summary for IPA-CP edge information. */
62ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 63
86cd0334
MJ
64/* Traits for a hash table for reusing already existing ipa_bits. */
65
66struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67{
68 typedef ipa_bits *value_type;
69 typedef ipa_bits *compare_type;
70 static hashval_t
71 hash (const ipa_bits *p)
72 {
73 hashval_t t = (hashval_t) p->value.to_shwi ();
74 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 }
76 static bool
77 equal (const ipa_bits *a, const ipa_bits *b)
78 {
79 return a->value == b->value && a->mask == b->mask;
80 }
81 static void
82 mark_empty (ipa_bits *&p)
83 {
84 p = NULL;
85 }
86 static bool
87 is_empty (const ipa_bits *p)
88 {
89 return p == NULL;
90 }
91 static bool
92 is_deleted (const ipa_bits *p)
93 {
94 return p == reinterpret_cast<const ipa_bits *> (1);
95 }
96 static void
97 mark_deleted (ipa_bits *&p)
98 {
99 p = reinterpret_cast<ipa_bits *> (1);
100 }
101};
102
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
105
106/* Traits for a hash table for reusing value_ranges used for IPA. Note that
107 the equiv bitmap is not hashed and is expected to be NULL. */
108
2506b92c 109struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
86cd0334 110{
2506b92c
RB
111 typedef value_range_base *value_type;
112 typedef value_range_base *compare_type;
86cd0334 113 static hashval_t
2506b92c 114 hash (const value_range_base *p)
59b2c134 115 {
54994253 116 inchash::hash hstate (p->kind ());
74ca1c01
ML
117 inchash::add_expr (p->min (), hstate);
118 inchash::add_expr (p->max (), hstate);
59b2c134
JJ
119 return hstate.end ();
120 }
86cd0334 121 static bool
2506b92c 122 equal (const value_range_base *a, const value_range_base *b)
86cd0334 123 {
ff361cc6 124 return a->equal_p (*b);
86cd0334
MJ
125 }
126 static void
2506b92c 127 mark_empty (value_range_base *&p)
86cd0334
MJ
128 {
129 p = NULL;
130 }
131 static bool
2506b92c 132 is_empty (const value_range_base *p)
86cd0334
MJ
133 {
134 return p == NULL;
135 }
136 static bool
2506b92c 137 is_deleted (const value_range_base *p)
86cd0334 138 {
2506b92c 139 return p == reinterpret_cast<const value_range_base *> (1);
86cd0334
MJ
140 }
141 static void
2506b92c 142 mark_deleted (value_range_base *&p)
86cd0334 143 {
2506b92c 144 p = reinterpret_cast<value_range_base *> (1);
86cd0334
MJ
145 }
146};
147
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks:  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 153
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 170
5fe8e757
MJ
171/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
173
174static bool
175ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176{
67348ccc 177 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
178
179 if (!fs_opts)
180 return false;
2bf86c84 181 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
182}
183
be95e2b9
MJ
184/* Return index of the formal whose tree is PTREE in function which corresponds
185 to INFO. */
186
d044dd17 187static int
f65f1ae3
MJ
188ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 tree ptree)
518dc859
RL
190{
191 int i, count;
192
f65f1ae3 193 count = vec_safe_length (descriptors);
518dc859 194 for (i = 0; i < count; i++)
f65f1ae3 195 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
196 return i;
197
198 return -1;
199}
200
d044dd17
MJ
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if it is not found.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
209
210/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
211 NODE. */
be95e2b9 212
f8e2a1ed
MJ
213static void
214ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 215 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
216{
217 tree fndecl;
218 tree fnargs;
219 tree parm;
220 int param_num;
3e293154 221
67348ccc 222 fndecl = node->decl;
0e8853ee 223 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
224 fnargs = DECL_ARGUMENTS (fndecl);
225 param_num = 0;
910ad8de 226 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 227 {
209ca542 228 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
229 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
230 true);
518dc859
RL
231 param_num++;
232 }
233}
234
3f84bf08
MJ
235/* Return how many formal parameters FNDECL has. */
236
fd29c024 237int
310bc633 238count_formal_params (tree fndecl)
3f84bf08
MJ
239{
240 tree parm;
241 int count = 0;
0e8853ee 242 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 243
910ad8de 244 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
245 count++;
246
247 return count;
248}
249
0e8853ee
JH
/* Dump the Ith formal parameter of the function described by INFO to FILE,
   printing its declaration (if recorded) after the parameter number.
   NOTE(review): the previous comment here ("Return the declaration of Ith
   formal parameter...") was stale — this function only prints.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
264
159f01f8
MJ
265/* If necessary, allocate vector of parameter descriptors in info of NODE.
266 Return true if they were allocated, false if not. */
0e8853ee 267
159f01f8 268static bool
0e8853ee
JH
269ipa_alloc_node_params (struct cgraph_node *node, int param_count)
270{
271 struct ipa_node_params *info = IPA_NODE_REF (node);
272
f65f1ae3 273 if (!info->descriptors && param_count)
159f01f8
MJ
274 {
275 vec_safe_grow_cleared (info->descriptors, param_count);
276 return true;
277 }
278 else
279 return false;
0e8853ee
JH
280}
281
f8e2a1ed
MJ
282/* Initialize the ipa_node_params structure associated with NODE by counting
283 the function parameters, creating the descriptors and populating their
284 param_decls. */
be95e2b9 285
f8e2a1ed
MJ
286void
287ipa_initialize_node_params (struct cgraph_node *node)
288{
289 struct ipa_node_params *info = IPA_NODE_REF (node);
290
159f01f8
MJ
291 if (!info->descriptors
292 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
293 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
294}
295
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.
   For each actual argument this dumps the jump function type and its
   payload, any aggregate jump functions, the polymorphic call context,
   known bits and the value range.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  /* For addresses of CONST_DECLs also show the constant itself.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* A non-NOP operation carries an extra operand worth dumping.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Dump any jump functions describing parts of an aggregate passed
	 either by value or by reference.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value denotes a clobber of that many bits rather
		 than a known constant.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      /* Known bits (value/mask pair), if tracked for this argument.  */
      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      /* Value range, if tracked for this argument.  */
      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
410
411
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are dumped first, then indirect call
   sites.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* Calls through aggregate contents (including member pointers) are
	 described differently from plain/polymorphic indirect calls.  */
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
463
464/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 465
3e293154
MJ
466void
467ipa_print_all_jump_functions (FILE *f)
468{
469 struct cgraph_node *node;
470
ca30a539 471 fprintf (f, "\nJump functions:\n");
65c70e6b 472 FOR_EACH_FUNCTION (node)
3e293154
MJ
473 {
474 ipa_print_node_jump_functions (f, node);
475 }
476}
477
04be694e
MJ
478/* Set jfunc to be a know-really nothing jump function. */
479
480static void
481ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
482{
483 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
484 jfunc->bits = NULL;
485 jfunc->m_vr = NULL;
04be694e
MJ
486}
487
b8f6e610
MJ
488/* Set JFUNC to be a copy of another jmp (to be used by jump function
489 combination code). The two functions will share their rdesc. */
490
491static void
492ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
493 struct ipa_jump_func *src)
494
495{
496 gcc_checking_assert (src->type == IPA_JF_CONST);
497 dst->type = IPA_JF_CONST;
498 dst->value.constant = src->value.constant;
499}
500
7b872d9e
MJ
501/* Set JFUNC to be a constant jmp function. */
502
503static void
4502fe8d
MJ
504ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
505 struct cgraph_edge *cs)
7b872d9e
MJ
506{
507 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
508 jfunc->value.constant.value = unshare_expr_without_location (constant);
509
510 if (TREE_CODE (constant) == ADDR_EXPR
511 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
512 {
513 struct ipa_cst_ref_desc *rdesc;
4502fe8d 514
601f3293 515 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
516 rdesc->cs = cs;
517 rdesc->next_duplicate = NULL;
518 rdesc->refcount = 1;
519 jfunc->value.constant.rdesc = rdesc;
520 }
521 else
522 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
523}
524
525/* Set JFUNC to be a simple pass-through jump function. */
526static void
8b7773a4 527ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 528 bool agg_preserved)
7b872d9e
MJ
529{
530 jfunc->type = IPA_JF_PASS_THROUGH;
531 jfunc->value.pass_through.operand = NULL_TREE;
532 jfunc->value.pass_through.formal_id = formal_id;
533 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 534 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
535}
536
a2b4c188
KV
537/* Set JFUNC to be an unary pass through jump function. */
538
539static void
540ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
541 enum tree_code operation)
542{
543 jfunc->type = IPA_JF_PASS_THROUGH;
544 jfunc->value.pass_through.operand = NULL_TREE;
545 jfunc->value.pass_through.formal_id = formal_id;
546 jfunc->value.pass_through.operation = operation;
547 jfunc->value.pass_through.agg_preserved = false;
548}
7b872d9e
MJ
549/* Set JFUNC to be an arithmetic pass through jump function. */
550
551static void
552ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
553 tree operand, enum tree_code operation)
554{
555 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 556 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
557 jfunc->value.pass_through.formal_id = formal_id;
558 jfunc->value.pass_through.operation = operation;
8b7773a4 559 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
560}
561
562/* Set JFUNC to be an ancestor jump function. */
563
564static void
565ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 566 int formal_id, bool agg_preserved)
7b872d9e
MJ
567{
568 jfunc->type = IPA_JF_ANCESTOR;
569 jfunc->value.ancestor.formal_id = formal_id;
570 jfunc->value.ancestor.offset = offset;
8b7773a4 571 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
572}
573
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
583
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
598
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Per the comment above, calls can be ignored entirely.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers do not store a meaningful VMT pointer value.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* With strict aliasing, only pointer-typed stores can touch a
	     vtable pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot be a VMT store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store a VMT pointer.  */
  return true;
}
663
3b97a5c7
MJ
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      /* Returning true terminates the vdef walk early.  */
      return true;
    }
  else
    return false;
}
683
058d0a90
JH
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 737
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Set up an alias-oracle reference describing just the VMT pointer slot
     at OFFSET within the object.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk the virtual definitions backwards from CALL looking for a
     statement that might store to the VMT pointer.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
791
058d0a90
JH
792/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
793 If it is, return true and fill in the jump function JFUNC with relevant type
794 information or set it to unknown. ARG is the object itself (not a pointer
795 to it, unless dereferenced). BASE is the base of the memory access as
796 returned by get_ref_base_and_extent, as is the offset. */
797
798static bool
538dd0b7 799detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
800 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
801{
802 if (!flag_devirtualize)
803 return false;
804
805 if (TREE_CODE (base) == MEM_REF
806 && !param_type_may_change_p (current_function_decl,
807 TREE_OPERAND (base, 0),
808 call))
809 return false;
810 return detect_type_change_from_memory_writes (arg, base, comp_type,
811 call, jfunc, offset);
812}
813
f65cf2b7
MJ
814/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
815 SSA name (its dereference will become the base and the offset is assumed to
816 be zero). */
817
818static bool
06d65050 819detect_type_change_ssa (tree arg, tree comp_type,
538dd0b7 820 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
821{
822 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 823 if (!flag_devirtualize
06d65050 824 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
825 return false;
826
058d0a90
JH
827 if (!param_type_may_change_p (current_function_decl, arg, call))
828 return false;
829
f65cf2b7 830 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 831 build_int_cst (ptr_type_node, 0));
f65cf2b7 832
058d0a90
JH
833 return detect_type_change_from_memory_writes (arg, arg, comp_type,
834 call, jfunc, 0);
f65cf2b7
MJ
835}
836
fdb0e1b4
MJ
837/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
838 boolean variable pointed to by DATA. */
839
840static bool
841mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
842 void *data)
843{
844 bool *b = (bool *) data;
845 *b = true;
846 return true;
847}
848
8aab5218
MJ
849/* Return true if we have already walked so many statements in AA that we
850 should really just start giving up. */
851
852static bool
56b40062 853aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
854{
855 gcc_checking_assert (fbi);
856 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
857}
858
859/* Find the nearest valid aa status for parameter specified by INDEX that
860 dominates BB. */
861
56b40062
MJ
862static struct ipa_param_aa_status *
863find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
864 int index)
865{
866 while (true)
867 {
868 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
869 if (!bb)
870 return NULL;
871 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
872 if (!bi->param_aa_statuses.is_empty ()
873 && bi->param_aa_statuses[index].valid)
874 return &bi->param_aa_statuses[index];
875 }
876}
877
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate one status per parameter for this BB.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared status must not claim any modifications yet.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating status if there is one.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
906
688010ba 907/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 908 a value known not to be modified in this function before reaching the
8aab5218
MJ
909 statement STMT. FBI holds information about the function we have so far
910 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
911
912static bool
56b40062 913parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 914 gimple *stmt, tree parm_load)
fdb0e1b4 915{
56b40062 916 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
917 bool modified = false;
918 ao_ref refd;
919
776e4fe2
MJ
920 tree base = get_base_address (parm_load);
921 gcc_assert (TREE_CODE (base) == PARM_DECL);
922 if (TREE_READONLY (base))
923 return true;
924
8aab5218
MJ
925 /* FIXME: FBI can be NULL if we are being called from outside
926 ipa_node_analysis or ipcp_transform_function, which currently happens
927 during inlining analysis. It would be great to extend fbi's lifetime and
928 always have it. Currently, we are just not afraid of too much walking in
929 that case. */
930 if (fbi)
931 {
932 if (aa_overwalked (fbi))
933 return false;
934 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
935 if (paa->parm_modified)
936 return false;
937 }
938 else
939 paa = NULL;
fdb0e1b4
MJ
940
941 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 942 ao_ref_init (&refd, parm_load);
8aab5218
MJ
943 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
944 &modified, NULL);
945 if (fbi)
946 fbi->aa_walked += walked;
947 if (paa && modified)
948 paa->parm_modified = true;
8b7773a4 949 return !modified;
fdb0e1b4
MJ
950}
951
a2b4c188
KV
952/* If STMT is an assignment that loads a value from an parameter declaration,
953 return the index of the parameter in ipa_node_params which has not been
954 modified. Otherwise return -1. */
955
956static int
957load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 958 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
959 gimple *stmt)
960{
bda2bc48
MJ
961 int index;
962 tree op1;
963
a2b4c188
KV
964 if (!gimple_assign_single_p (stmt))
965 return -1;
966
bda2bc48
MJ
967 op1 = gimple_assign_rhs1 (stmt);
968 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
969 return -1;
970
bda2bc48
MJ
971 index = ipa_get_param_decl_index_1 (descriptors, op1);
972 if (index < 0
973 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
974 return -1;
975
bda2bc48 976 return index;
a2b4c188
KV
977}
978
8aab5218
MJ
979/* Return true if memory reference REF (which must be a load through parameter
980 with INDEX) loads data that are known to be unmodified in this function
981 before reaching statement STMT. */
8b7773a4
MJ
982
983static bool
56b40062 984parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 985 int index, gimple *stmt, tree ref)
8b7773a4 986{
56b40062 987 struct ipa_param_aa_status *paa;
8b7773a4
MJ
988 bool modified = false;
989 ao_ref refd;
990
8aab5218
MJ
991 /* FIXME: FBI can be NULL if we are being called from outside
992 ipa_node_analysis or ipcp_transform_function, which currently happens
993 during inlining analysis. It would be great to extend fbi's lifetime and
994 always have it. Currently, we are just not afraid of too much walking in
995 that case. */
996 if (fbi)
997 {
998 if (aa_overwalked (fbi))
999 return false;
1000 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1001 if (paa->ref_modified)
1002 return false;
1003 }
1004 else
1005 paa = NULL;
8b7773a4 1006
8aab5218 1007 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1008 ao_ref_init (&refd, ref);
8aab5218
MJ
1009 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1010 &modified, NULL);
1011 if (fbi)
1012 fbi->aa_walked += walked;
1013 if (paa && modified)
1014 paa->ref_modified = true;
8b7773a4
MJ
1015 return !modified;
1016}
1017
8aab5218
MJ
1018/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1019 is known to be unmodified in this function before reaching call statement
1020 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1021
1022static bool
56b40062 1023parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1024 gimple *call, tree parm)
8b7773a4
MJ
1025{
1026 bool modified = false;
1027 ao_ref refd;
1028
1029 /* It's unnecessary to calculate anything about memory contnets for a const
1030 function because it is not goin to use it. But do not cache the result
1031 either. Also, no such calculations for non-pointers. */
1032 if (!gimple_vuse (call)
8aab5218
MJ
1033 || !POINTER_TYPE_P (TREE_TYPE (parm))
1034 || aa_overwalked (fbi))
8b7773a4
MJ
1035 return false;
1036
56b40062
MJ
1037 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1038 gimple_bb (call),
1039 index);
8aab5218 1040 if (paa->pt_modified)
8b7773a4
MJ
1041 return false;
1042
1043 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
1044 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1045 &modified, NULL);
1046 fbi->aa_walked += walked;
8b7773a4 1047 if (modified)
8aab5218 1048 paa->pt_modified = true;
8b7773a4
MJ
1049 return !modified;
1050}
1051
91bb9f80
MJ
1052/* Return true if we can prove that OP is a memory reference loading
1053 data from an aggregate passed as a parameter.
1054
1055 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1056 false if it cannot prove that the value has not been modified before the
1057 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1058 if it cannot prove the value has not been modified, in that case it will
1059 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1060
8b7773a4
MJ
1061 INFO and PARMS_AINFO describe parameters of the current function (but the
1062 latter can be NULL), STMT is the load statement. If function returns true,
1063 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1064 within the aggregate and whether it is a load from a value passed by
1065 reference respectively. */
1066
ff302741 1067bool
56b40062 1068ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1069 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1070 gimple *stmt, tree op, int *index_p,
ff302741 1071 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
91bb9f80 1072 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1073{
1074 int index;
588db50c 1075 HOST_WIDE_INT size;
ee45a32d 1076 bool reverse;
588db50c 1077 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1078
588db50c 1079 if (!base)
8b7773a4
MJ
1080 return false;
1081
1082 if (DECL_P (base))
1083 {
d044dd17 1084 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1085 if (index >= 0
8aab5218 1086 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1087 {
1088 *index_p = index;
1089 *by_ref_p = false;
3ff2ca23
JJ
1090 if (size_p)
1091 *size_p = size;
91bb9f80
MJ
1092 if (guaranteed_unmodified)
1093 *guaranteed_unmodified = true;
8b7773a4
MJ
1094 return true;
1095 }
1096 return false;
1097 }
1098
1099 if (TREE_CODE (base) != MEM_REF
1100 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1101 || !integer_zerop (TREE_OPERAND (base, 1)))
1102 return false;
1103
1104 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1105 {
1106 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1107 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1108 }
1109 else
1110 {
1111 /* This branch catches situations where a pointer parameter is not a
1112 gimple register, for example:
1113
1114 void hip7(S*) (struct S * p)
1115 {
1116 void (*<T2e4>) (struct S *) D.1867;
1117 struct S * p.1;
1118
1119 <bb 2>:
1120 p.1_1 = p;
1121 D.1867_2 = p.1_1->f;
1122 D.1867_2 ();
1123 gdp = &p;
1124 */
1125
355fe088 1126 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1127 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1128 }
1129
91bb9f80 1130 if (index >= 0)
8b7773a4 1131 {
91bb9f80
MJ
1132 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1133 if (!data_preserved && !guaranteed_unmodified)
1134 return false;
1135
8b7773a4
MJ
1136 *index_p = index;
1137 *by_ref_p = true;
3ff2ca23
JJ
1138 if (size_p)
1139 *size_p = size;
91bb9f80
MJ
1140 if (guaranteed_unmodified)
1141 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1142 return true;
1143 }
1144 return false;
1145}
1146
b258210c 1147/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1148 of an assignment statement STMT, try to determine whether we are actually
1149 handling any of the following cases and construct an appropriate jump
1150 function into JFUNC if so:
1151
1152 1) The passed value is loaded from a formal parameter which is not a gimple
1153 register (most probably because it is addressable, the value has to be
1154 scalar) and we can guarantee the value has not changed. This case can
1155 therefore be described by a simple pass-through jump function. For example:
1156
1157 foo (int a)
1158 {
1159 int a.0;
1160
1161 a.0_2 = a;
1162 bar (a.0_2);
1163
1164 2) The passed value can be described by a simple arithmetic pass-through
1165 jump function. E.g.
1166
1167 foo (int a)
1168 {
1169 int D.2064;
1170
1171 D.2064_4 = a.1(D) + 4;
1172 bar (D.2064_4);
1173
1174 This case can also occur in combination of the previous one, e.g.:
1175
1176 foo (int a, int z)
1177 {
1178 int a.0;
1179 int D.2064;
1180
1181 a.0_3 = a;
1182 D.2064_4 = a.0_3 + 4;
1183 foo (D.2064_4);
1184
1185 3) The passed value is an address of an object within another one (which
1186 also passed by reference). Such situations are described by an ancestor
1187 jump function and describe situations such as:
1188
1189 B::foo() (struct B * const this)
1190 {
1191 struct A * D.1845;
1192
1193 D.1845_2 = &this_1(D)->D.1748;
1194 A::bar (D.1845_2);
1195
1196 INFO is the structure describing individual parameters access different
1197 stages of IPA optimizations. PARMS_AINFO contains the information that is
1198 only needed for intraprocedural analysis. */
685b0d13
MJ
1199
1200static void
56b40062 1201compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1202 struct ipa_node_params *info,
b258210c 1203 struct ipa_jump_func *jfunc,
355fe088 1204 gcall *call, gimple *stmt, tree name,
06d65050 1205 tree param_type)
685b0d13 1206{
588db50c 1207 HOST_WIDE_INT offset, size;
fdb0e1b4 1208 tree op1, tc_ssa, base, ssa;
ee45a32d 1209 bool reverse;
685b0d13 1210 int index;
685b0d13 1211
685b0d13 1212 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1213
fdb0e1b4 1214 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1215 {
fdb0e1b4
MJ
1216 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1217 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1218 else
bda2bc48
MJ
1219 index = load_from_unmodified_param (fbi, info->descriptors,
1220 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1221 tc_ssa = op1;
1222 }
1223 else
1224 {
bda2bc48 1225 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1226 tc_ssa = gimple_assign_lhs (stmt);
1227 }
1228
1229 if (index >= 0)
1230 {
a77af182 1231 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1232 {
a77af182
RB
1233 case GIMPLE_BINARY_RHS:
1234 {
1235 tree op2 = gimple_assign_rhs2 (stmt);
1236 if (!is_gimple_ip_invariant (op2)
1237 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1238 != tcc_comparison)
1239 && !useless_type_conversion_p (TREE_TYPE (name),
1240 TREE_TYPE (op1))))
1241 return;
1242
1243 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1244 gimple_assign_rhs_code (stmt));
1245 break;
1246 }
1247 case GIMPLE_SINGLE_RHS:
1248 {
1249 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1250 tc_ssa);
1251 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1252 break;
1253 }
1254 case GIMPLE_UNARY_RHS:
bda2bc48
MJ
1255 if (is_gimple_assign (stmt)
1256 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1257 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1258 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1259 gimple_assign_rhs_code (stmt));
a77af182 1260 default:;
8b7773a4 1261 }
685b0d13
MJ
1262 return;
1263 }
1264
1265 if (TREE_CODE (op1) != ADDR_EXPR)
1266 return;
1267 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1268 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1269 return;
588db50c 1270 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1271 offset_int mem_offset;
1272 if (!base
1273 || TREE_CODE (base) != MEM_REF
1274 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1275 return;
aca52e6f 1276 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1277 ssa = TREE_OPERAND (base, 0);
1278 if (TREE_CODE (ssa) != SSA_NAME
1279 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1280 || offset < 0)
685b0d13
MJ
1281 return;
1282
b8f6e610 1283 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1284 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1285 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1286 ipa_set_ancestor_jf (jfunc, offset, index,
1287 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1288}
1289
40591473
MJ
1290/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1291 it looks like:
1292
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1294
1295 The base of the MEM_REF must be a default definition SSA NAME of a
1296 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1297 whole MEM_REF expression is returned and the offset calculated from any
1298 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1299 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1300
1301static tree
355fe088 1302get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1303{
588db50c 1304 HOST_WIDE_INT size;
40591473 1305 tree expr, parm, obj;
ee45a32d 1306 bool reverse;
40591473
MJ
1307
1308 if (!gimple_assign_single_p (assign))
1309 return NULL_TREE;
1310 expr = gimple_assign_rhs1 (assign);
1311
1312 if (TREE_CODE (expr) != ADDR_EXPR)
1313 return NULL_TREE;
1314 expr = TREE_OPERAND (expr, 0);
1315 obj = expr;
588db50c 1316 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1317
aca52e6f
RS
1318 offset_int mem_offset;
1319 if (!expr
1320 || TREE_CODE (expr) != MEM_REF
1321 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1322 return NULL_TREE;
1323 parm = TREE_OPERAND (expr, 0);
1324 if (TREE_CODE (parm) != SSA_NAME
1325 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1326 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1327 return NULL_TREE;
1328
aca52e6f 1329 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1330 *obj_p = obj;
1331 return expr;
1332}
1333
685b0d13 1334
b258210c
MJ
1335/* Given that an actual argument is an SSA_NAME that is a result of a phi
1336 statement PHI, try to find out whether NAME is in fact a
1337 multiple-inheritance typecast from a descendant into an ancestor of a formal
1338 parameter and thus can be described by an ancestor jump function and if so,
1339 write the appropriate function into JFUNC.
1340
1341 Essentially we want to match the following pattern:
1342
1343 if (obj_2(D) != 0B)
1344 goto <bb 3>;
1345 else
1346 goto <bb 4>;
1347
1348 <bb 3>:
1349 iftmp.1_3 = &obj_2(D)->D.1762;
1350
1351 <bb 4>:
1352 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1353 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1354 return D.1879_6; */
1355
1356static void
56b40062 1357compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1358 struct ipa_node_params *info,
b258210c 1359 struct ipa_jump_func *jfunc,
538dd0b7 1360 gcall *call, gphi *phi)
b258210c 1361{
40591473 1362 HOST_WIDE_INT offset;
355fe088 1363 gimple *assign, *cond;
b258210c 1364 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1365 tree tmp, parm, expr, obj;
b258210c
MJ
1366 int index, i;
1367
54e348cb 1368 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1369 return;
1370
54e348cb
MJ
1371 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1372 tmp = PHI_ARG_DEF (phi, 0);
1373 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1374 tmp = PHI_ARG_DEF (phi, 1);
1375 else
1376 return;
b258210c
MJ
1377 if (TREE_CODE (tmp) != SSA_NAME
1378 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1379 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1380 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1381 return;
1382
1383 assign = SSA_NAME_DEF_STMT (tmp);
1384 assign_bb = gimple_bb (assign);
40591473 1385 if (!single_pred_p (assign_bb))
b258210c 1386 return;
40591473
MJ
1387 expr = get_ancestor_addr_info (assign, &obj, &offset);
1388 if (!expr)
b258210c
MJ
1389 return;
1390 parm = TREE_OPERAND (expr, 0);
b258210c 1391 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1392 if (index < 0)
1393 return;
b258210c
MJ
1394
1395 cond_bb = single_pred (assign_bb);
1396 cond = last_stmt (cond_bb);
69610617
SB
1397 if (!cond
1398 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1399 || gimple_cond_code (cond) != NE_EXPR
1400 || gimple_cond_lhs (cond) != parm
1401 || !integer_zerop (gimple_cond_rhs (cond)))
1402 return;
1403
b258210c
MJ
1404 phi_bb = gimple_bb (phi);
1405 for (i = 0; i < 2; i++)
1406 {
1407 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1408 if (pred != assign_bb && pred != cond_bb)
1409 return;
1410 }
1411
3b97a5c7
MJ
1412 ipa_set_ancestor_jf (jfunc, offset, index,
1413 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1414}
1415
be95e2b9
MJ
1416/* Inspect the given TYPE and return true iff it has the same structure (the
1417 same number of fields of the same types) as a C++ member pointer. If
1418 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1419 corresponding fields there. */
1420
3e293154
MJ
1421static bool
1422type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1423{
1424 tree fld;
1425
1426 if (TREE_CODE (type) != RECORD_TYPE)
1427 return false;
1428
1429 fld = TYPE_FIELDS (type);
1430 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1431 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1432 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1433 return false;
1434
1435 if (method_ptr)
1436 *method_ptr = fld;
1437
910ad8de 1438 fld = DECL_CHAIN (fld);
8b7773a4 1439 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1440 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1441 return false;
1442 if (delta)
1443 *delta = fld;
1444
910ad8de 1445 if (DECL_CHAIN (fld))
3e293154
MJ
1446 return false;
1447
1448 return true;
1449}
1450
61502ca8 1451/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1452 return the rhs of its defining statement. Otherwise return RHS as it
1453 is. */
7ec49257
MJ
1454
1455static inline tree
1456get_ssa_def_if_simple_copy (tree rhs)
1457{
1458 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1459 {
355fe088 1460 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1461
1462 if (gimple_assign_single_p (def_stmt))
1463 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1464 else
1465 break;
7ec49257
MJ
1466 }
1467 return rhs;
1468}
1469
8b7773a4
MJ
1470/* Simple linked list, describing known contents of an aggregate beforere
1471 call. */
1472
1473struct ipa_known_agg_contents_list
1474{
1475 /* Offset and size of the described part of the aggregate. */
1476 HOST_WIDE_INT offset, size;
1477 /* Known constant value or NULL if the contents is known to be unknown. */
1478 tree constant;
1479 /* Pointer to the next structure in the list. */
1480 struct ipa_known_agg_contents_list *next;
1481};
3e293154 1482
0d48ee34
MJ
1483/* Find the proper place in linked list of ipa_known_agg_contents_list
1484 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1485 unless there is a partial overlap, in which case return NULL, or such
1486 element is already there, in which case set *ALREADY_THERE to true. */
1487
1488static struct ipa_known_agg_contents_list **
1489get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1490 HOST_WIDE_INT lhs_offset,
1491 HOST_WIDE_INT lhs_size,
1492 bool *already_there)
1493{
1494 struct ipa_known_agg_contents_list **p = list;
1495 while (*p && (*p)->offset < lhs_offset)
1496 {
1497 if ((*p)->offset + (*p)->size > lhs_offset)
1498 return NULL;
1499 p = &(*p)->next;
1500 }
1501
1502 if (*p && (*p)->offset < lhs_offset + lhs_size)
1503 {
1504 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1505 /* We already know this value is subsequently overwritten with
1506 something else. */
1507 *already_there = true;
1508 else
1509 /* Otherwise this is a partial overlap which we cannot
1510 represent. */
1511 return NULL;
1512 }
1513 return p;
1514}
1515
1516/* Build aggregate jump function from LIST, assuming there are exactly
1517 CONST_COUNT constant entries there and that th offset of the passed argument
1518 is ARG_OFFSET and store it into JFUNC. */
1519
1520static void
1521build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1522 int const_count, HOST_WIDE_INT arg_offset,
1523 struct ipa_jump_func *jfunc)
1524{
1525 vec_alloc (jfunc->agg.items, const_count);
1526 while (list)
1527 {
1528 if (list->constant)
1529 {
1530 struct ipa_agg_jf_item item;
1531 item.offset = list->offset - arg_offset;
1532 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1533 item.value = unshare_expr_without_location (list->constant);
1534 jfunc->agg.items->quick_push (item);
1535 }
1536 list = list->next;
1537 }
1538}
1539
8b7773a4
MJ
1540/* Traverse statements from CALL backwards, scanning whether an aggregate given
1541 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1542 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1543 aggregate. JFUNC is the jump function into which the constants are
1544 subsequently stored. */
be95e2b9 1545
3e293154 1546static void
538dd0b7
DM
1547determine_locally_known_aggregate_parts (gcall *call, tree arg,
1548 tree arg_type,
0d48ee34 1549 struct ipa_jump_func *jfunc)
3e293154 1550{
8b7773a4
MJ
1551 struct ipa_known_agg_contents_list *list = NULL;
1552 int item_count = 0, const_count = 0;
1553 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1554 gimple_stmt_iterator gsi;
8b7773a4
MJ
1555 tree arg_base;
1556 bool check_ref, by_ref;
1557 ao_ref r;
3e293154 1558
29799e9d
MJ
1559 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1560 return;
1561
8b7773a4
MJ
1562 /* The function operates in three stages. First, we prepare check_ref, r,
1563 arg_base and arg_offset based on what is actually passed as an actual
1564 argument. */
3e293154 1565
85942f45 1566 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1567 {
1568 by_ref = true;
1569 if (TREE_CODE (arg) == SSA_NAME)
1570 {
1571 tree type_size;
63831879
MJ
1572 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1573 || !POINTER_TYPE_P (TREE_TYPE (arg)))
8b7773a4
MJ
1574 return;
1575 check_ref = true;
1576 arg_base = arg;
1577 arg_offset = 0;
85942f45 1578 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1579 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1580 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1581 }
1582 else if (TREE_CODE (arg) == ADDR_EXPR)
1583 {
ee45a32d 1584 bool reverse;
8b7773a4
MJ
1585
1586 arg = TREE_OPERAND (arg, 0);
588db50c
RS
1587 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1588 &arg_size, &reverse);
1589 if (!arg_base)
8b7773a4
MJ
1590 return;
1591 if (DECL_P (arg_base))
1592 {
8b7773a4 1593 check_ref = false;
0d48ee34 1594 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1595 }
1596 else
1597 return;
1598 }
1599 else
1600 return;
1601 }
1602 else
1603 {
ee45a32d 1604 bool reverse;
8b7773a4
MJ
1605
1606 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1607
1608 by_ref = false;
1609 check_ref = false;
588db50c
RS
1610 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1611 &arg_size, &reverse);
1612 if (!arg_base)
8b7773a4
MJ
1613 return;
1614
1615 ao_ref_init (&r, arg);
1616 }
1617
1618 /* Second stage walks back the BB, looks at individual statements and as long
1619 as it is confident of how the statements affect contents of the
1620 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1621 describing it. */
1622 gsi = gsi_for_stmt (call);
726a989a
RB
1623 gsi_prev (&gsi);
1624 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1625 {
8b7773a4 1626 struct ipa_known_agg_contents_list *n, **p;
355fe088 1627 gimple *stmt = gsi_stmt (gsi);
588db50c 1628 HOST_WIDE_INT lhs_offset, lhs_size;
8b7773a4 1629 tree lhs, rhs, lhs_base;
ee45a32d 1630 bool reverse;
3e293154 1631
8b7773a4 1632 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1633 continue;
8b75fc9b 1634 if (!gimple_assign_single_p (stmt))
8b7773a4 1635 break;
3e293154 1636
726a989a
RB
1637 lhs = gimple_assign_lhs (stmt);
1638 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1639 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1640 || TREE_CODE (lhs) == BIT_FIELD_REF
1641 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1642 break;
3e293154 1643
588db50c
RS
1644 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1645 &lhs_size, &reverse);
1646 if (!lhs_base)
8b7773a4 1647 break;
3e293154 1648
8b7773a4 1649 if (check_ref)
518dc859 1650 {
8b7773a4
MJ
1651 if (TREE_CODE (lhs_base) != MEM_REF
1652 || TREE_OPERAND (lhs_base, 0) != arg_base
1653 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1654 break;
3e293154 1655 }
8b7773a4 1656 else if (lhs_base != arg_base)
774b8a55
MJ
1657 {
1658 if (DECL_P (lhs_base))
1659 continue;
1660 else
1661 break;
1662 }
3e293154 1663
0d48ee34
MJ
1664 bool already_there = false;
1665 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1666 &already_there);
1667 if (!p)
8b7773a4 1668 break;
0d48ee34
MJ
1669 if (already_there)
1670 continue;
3e293154 1671
8b7773a4
MJ
1672 rhs = get_ssa_def_if_simple_copy (rhs);
1673 n = XALLOCA (struct ipa_known_agg_contents_list);
1674 n->size = lhs_size;
1675 n->offset = lhs_offset;
1676 if (is_gimple_ip_invariant (rhs))
1677 {
1678 n->constant = rhs;
1679 const_count++;
1680 }
1681 else
1682 n->constant = NULL_TREE;
1683 n->next = *p;
1684 *p = n;
3e293154 1685
8b7773a4 1686 item_count++;
dfea20f1
MJ
1687 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1688 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1689 break;
1690 }
be95e2b9 1691
8b7773a4
MJ
1692 /* Third stage just goes over the list and creates an appropriate vector of
1693 ipa_agg_jf_item structures out of it, of sourse only if there are
1694 any known constants to begin with. */
3e293154 1695
8b7773a4 1696 if (const_count)
3e293154 1697 {
8b7773a4 1698 jfunc->agg.by_ref = by_ref;
0d48ee34 1699 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1700 }
1701}
1702
5d5f1e95
KV
1703/* Return the Ith param type of callee associated with call graph
1704 edge E. */
1705
1706tree
06d65050
JH
1707ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1708{
1709 int n;
1710 tree type = (e->callee
67348ccc 1711 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1712 : gimple_call_fntype (e->call_stmt));
1713 tree t = TYPE_ARG_TYPES (type);
1714
1715 for (n = 0; n < i; n++)
1716 {
1717 if (!t)
1718 break;
1719 t = TREE_CHAIN (t);
1720 }
1721 if (t)
1722 return TREE_VALUE (t);
1723 if (!e->callee)
1724 return NULL;
67348ccc 1725 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1726 for (n = 0; n < i; n++)
1727 {
1728 if (!t)
1729 return NULL;
1730 t = TREE_CHAIN (t);
1731 }
1732 if (t)
1733 return TREE_TYPE (t);
1734 return NULL;
1735}
1736
86cd0334
MJ
1737/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1738 allocated structure or a previously existing one shared with other jump
1739 functions and/or transformation summaries. */
1740
1741ipa_bits *
1742ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1743{
1744 ipa_bits tmp;
1745 tmp.value = value;
1746 tmp.mask = mask;
1747
1748 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1749 if (*slot)
1750 return *slot;
1751
1752 ipa_bits *res = ggc_alloc<ipa_bits> ();
1753 res->value = value;
1754 res->mask = mask;
1755 *slot = res;
1756
1757 return res;
1758}
1759
1760/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1761 table in order to avoid creating multiple same ipa_bits structures. */
1762
1763static void
1764ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1765 const widest_int &mask)
1766{
1767 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1768}
1769
1770/* Return a pointer to a value_range just like *TMP, but either find it in
1771 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1772
2506b92c
RB
1773static value_range_base *
1774ipa_get_value_range (value_range_base *tmp)
86cd0334 1775{
2506b92c 1776 value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
86cd0334
MJ
1777 if (*slot)
1778 return *slot;
1779
2506b92c 1780 value_range_base *vr = ggc_alloc<value_range_base> ();
86cd0334
MJ
1781 *vr = *tmp;
1782 *slot = vr;
1783
1784 return vr;
1785}
1786
1787/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1788 equiv set. Use hash table in order to avoid creating multiple same copies of
1789 value_ranges. */
1790
2506b92c 1791static value_range_base *
54994253 1792ipa_get_value_range (enum value_range_kind type, tree min, tree max)
86cd0334 1793{
2506b92c 1794 value_range_base tmp (type, min, max);
86cd0334
MJ
1795 return ipa_get_value_range (&tmp);
1796}
1797
1798/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1799 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1800 same value_range structures. */
1801
1802static void
54994253 1803ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
86cd0334
MJ
1804 tree min, tree max)
1805{
1806 jf->m_vr = ipa_get_value_range (type, min, max);
1807}
1808
1809/* Assign to JF a pointer to a value_range just liek TMP but either fetch a
1810 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1811
1812static void
2506b92c 1813ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
86cd0334
MJ
1814{
1815 jf->m_vr = ipa_get_value_range (tmp);
1816}
1817
3e293154
MJ
1818/* Compute jump function for all arguments of callsite CS and insert the
1819 information in the jump_functions array in the ipa_edge_args corresponding
1820 to this callsite. */
be95e2b9 1821
static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or when jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* With devirtualization enabled, record the polymorphic call context
	 of every pointer argument; drop the whole vector again below if no
	 argument produced a useful one.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  /* The only value range recorded for pointers is "known non-NULL",
	     represented as the anti-range ~[0, 0].  */
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  /* For non-pointer SSA names, translate the recorded range info
	     through the implicit conversion to the callee's parameter
	     type (NOP_EXPR) before storing it in the jump function.  */
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range_base resvr;
	      value_range_base tmpvr (type,
				      wide_int_to_tree (TREE_TYPE (arg), min),
				      wide_int_to_tree (TREE_TYPE (arg), max));
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: non-zero-bit info for integral SSA names, the
	 exact value for integer constants, and alignment-derived bits for
	 pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  /* Bits guaranteed zero by the alignment are "known"; the
	     misalignment supplies their value.  */
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Now classify the argument itself: constant, aggregate pass-through,
	 or an SSA name derived from a formal parameter.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* A default definition of a PARM_DECL is a plain
		 pass-through of that parameter.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      /* Otherwise try to recognize an arithmetic operation on a
		 parameter (assignment) or an ancestor jump function (PHI).  */
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Unless aggregate contents are provably preserved by a pass-through
	 or ancestor jump function, try to describe the aggregate values
	 known at this call site.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
1994
749aa96d 1995/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 1996 from BB. */
749aa96d 1997
062c604f 1998static void
56b40062 1999ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2000{
8aab5218
MJ
2001 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2002 int i;
749aa96d
MJ
2003 struct cgraph_edge *cs;
2004
8aab5218 2005 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2006 {
8aab5218 2007 struct cgraph_node *callee = cs->callee;
749aa96d 2008
8aab5218
MJ
2009 if (callee)
2010 {
d52f5295 2011 callee->ultimate_alias_target ();
8aab5218
MJ
2012 /* We do not need to bother analyzing calls to unknown functions
2013 unless they may become known during lto/whopr. */
2014 if (!callee->definition && !flag_lto)
2015 continue;
2016 }
2017 ipa_compute_jump_functions_for_edge (fbi, cs);
2018 }
749aa96d
MJ
2019}
2020
8b7773a4
MJ
2021/* If STMT looks like a statement loading a value from a member pointer formal
2022 parameter, return that parameter and store the offset of the field to
2023 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2024 might be clobbered). If USE_DELTA, then we look for a use of the delta
2025 field rather than the pfn. */
be95e2b9 2026
static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only a plain single assignment can be such a load.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load is either a COMPONENT_REF of the field (possibly wrapped
     around a MEM_REF), or a bare MEM_REF addressed by byte offset.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type looks like a member pointer
     record; this also retrieves its pfn and delta fields.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* A COMPONENT_REF must sit at offset zero and name exactly the
	 field we are after.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* A bare MEM_REF must address exactly the field's byte position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
							        : NULL_TREE;
}
2072
2073/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2074
3e293154
MJ
2075static bool
2076ipa_is_ssa_with_stmt_def (tree t)
2077{
2078 if (TREE_CODE (t) == SSA_NAME
2079 && !SSA_NAME_IS_DEFAULT_DEF (t))
2080 return true;
2081 else
2082 return false;
2083}
2084
40591473
MJ
2085/* Find the indirect call graph edge corresponding to STMT and mark it as a
2086 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2087 indirect call graph edge. */
be95e2b9 2088
40591473 2089static struct cgraph_edge *
538dd0b7
DM
2090ipa_note_param_call (struct cgraph_node *node, int param_index,
2091 gcall *stmt)
3e293154 2092{
e33c6cd6 2093 struct cgraph_edge *cs;
3e293154 2094
d52f5295 2095 cs = node->get_edge (stmt);
b258210c 2096 cs->indirect_info->param_index = param_index;
8b7773a4 2097 cs->indirect_info->agg_contents = 0;
c13bc3d9 2098 cs->indirect_info->member_ptr = 0;
91bb9f80 2099 cs->indirect_info->guaranteed_unmodified = 0;
40591473 2100 return cs;
3e293154
MJ
2101}
2102
e33c6cd6 2103/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 2104 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
2105 intermediate information about each formal parameter. Currently it checks
2106 whether the call calls a pointer that is a formal parameter and if so, the
2107 parameter is marked with the called flag and an indirect call graph edge
2108 describing the call is created. This is very simple for ordinary pointers
2109 represented in SSA but not-so-nice when it comes to member pointers. The
2110 ugly part of this function does nothing more than trying to match the
2111 pattern of such a call. An example of such a pattern is the gimple dump
2112 below, the call is on the last line:
3e293154 2113
ae788515
EB
2114 <bb 2>:
2115 f$__delta_5 = f.__delta;
2116 f$__pfn_24 = f.__pfn;
2117
2118 or
3e293154 2119 <bb 2>:
d242d063
MJ
2120 f$__delta_5 = MEM[(struct *)&f];
2121 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 2122
ae788515 2123 and a few lines below:
8aa29647
MJ
2124
2125 <bb 5>
3e293154
MJ
2126 D.2496_3 = (int) f$__pfn_24;
2127 D.2497_4 = D.2496_3 & 1;
2128 if (D.2497_4 != 0)
2129 goto <bb 3>;
2130 else
2131 goto <bb 4>;
2132
8aa29647 2133 <bb 6>:
3e293154
MJ
2134 D.2500_7 = (unsigned int) f$__delta_5;
2135 D.2501_8 = &S + D.2500_7;
2136 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2137 D.2503_10 = *D.2502_9;
2138 D.2504_12 = f$__pfn_24 + -1;
2139 D.2505_13 = (unsigned int) D.2504_12;
2140 D.2506_14 = D.2503_10 + D.2505_13;
2141 D.2507_15 = *D.2506_14;
2142 iftmp.11_16 = (String:: *) D.2507_15;
2143
8aa29647 2144 <bb 7>:
3e293154
MJ
2145 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2146 D.2500_19 = (unsigned int) f$__delta_5;
2147 D.2508_20 = &S + D.2500_19;
2148 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2149
2150 Such patterns are results of simple calls to a member pointer:
2151
2152 int doprinting (int (MyString::* f)(int) const)
2153 {
2154 MyString S ("somestring");
2155
2156 return (S.*f)(4);
2157 }
8b7773a4
MJ
2158
2159 Moreover, the function also looks for called pointers loaded from aggregates
2160 passed by value or reference. */
3e293154
MJ
2161
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Simple case: the called SSA name is the default definition of a formal
     parameter - note a parameter call and we are done.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  /* The called pointer may also be loaded from an aggregate passed by value
     or by reference.  */
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The called value must be a two-argument PHI of METHOD_TYPE
     pointers (virtual vs. non-virtual resolution).  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be a pfn load from a parameter.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition must be (pfn & 1) != 0, possibly behind a conversion.  */
  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip an optional integer conversion of the pfn.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Finally, the tested value must be loaded from the very same member
     pointer parameter as the called pfn; which field carries the virtual
     bit is a target property.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2297
b258210c
MJ
2298/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2299 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2300 FBI->node (described by FBI->info), create a call note for the
2301 statement. */
b258210c
MJ
2302
static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is a formal parameter itself.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed since function
	 entry.  */
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise try to see whether the object is an ancestor (a field at
	 a known offset) of a formal parameter.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record a polymorphic indirect call through parameter INDEX.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2355
2356/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2357 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2358 containing intermediate information about each formal parameter. */
b258210c
MJ
2359
2360static void
56b40062 2361ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2362{
2363 tree target = gimple_call_fn (call);
b786d31f
JH
2364
2365 if (!target
2366 || (TREE_CODE (target) != SSA_NAME
2367 && !virtual_method_call_p (target)))
2368 return;
b258210c 2369
7d0aa05b 2370 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2371 /* If we previously turned the call into a direct call, there is
2372 no need to analyze. */
b786d31f 2373 if (cs && !cs->indirect_unknown_callee)
25583c4f 2374 return;
7d0aa05b 2375
a5b58b28 2376 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2377 {
7d0aa05b
JH
2378 tree instance;
2379 tree target = gimple_call_fn (call);
6f8091fc
JH
2380 ipa_polymorphic_call_context context (current_function_decl,
2381 target, call, &instance);
7d0aa05b 2382
ba392339
JH
2383 gcc_checking_assert (cs->indirect_info->otr_type
2384 == obj_type_ref_class (target));
2385 gcc_checking_assert (cs->indirect_info->otr_token
2386 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2387
29c43c83
JH
2388 cs->indirect_info->vptr_changed
2389 = !context.get_dynamic_type (instance,
2390 OBJ_TYPE_REF_OBJECT (target),
2391 obj_type_ref_class (target), call);
0127c169 2392 cs->indirect_info->context = context;
7d0aa05b
JH
2393 }
2394
b258210c 2395 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2396 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2397 else if (virtual_method_call_p (target))
8aab5218 2398 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2399}
2400
2401
e33c6cd6 2402/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2403 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2404 formal parameters are called. */
be95e2b9 2405
3e293154 2406static void
355fe088 2407ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2408{
726a989a 2409 if (is_gimple_call (stmt))
538dd0b7 2410 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2411}
2412
2413/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2414 If OP is a parameter declaration, mark it as used in the info structure
2415 passed in DATA. */
2416
2417static bool
355fe088 2418visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2419{
2420 struct ipa_node_params *info = (struct ipa_node_params *) data;
2421
2422 op = get_base_address (op);
2423 if (op
2424 && TREE_CODE (op) == PARM_DECL)
2425 {
2426 int index = ipa_get_param_decl_index (info, op);
2427 gcc_assert (index >= 0);
310bc633 2428 ipa_set_param_used (info, index, true);
062c604f
MJ
2429 }
2430
2431 return false;
3e293154
MJ
2432}
2433
8aab5218
MJ
2434/* Scan the statements in BB and inspect the uses of formal parameters. Store
2435 the findings in various structures of the associated ipa_node_params
2436 structure, such as parameter flags, notes etc. FBI holds various data about
2437 the function being analyzed. */
be95e2b9 2438
062c604f 2439static void
56b40062 2440ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2441{
726a989a 2442 gimple_stmt_iterator gsi;
8aab5218
MJ
2443 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2444 {
355fe088 2445 gimple *stmt = gsi_stmt (gsi);
3e293154 2446
8aab5218
MJ
2447 if (is_gimple_debug (stmt))
2448 continue;
3e293154 2449
8aab5218
MJ
2450 ipa_analyze_stmt_uses (fbi, stmt);
2451 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2452 visit_ref_for_mod_analysis,
2453 visit_ref_for_mod_analysis,
2454 visit_ref_for_mod_analysis);
5fe8e757 2455 }
8aab5218
MJ
2456 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2457 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis,
2460 visit_ref_for_mod_analysis);
2461}
2462
2463/* Calculate controlled uses of parameters of NODE. */
2464
static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count uses that are call arguments; any non-debug use that
		 is not a call makes the use count undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    /* An unused SSA parameter has zero controlled uses.  */
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2507
/* Free the vectors held by per-basic-block info BI: the call graph edges of
   the block and the parameter alias-analysis statuses.  BI itself is owned
   by the fbi->bb_infos vector and is not freed here.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
2516
/* Dominator walker driving the analysis: for every basic block, in dominator
   order, analyze parameter uses and compute jump functions.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Analysis context shared by all visited blocks.  */
  struct ipa_func_body_info *m_fbi;
};

/* Analyze BB before descending into its dominated children.  Returning NULL
   makes the walker visit all outgoing edges.  */

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2538
c3431191
ML
2539/* Release body info FBI. */
2540
2541void
2542ipa_release_body_info (struct ipa_func_body_info *fbi)
2543{
2544 int i;
2545 struct ipa_bb_info *bi;
2546
2547 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2548 free_ipa_bb_info (bi);
2549 fbi->bb_infos.release ();
2550}
2551
026c3cfd 2552/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2553 of NODE, analyze their uses and compute jump functions associated
2554 with actual arguments of calls from within NODE. */
062c604f
MJ
2555
void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* The analysis is idempotent per node.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When function specialization options forbid the analysis, just mark
     every parameter as used with an undescribable number of uses.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  /* The walks below need cfun and dominator info for NODE's body.  */
  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Distribute all outgoing edges - direct and indirect - into the
     per-basic-block info of the block containing their call statement.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2611
be95e2b9 2612/* Update the jump functions associated with call graph edge E when the call
3e293154 2613 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2614 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2615
3e293154
MJ
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  /* For each argument of E, compose its jump function with the one of the
     corresponding formal parameter recorded on CS.  */
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Propagate the polymorphic call context through the ancestor
	     offset, combining it into the destination context.  */
	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Copy known aggregate values, rebasing their offsets to the
	     ancestor's offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  /* Compose ancestor DST with whatever SRC describes; anything but
	     a pass-through or another ancestor becomes unknown.  */
	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      /* A simple pass-through forwards the source context as-is.  */
	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      /* A simple pass-through composed with SRC yields SRC itself,
	         with aggregate preservation intersected.  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
2805
5ce97055
JH
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  When SPECULATIVE is true the new edge only
   speculates about the target instead of redirecting the call outright.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  /* Strip the address-of operator so we work with the FUNCTION_DECL
     itself.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }


	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  /* A call through a non-function value can never be executed on a
	     valid path; redirect it to __builtin_unreachable.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but can not refer to it. Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated, either confirm the existing
     speculation or give up; never stack a second speculation.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s. Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agree with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  /* Debug counter allows bisecting miscompiles caused by devirtualization.  */
  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      /* Prefer a non-interposable alias so the speculation cannot be
	 invalidated by symbol interposition.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
2971
91bb9f80
MJ
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  REQ_OFFSET is measured in bits from the start of CONSTRUCTOR.
   NOTE(review): the early return when ELT_OFFSET > REQ_OFFSET presumes
   constructor elements are sorted by increasing offset — confirm against
   CONSTRUCTOR invariants.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  /* Only arrays and records have elements addressable by offset.  */
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      /* Rebase the index against a non-zero lower bound of the
		 array domain, if any.  */
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  /* Convert byte offset to bits; give up on elements whose offset
	     does not fit a signed HOST_WIDE_INT or is negative.  */
	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-fields do not have a usable bit position.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      /* Recurse into nested aggregates with the offset rebased to the
	 nested constructor.  */
      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
3044
3045/* Check whether SCALAR could be used to look up an aggregate interprocedural
3046 invariant from a static constructor and if so, return it. Otherwise return
3047 NULL. */
3048
3049static tree
3050ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3051{
3052 if (by_ref)
3053 {
3054 if (TREE_CODE (scalar) != ADDR_EXPR)
3055 return NULL;
3056 scalar = TREE_OPERAND (scalar, 0);
3057 }
3058
8813a647 3059 if (!VAR_P (scalar)
91bb9f80
MJ
3060 || !is_global_var (scalar)
3061 || !TREE_READONLY (scalar)
3062 || !DECL_INITIAL (scalar)
3063 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3064 return NULL;
3065
3066 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3067}
3068
3069/* Retrieve value from aggregate jump function AGG or static initializer of
3070 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3071 none. BY_REF specifies whether the value has to be passed by reference or
3072 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3073 to is set to true if the value comes from an initializer of a constant. */
8b7773a4
MJ
3074
3075tree
91bb9f80
MJ
3076ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3077 HOST_WIDE_INT offset, bool by_ref,
3078 bool *from_global_constant)
8b7773a4
MJ
3079{
3080 struct ipa_agg_jf_item *item;
3081 int i;
3082
91bb9f80
MJ
3083 if (scalar)
3084 {
3085 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3086 if (res)
3087 {
3088 if (from_global_constant)
3089 *from_global_constant = true;
3090 return res;
3091 }
3092 }
3093
3094 if (!agg
3095 || by_ref != agg->by_ref)
8b7773a4
MJ
3096 return NULL;
3097
9771b263 3098 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
3099 if (item->offset == offset)
3100 {
3101 /* Currently we do not have clobber values, return NULL for them once
3102 we do. */
3103 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3104 if (from_global_constant)
3105 *from_global_constant = false;
2c9561b5
MJ
3106 return item->value;
3107 }
8b7773a4
MJ
3108 return NULL;
3109}
3110
4502fe8d 3111/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3112 reference description RDESC. Return true if the reference has been
3113 successfully found and removed. */
4502fe8d 3114
568cda29 3115static bool
5e20cdc9 3116remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3117{
3118 struct ipa_ref *to_del;
3119 struct cgraph_edge *origin;
3120
3121 origin = rdesc->cs;
a854f856
MJ
3122 if (!origin)
3123 return false;
d122681a
ML
3124 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3125 origin->lto_stmt_uid);
568cda29
MJ
3126 if (!to_del)
3127 return false;
3128
d122681a 3129 to_del->remove_reference ();
4502fe8d 3130 if (dump_file)
464d0118
ML
3131 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3132 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
568cda29 3133 return true;
4502fe8d
MJ
3134}
3135
3136/* If JFUNC has a reference description with refcount different from
3137 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3138 NULL. JFUNC must be a constant jump function. */
3139
3140static struct ipa_cst_ref_desc *
3141jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3142{
3143 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3144 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3145 return rdesc;
3146 else
3147 return NULL;
3148}
3149
568cda29
MJ
3150/* If the value of constant jump function JFUNC is an address of a function
3151 declaration, return the associated call graph node. Otherwise return
3152 NULL. */
3153
3154static cgraph_node *
3155cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3156{
3157 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3158 tree cst = ipa_get_jf_constant (jfunc);
3159 if (TREE_CODE (cst) != ADDR_EXPR
3160 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3161 return NULL;
3162
d52f5295 3163 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3164}
3165
3166
3167/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3168 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3169 the edge specified in the rdesc. Return false if either the symbol or the
3170 reference could not be found, otherwise return true. */
3171
3172static bool
3173try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3174{
3175 struct ipa_cst_ref_desc *rdesc;
3176 if (jfunc->type == IPA_JF_CONST
3177 && (rdesc = jfunc_rdesc_usable (jfunc))
3178 && --rdesc->refcount == 0)
3179 {
5e20cdc9 3180 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3181 if (!symbol)
3182 return false;
3183
3184 return remove_described_reference (symbol, rdesc);
3185 }
3186 return true;
3187}
3188
b258210c
MJ
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee loads the target from aggregate memory; look it up in
	 the aggregate jump function or a constant initializer.  */
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      /* A value taken from a mutable aggregate is only trustworthy if the
	 memory is known not to be modified before the call.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* The redirected edge must agree with the function recorded in the
	 constant jump function, if any.  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function's described use has been consumed by the
	 redirection; drop its refcount.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3237
bec81025
MJ
3238/* Return the target to be used in cases of impossible devirtualization. IE
3239 and target (the latter can be NULL) are dumped when dumping is enabled. */
3240
72972c22
MJ
3241tree
3242ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3243{
3244 if (dump_file)
3245 {
3246 if (target)
3247 fprintf (dump_file,
464d0118
ML
3248 "Type inconsistent devirtualization: %s->%s\n",
3249 ie->caller->dump_name (),
bec81025
MJ
3250 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3251 else
3252 fprintf (dump_file,
464d0118
ML
3253 "No devirtualization target in %s\n",
3254 ie->caller->dump_name ());
bec81025
MJ
3255 }
3256 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3257 cgraph_node::get_create (new_target);
bec81025
MJ
3258 return new_target;
3259}
3260
d250540a
MJ
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the vtable lookup is only
		     a guess; mark the redirection speculative.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: use the polymorphic context to enumerate possible call
     targets; a unique target (or none) yields a non-speculative answer.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  /* Last resort: speculatively devirtualize hot calls that are not already
     speculated.  */
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  /* A speculative guess contradicted by the type analysis is simply
	     dropped; a "sure" contradiction means the path is dead.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3368
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* IE may be removed or redirected below; grab its successor first.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember the current speculative target, if any, so we can tell
	 below whether speculation was merely confirmed.  */
      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making edges direct may have reallocated the summary; refetch.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stays indirect; retarget its param index through the jump
	 function so it refers to a parameter of the new root.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3504
3505/* Recursively traverse subtree of NODE (including node) made of inlined
3506 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3507 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3508 update_jump_functions_after_inlining on all non-inlined edges that lead out
3509 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3510 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3511 created. */
be95e2b9 3512
f8e2a1ed 3513static bool
3e293154
MJ
3514propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3515 struct cgraph_node *node,
d52f5295 3516 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3517{
3518 struct cgraph_edge *e;
f8e2a1ed 3519 bool res;
3e293154 3520
e33c6cd6 3521 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3522
3523 for (e = node->callees; e; e = e->next_callee)
3524 if (!e->inline_failed)
f8e2a1ed 3525 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3526 else
3527 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3528 for (e = node->indirect_calls; e; e = e->next_callee)
3529 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3530
3531 return res;
3e293154
MJ
3532}
3533
4502fe8d
MJ
3534/* Combine two controlled uses counts as done during inlining. */
3535
3536static int
3537combine_controlled_uses_counters (int c, int d)
3538{
3539 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3540 return IPA_UNDESCRIBED_USE;
3541 else
3542 return c + d - 1;
3543}
3544
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Guard against calls with variable number of arguments.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  /* The callee's controlled uses of parameter I become controlled
	     uses of the caller's parameter SRC_IDX that is passed through.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* No controlled uses remain in an IPA-CP clone; a reference
		 created for cloning a known function-address constant can
		 be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline clones created by IPA-CP
		     and drop the duplicated references they carry.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the formal parameter count (varargs or mismatched
     calls) have no parameter to attach counts to; mark their uses as
     undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3659
3e293154
MJ
3660/* Update jump functions and call note functions on inlining the call site CS.
3661 CS is expected to lead to a node already cloned by
3662 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3663 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3664 created. */
be95e2b9 3665
f8e2a1ed 3666bool
3e293154 3667ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3668 vec<cgraph_edge *> *new_edges)
3e293154 3669{
5ee53a06 3670 bool changed;
f8e2a1ed
MJ
3671 /* Do nothing if the preparation phase has not been carried out yet
3672 (i.e. during early inlining). */
dd912cb8 3673 if (!ipa_node_params_sum)
f8e2a1ed 3674 return false;
6fe906a3 3675 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3676
4502fe8d 3677 propagate_controlled_uses (cs);
5ee53a06
JH
3678 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3679
5ee53a06 3680 return changed;
518dc859
RL
3681}
3682
86cd0334
MJ
3683/* Ensure that array of edge arguments infos is big enough to accommodate a
3684 structure for all edges and reallocates it if not. Also, allocate
3685 associated hash tables is they do not already exist. */
3686
3687void
3688ipa_check_create_edge_args (void)
3689{
6fe906a3
MJ
3690 if (!ipa_edge_args_sum)
3691 ipa_edge_args_sum
3692 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3693 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
3694 if (!ipa_bits_hash_table)
3695 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3696 if (!ipa_vr_hash_table)
3697 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3698}
3699
771578a0 3700/* Free all ipa_edge structures. */
be95e2b9 3701
518dc859 3702void
771578a0 3703ipa_free_all_edge_args (void)
518dc859 3704{
6fe906a3 3705 if (!ipa_edge_args_sum)
9771b263
DN
3706 return;
3707
6fe906a3
MJ
3708 ipa_edge_args_sum->release ();
3709 ipa_edge_args_sum = NULL;
518dc859
RL
3710}
3711
771578a0 3712/* Free all ipa_node_params structures. */
be95e2b9 3713
518dc859 3714void
771578a0 3715ipa_free_all_node_params (void)
518dc859 3716{
a0a348b1 3717 ipa_node_params_sum->release ();
dd912cb8 3718 ipa_node_params_sum = NULL;
771578a0
MJ
3719}
3720
9d3e0adc 3721/* Initialize IPA CP transformation summary and also allocate any necessary hash
86cd0334 3722 tables if they do not already exist. */
04be694e
MJ
3723
3724void
9d3e0adc 3725ipcp_transformation_initialize (void)
04be694e 3726{
86cd0334
MJ
3727 if (!ipa_bits_hash_table)
3728 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3729 if (!ipa_vr_hash_table)
3730 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
9d3e0adc
ML
3731 if (ipcp_transformation_sum == NULL)
3732 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
04be694e
MJ
3733}
3734
2c9561b5
MJ
3735/* Set the aggregate replacements of NODE to be AGGVALS. */
3736
3737void
3738ipa_set_node_agg_value_chain (struct cgraph_node *node,
3739 struct ipa_agg_replacement_value *aggvals)
3740{
9d3e0adc
ML
3741 ipcp_transformation_initialize ();
3742 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3743 s->agg_values = aggvals;
2c9561b5
MJ
3744}
3745
6fe906a3
MJ
3746/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3747 count data structures accordingly. */
be95e2b9 3748
6fe906a3
MJ
3749void
3750ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 3751{
568cda29
MJ
3752 if (args->jump_functions)
3753 {
3754 struct ipa_jump_func *jf;
3755 int i;
3756 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3757 {
3758 struct ipa_cst_ref_desc *rdesc;
3759 try_decrement_rdesc_refcount (jf);
3760 if (jf->type == IPA_JF_CONST
3761 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3762 && rdesc->cs == cs)
3763 rdesc->cs = NULL;
3764 }
568cda29 3765 }
518dc859
RL
3766}
3767
6fe906a3
MJ
3768/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3769 reference count data strucutres accordingly. */
be95e2b9 3770
6fe906a3
MJ
3771void
3772ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3773 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 3774{
8b7773a4 3775 unsigned int i;
771578a0 3776
9771b263 3777 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3778 if (old_args->polymorphic_call_contexts)
3779 new_args->polymorphic_call_contexts
3780 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3781
9771b263 3782 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3783 {
3784 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3785 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3786
3787 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3788
3789 if (src_jf->type == IPA_JF_CONST)
3790 {
3791 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3792
3793 if (!src_rdesc)
3794 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3795 else if (src->caller == dst->caller)
3796 {
3797 struct ipa_ref *ref;
5e20cdc9 3798 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3799 gcc_checking_assert (n);
d122681a
ML
3800 ref = src->caller->find_reference (n, src->call_stmt,
3801 src->lto_stmt_uid);
568cda29 3802 gcc_checking_assert (ref);
d122681a 3803 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3804
601f3293 3805 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3806 dst_rdesc->cs = dst;
3807 dst_rdesc->refcount = src_rdesc->refcount;
3808 dst_rdesc->next_duplicate = NULL;
3809 dst_jf->value.constant.rdesc = dst_rdesc;
3810 }
4502fe8d
MJ
3811 else if (src_rdesc->cs == src)
3812 {
601f3293 3813 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3814 dst_rdesc->cs = dst;
4502fe8d 3815 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3816 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3817 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3818 dst_jf->value.constant.rdesc = dst_rdesc;
3819 }
3820 else
3821 {
3822 struct ipa_cst_ref_desc *dst_rdesc;
3823 /* This can happen during inlining, when a JFUNC can refer to a
3824 reference taken in a function up in the tree of inline clones.
3825 We need to find the duplicate that refers to our tree of
3826 inline clones. */
3827
3828 gcc_assert (dst->caller->global.inlined_to);
3829 for (dst_rdesc = src_rdesc->next_duplicate;
3830 dst_rdesc;
3831 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3832 {
3833 struct cgraph_node *top;
3834 top = dst_rdesc->cs->caller->global.inlined_to
3835 ? dst_rdesc->cs->caller->global.inlined_to
3836 : dst_rdesc->cs->caller;
3837 if (dst->caller->global.inlined_to == top)
3838 break;
3839 }
44a60244 3840 gcc_assert (dst_rdesc);
4502fe8d
MJ
3841 dst_jf->value.constant.rdesc = dst_rdesc;
3842 }
3843 }
6fe45955
MJ
3844 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3845 && src->caller == dst->caller)
3846 {
3847 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3848 ? dst->caller->global.inlined_to : dst->caller;
3849 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3850 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3851
3852 int c = ipa_get_controlled_uses (root_info, idx);
3853 if (c != IPA_UNDESCRIBED_USE)
3854 {
3855 c++;
3856 ipa_set_controlled_uses (root_info, idx, c);
3857 }
3858 }
4502fe8d 3859 }
771578a0
MJ
3860}
3861
dd912cb8 3862/* Analyze newly added function into callgraph. */
be95e2b9 3863
771578a0 3864static void
dd912cb8 3865ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3866{
dd912cb8
ML
3867 if (node->has_gimple_body_p ())
3868 ipa_analyze_node (node);
3869}
771578a0 3870
dd912cb8
ML
3871/* Hook that is called by summary when a node is duplicated. */
3872
3873void
3874ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3875 ipa_node_params *old_info,
3876 ipa_node_params *new_info)
3877{
3878 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3879
f65f1ae3 3880 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 3881 new_info->lattices = NULL;
771578a0 3882 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
3883 new_info->known_csts = old_info->known_csts.copy ();
3884 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 3885
8aab5218 3886 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3887 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3888 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3889
3890 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3891 if (old_av)
2c9561b5 3892 {
04be694e
MJ
3893 new_av = NULL;
3894 while (old_av)
3895 {
3896 struct ipa_agg_replacement_value *v;
2c9561b5 3897
04be694e
MJ
3898 v = ggc_alloc<ipa_agg_replacement_value> ();
3899 memcpy (v, old_av, sizeof (*v));
3900 v->next = new_av;
3901 new_av = v;
3902 old_av = old_av->next;
3903 }
3904 ipa_set_node_agg_value_chain (dst, new_av);
3905 }
3906
9d3e0adc 3907 ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
04be694e 3908
8bc5448f 3909 if (src_trans)
04be694e 3910 {
9d3e0adc
ML
3911 ipcp_transformation_initialize ();
3912 src_trans = ipcp_transformation_sum->get_create (src);
3913 ipcp_transformation *dst_trans
3914 = ipcp_transformation_sum->get_create (dst);
86cd0334
MJ
3915
3916 dst_trans->bits = vec_safe_copy (src_trans->bits);
3917
8bc5448f 3918 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
8bc5448f
KV
3919 vec<ipa_vr, va_gc> *&dst_vr
3920 = ipcp_get_transformation_summary (dst)->m_vr;
8bc5448f
KV
3921 if (vec_safe_length (src_trans->m_vr) > 0)
3922 {
3923 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3924 for (unsigned i = 0; i < src_vr->length (); ++i)
3925 dst_vr->quick_push ((*src_vr)[i]);
3926 }
2c9561b5 3927 }
771578a0
MJ
3928}
3929
3930/* Register our cgraph hooks if they are not already there. */
be95e2b9 3931
518dc859 3932void
771578a0 3933ipa_register_cgraph_hooks (void)
518dc859 3934{
dd912cb8 3935 ipa_check_create_node_params ();
6fe906a3 3936 ipa_check_create_edge_args ();
dd912cb8 3937
dd912cb8 3938 function_insertion_hook_holder =
3dafb85c 3939 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3940}
518dc859 3941
771578a0 3942/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3943
771578a0
MJ
3944static void
3945ipa_unregister_cgraph_hooks (void)
3946{
3dafb85c 3947 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3948 function_insertion_hook_holder = NULL;
771578a0
MJ
3949}
3950
3951/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3952 longer needed after ipa-cp. */
be95e2b9 3953
771578a0 3954void
e33c6cd6 3955ipa_free_all_structures_after_ipa_cp (void)
3e293154 3956{
2bf86c84 3957 if (!optimize && !in_lto_p)
3e293154
MJ
3958 {
3959 ipa_free_all_edge_args ();
3960 ipa_free_all_node_params ();
2651e637
ML
3961 ipcp_sources_pool.release ();
3962 ipcp_cst_values_pool.release ();
3963 ipcp_poly_ctx_values_pool.release ();
3964 ipcp_agg_lattice_pool.release ();
3e293154 3965 ipa_unregister_cgraph_hooks ();
601f3293 3966 ipa_refdesc_pool.release ();
3e293154
MJ
3967 }
3968}
3969
3970/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3971 longer needed after indirect inlining. */
be95e2b9 3972
3e293154 3973void
e33c6cd6 3974ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3975{
3976 ipa_free_all_edge_args ();
3977 ipa_free_all_node_params ();
3978 ipa_unregister_cgraph_hooks ();
2651e637
ML
3979 ipcp_sources_pool.release ();
3980 ipcp_cst_values_pool.release ();
3981 ipcp_poly_ctx_values_pool.release ();
3982 ipcp_agg_lattice_pool.release ();
601f3293 3983 ipa_refdesc_pool.release ();
518dc859
RL
3984}
3985
dcd416e3 3986/* Print ipa_tree_map data structures of all functions in the
518dc859 3987 callgraph to F. */
be95e2b9 3988
518dc859 3989void
2c9561b5 3990ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3991{
3992 int i, count;
3e293154 3993 struct ipa_node_params *info;
518dc859 3994
67348ccc 3995 if (!node->definition)
3e293154
MJ
3996 return;
3997 info = IPA_NODE_REF (node);
464d0118 3998 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3e293154
MJ
3999 count = ipa_get_param_count (info);
4000 for (i = 0; i < count; i++)
518dc859 4001 {
4502fe8d
MJ
4002 int c;
4003
a4e33812 4004 fprintf (f, " ");
e067bd43 4005 ipa_dump_param (f, info, i);
339f49ec
JH
4006 if (ipa_is_param_used (info, i))
4007 fprintf (f, " used");
4502fe8d
MJ
4008 c = ipa_get_controlled_uses (info, i);
4009 if (c == IPA_UNDESCRIBED_USE)
4010 fprintf (f, " undescribed_use");
4011 else
4012 fprintf (f, " controlled_uses=%i", c);
3e293154 4013 fprintf (f, "\n");
518dc859
RL
4014 }
4015}
dcd416e3 4016
ca30a539 4017/* Print ipa_tree_map data structures of all functions in the
3e293154 4018 callgraph to F. */
be95e2b9 4019
3e293154 4020void
ca30a539 4021ipa_print_all_params (FILE * f)
3e293154
MJ
4022{
4023 struct cgraph_node *node;
4024
ca30a539 4025 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4026 FOR_EACH_FUNCTION (node)
ca30a539 4027 ipa_print_node_params (f, node);
3e293154 4028}
3f84bf08 4029
2c9561b5
MJ
4030/* Dump the AV linked list. */
4031
4032void
4033ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4034{
4035 bool comma = false;
4036 fprintf (f, " Aggregate replacements:");
4037 for (; av; av = av->next)
4038 {
4039 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4040 av->index, av->offset);
ef6cb4c7 4041 print_generic_expr (f, av->value);
2c9561b5
MJ
4042 comma = true;
4043 }
4044 fprintf (f, "\n");
4045}
4046
fb3f88cc
JH
4047/* Stream out jump function JUMP_FUNC to OB. */
4048
4049static void
4050ipa_write_jump_function (struct output_block *ob,
4051 struct ipa_jump_func *jump_func)
4052{
8b7773a4
MJ
4053 struct ipa_agg_jf_item *item;
4054 struct bitpack_d bp;
4055 int i, count;
f714ecf5 4056 int flag = 0;
fb3f88cc 4057
f714ecf5
JH
4058 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4059 as well as WPA memory by handling them specially. */
4060 if (jump_func->type == IPA_JF_CONST
4061 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4062 flag = 1;
4063
4064 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
fb3f88cc
JH
4065 switch (jump_func->type)
4066 {
4067 case IPA_JF_UNKNOWN:
4068 break;
4069 case IPA_JF_CONST:
5368224f 4070 gcc_assert (
4502fe8d 4071 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
f714ecf5
JH
4072 stream_write_tree (ob,
4073 flag
4074 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4075 : jump_func->value.constant.value, true);
fb3f88cc
JH
4076 break;
4077 case IPA_JF_PASS_THROUGH:
412288f1 4078 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4079 if (jump_func->value.pass_through.operation == NOP_EXPR)
4080 {
4081 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4082 bp = bitpack_create (ob->main_stream);
4083 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4084 streamer_write_bitpack (&bp);
4085 }
a2b4c188
KV
4086 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4087 == tcc_unary)
4088 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4089 else
4090 {
4091 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4092 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4093 }
fb3f88cc
JH
4094 break;
4095 case IPA_JF_ANCESTOR:
412288f1 4096 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4097 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4098 bp = bitpack_create (ob->main_stream);
4099 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4100 streamer_write_bitpack (&bp);
fb3f88cc 4101 break;
8b7773a4
MJ
4102 }
4103
9771b263 4104 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4105 streamer_write_uhwi (ob, count);
4106 if (count)
4107 {
4108 bp = bitpack_create (ob->main_stream);
4109 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4110 streamer_write_bitpack (&bp);
4111 }
4112
9771b263 4113 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4114 {
4115 streamer_write_uhwi (ob, item->offset);
4116 stream_write_tree (ob, item->value, true);
fb3f88cc 4117 }
04be694e 4118
209ca542 4119 bp = bitpack_create (ob->main_stream);
86cd0334 4120 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4121 streamer_write_bitpack (&bp);
86cd0334 4122 if (jump_func->bits)
209ca542 4123 {
86cd0334
MJ
4124 streamer_write_widest_int (ob, jump_func->bits->value);
4125 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4126 }
86cd0334 4127 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4128 streamer_write_bitpack (&bp);
86cd0334 4129 if (jump_func->m_vr)
8bc5448f
KV
4130 {
4131 streamer_write_enum (ob->main_stream, value_rang_type,
54994253
AH
4132 VR_LAST, jump_func->m_vr->kind ());
4133 stream_write_tree (ob, jump_func->m_vr->min (), true);
4134 stream_write_tree (ob, jump_func->m_vr->max (), true);
8bc5448f 4135 }
fb3f88cc
JH
4136}
4137
4138/* Read in jump function JUMP_FUNC from IB. */
4139
4140static void
4141ipa_read_jump_function (struct lto_input_block *ib,
4142 struct ipa_jump_func *jump_func,
4502fe8d 4143 struct cgraph_edge *cs,
f714ecf5
JH
4144 struct data_in *data_in,
4145 bool prevails)
fb3f88cc 4146{
4a53743e
MJ
4147 enum jump_func_type jftype;
4148 enum tree_code operation;
8b7773a4 4149 int i, count;
f714ecf5
JH
4150 int val = streamer_read_uhwi (ib);
4151 bool flag = val & 1;
fb3f88cc 4152
f714ecf5 4153 jftype = (enum jump_func_type) (val / 2);
4a53743e 4154 switch (jftype)
fb3f88cc
JH
4155 {
4156 case IPA_JF_UNKNOWN:
04be694e 4157 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4158 break;
4159 case IPA_JF_CONST:
f714ecf5
JH
4160 {
4161 tree t = stream_read_tree (ib, data_in);
4162 if (flag && prevails)
4163 t = build_fold_addr_expr (t);
4164 ipa_set_jf_constant (jump_func, t, cs);
4165 }
fb3f88cc
JH
4166 break;
4167 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4168 operation = (enum tree_code) streamer_read_uhwi (ib);
4169 if (operation == NOP_EXPR)
4170 {
4171 int formal_id = streamer_read_uhwi (ib);
4172 struct bitpack_d bp = streamer_read_bitpack (ib);
4173 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4174 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4175 }
a2b4c188
KV
4176 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4177 {
4178 int formal_id = streamer_read_uhwi (ib);
4179 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4180 }
4a53743e
MJ
4181 else
4182 {
4183 tree operand = stream_read_tree (ib, data_in);
4184 int formal_id = streamer_read_uhwi (ib);
4185 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4186 operation);
4187 }
fb3f88cc
JH
4188 break;
4189 case IPA_JF_ANCESTOR:
4a53743e
MJ
4190 {
4191 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4192 int formal_id = streamer_read_uhwi (ib);
4193 struct bitpack_d bp = streamer_read_bitpack (ib);
4194 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4195 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4196 break;
4197 }
f714ecf5
JH
4198 default:
4199 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
8b7773a4
MJ
4200 }
4201
4202 count = streamer_read_uhwi (ib);
f714ecf5
JH
4203 if (prevails)
4204 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4205 if (count)
4206 {
4a53743e 4207 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4208 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4209 }
4210 for (i = 0; i < count; i++)
4211 {
f32682ca
DN
4212 struct ipa_agg_jf_item item;
4213 item.offset = streamer_read_uhwi (ib);
4214 item.value = stream_read_tree (ib, data_in);
f714ecf5
JH
4215 if (prevails)
4216 jump_func->agg.items->quick_push (item);
fb3f88cc 4217 }
04be694e
MJ
4218
4219 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4220 bool bits_known = bp_unpack_value (&bp, 1);
4221 if (bits_known)
4222 {
86cd0334
MJ
4223 widest_int value = streamer_read_widest_int (ib);
4224 widest_int mask = streamer_read_widest_int (ib);
f714ecf5
JH
4225 if (prevails)
4226 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4227 }
4228 else
86cd0334 4229 jump_func->bits = NULL;
8bc5448f
KV
4230
4231 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4232 bool vr_known = bp_unpack_value (&vr_bp, 1);
4233 if (vr_known)
4234 {
54994253 4235 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
86cd0334
MJ
4236 VR_LAST);
4237 tree min = stream_read_tree (ib, data_in);
4238 tree max = stream_read_tree (ib, data_in);
f714ecf5
JH
4239 if (prevails)
4240 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4241 }
4242 else
86cd0334 4243 jump_func->m_vr = NULL;
fb3f88cc
JH
4244}
4245
e33c6cd6
MJ
4246/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4247 relevant to indirect inlining to OB. */
661e7330
MJ
4248
4249static void
e33c6cd6
MJ
4250ipa_write_indirect_edge_info (struct output_block *ob,
4251 struct cgraph_edge *cs)
661e7330 4252{
e33c6cd6 4253 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4254 struct bitpack_d bp;
e33c6cd6 4255
412288f1 4256 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4257 bp = bitpack_create (ob->main_stream);
4258 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4259 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4260 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4261 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4262 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4263 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 4264 streamer_write_bitpack (&bp);
ba392339
JH
4265 if (ii->agg_contents || ii->polymorphic)
4266 streamer_write_hwi (ob, ii->offset);
4267 else
4268 gcc_assert (ii->offset == 0);
b258210c
MJ
4269
4270 if (ii->polymorphic)
4271 {
412288f1 4272 streamer_write_hwi (ob, ii->otr_token);
b9393656 4273 stream_write_tree (ob, ii->otr_type, true);
ba392339 4274 ii->context.stream_out (ob);
b258210c 4275 }
661e7330
MJ
4276}
4277
e33c6cd6
MJ
4278/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4279 relevant to indirect inlining from IB. */
661e7330
MJ
4280
4281static void
e33c6cd6 4282ipa_read_indirect_edge_info (struct lto_input_block *ib,
ba392339 4283 struct data_in *data_in,
e33c6cd6 4284 struct cgraph_edge *cs)
661e7330 4285{
e33c6cd6 4286 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4287 struct bitpack_d bp;
661e7330 4288
412288f1 4289 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 4290 bp = streamer_read_bitpack (ib);
2465dcc2 4291 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4292 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4293 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4294 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 4295 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 4296 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
4297 if (ii->agg_contents || ii->polymorphic)
4298 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4299 else
4300 ii->offset = 0;
b258210c
MJ
4301 if (ii->polymorphic)
4302 {
412288f1 4303 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4304 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 4305 ii->context.stream_in (ib, data_in);
b258210c 4306 }
661e7330
MJ
4307}
4308
fb3f88cc
JH
4309/* Stream out NODE info to OB. */
4310
4311static void
4312ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4313{
4314 int node_ref;
7380e6ef 4315 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4316 struct ipa_node_params *info = IPA_NODE_REF (node);
4317 int j;
4318 struct cgraph_edge *e;
2465dcc2 4319 struct bitpack_d bp;
fb3f88cc 4320
7380e6ef 4321 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4322 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4323 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4324
0e8853ee
JH
4325 streamer_write_uhwi (ob, ipa_get_param_count (info));
4326 for (j = 0; j < ipa_get_param_count (info); j++)
4327 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4328 bp = bitpack_create (ob->main_stream);
8aab5218 4329 gcc_assert (info->analysis_done
661e7330 4330 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4331 gcc_assert (!info->node_enqueued);
4332 gcc_assert (!info->ipcp_orig_node);
4333 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4334 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4335 streamer_write_bitpack (&bp);
4502fe8d 4336 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
4337 {
4338 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4339 stream_write_tree (ob, ipa_get_type (info, j), true);
4340 }
fb3f88cc
JH
4341 for (e = node->callees; e; e = e->next_callee)
4342 {
4343 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4344
5ce97055
JH
4345 streamer_write_uhwi (ob,
4346 ipa_get_cs_argument_count (args) * 2
4347 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 4348 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4349 {
4350 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4351 if (args->polymorphic_call_contexts != NULL)
4352 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4353 }
fb3f88cc 4354 }
e33c6cd6 4355 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4356 {
4357 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4358
5ce97055
JH
4359 streamer_write_uhwi (ob,
4360 ipa_get_cs_argument_count (args) * 2
4361 + (args->polymorphic_call_contexts != NULL));
c8246dbe 4362 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
4363 {
4364 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4365 if (args->polymorphic_call_contexts != NULL)
4366 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4367 }
c8246dbe
JH
4368 ipa_write_indirect_edge_info (ob, e);
4369 }
fb3f88cc
JH
4370}
4371
f714ecf5 4372/* Stream in edge E from IB. */
7b377b22 4373
f714ecf5
JH
4374static void
4375ipa_read_edge_info (struct lto_input_block *ib,
4376 struct data_in *data_in,
4377 struct cgraph_edge *e, bool prevails)
7b377b22 4378{
f714ecf5
JH
4379 int count = streamer_read_uhwi (ib);
4380 bool contexts_computed = count & 1;
4381
4382 count /= 2;
4383 if (!count)
4384 return;
4385 if (prevails && e->possibly_call_in_translation_unit_p ())
4386 {
4387 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4388 vec_safe_grow_cleared (args->jump_functions, count);
4389 if (contexts_computed)
4390 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4391 for (int k = 0; k < count; k++)
4392 {
4393 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4394 data_in, prevails);
4395 if (contexts_computed)
4396 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4397 (ib, data_in);
4398 }
4399 }
4400 else
4401 {
4402 for (int k = 0; k < count; k++)
4403 {
4404 struct ipa_jump_func dummy;
4405 ipa_read_jump_function (ib, &dummy, e,
4406 data_in, prevails);
4407 if (contexts_computed)
4408 {
4409 struct ipa_polymorphic_call_context ctx;
4410 ctx.stream_in (ib, data_in);
4411 }
4412 }
4413 }
7b377b22
JH
4414}
4415
61502ca8 4416/* Stream in NODE info from IB. */
fb3f88cc
JH
4417
4418static void
4419ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4420 struct data_in *data_in)
4421{
fb3f88cc
JH
4422 int k;
4423 struct cgraph_edge *e;
2465dcc2 4424 struct bitpack_d bp;
f714ecf5
JH
4425 bool prevails = node->prevailing_p ();
4426 struct ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
fb3f88cc 4427
f714ecf5
JH
4428 int param_count = streamer_read_uhwi (ib);
4429 if (prevails)
4430 {
4431 ipa_alloc_node_params (node, param_count);
4432 for (k = 0; k < param_count; k++)
4433 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4434 if (ipa_get_param_count (info) != 0)
4435 info->analysis_done = true;
4436 info->node_enqueued = false;
4437 }
4438 else
4439 for (k = 0; k < param_count; k++)
4440 streamer_read_uhwi (ib);
a5e14a42 4441
412288f1 4442 bp = streamer_read_bitpack (ib);
f714ecf5 4443 for (k = 0; k < param_count; k++)
a5e14a42 4444 {
f714ecf5
JH
4445 bool used = bp_unpack_value (&bp, 1);
4446
4447 if (prevails)
4448 ipa_set_param_used (info, k, used);
a5e14a42 4449 }
f714ecf5 4450 for (k = 0; k < param_count; k++)
fb3f88cc 4451 {
f714ecf5
JH
4452 int nuses = streamer_read_hwi (ib);
4453 tree type = stream_read_tree (ib, data_in);
fb3f88cc 4454
f714ecf5 4455 if (prevails)
5ce97055 4456 {
f714ecf5
JH
4457 ipa_set_controlled_uses (info, k, nuses);
4458 (*info->descriptors)[k].decl_or_type = type;
5ce97055 4459 }
fb3f88cc 4460 }
f714ecf5
JH
4461 for (e = node->callees; e; e = e->next_callee)
4462 ipa_read_edge_info (ib, data_in, e, prevails);
e33c6cd6 4463 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe 4464 {
f714ecf5 4465 ipa_read_edge_info (ib, data_in, e, prevails);
c8246dbe
JH
4466 ipa_read_indirect_edge_info (ib, data_in, e);
4467 }
fb3f88cc
JH
4468}
4469
4470/* Write jump functions for nodes in SET. */
4471
4472void
f27c1867 4473ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4474{
4475 struct cgraph_node *node;
93536c97 4476 struct output_block *ob;
fb3f88cc 4477 unsigned int count = 0;
f27c1867
JH
4478 lto_symtab_encoder_iterator lsei;
4479 lto_symtab_encoder_t encoder;
4480
6fe906a3 4481 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 4482 return;
fb3f88cc 4483
93536c97 4484 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4485 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 4486 ob->symbol = NULL;
f27c1867
JH
4487 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4488 lsei_next_function_in_partition (&lsei))
fb3f88cc 4489 {
f27c1867 4490 node = lsei_cgraph_node (lsei);
d52f5295 4491 if (node->has_gimple_body_p ()
c47d0034 4492 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4493 count++;
4494 }
4495
412288f1 4496 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4497
4498 /* Process all of the functions. */
f27c1867
JH
4499 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4500 lsei_next_function_in_partition (&lsei))
fb3f88cc 4501 {
f27c1867 4502 node = lsei_cgraph_node (lsei);
d52f5295 4503 if (node->has_gimple_body_p ()
c47d0034 4504 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4505 ipa_write_node_info (ob, node);
4506 }
412288f1 4507 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4508 produce_asm (ob, NULL);
4509 destroy_output_block (ob);
4510}
4511
4512/* Read section in file FILE_DATA of length LEN with data DATA. */
4513
4514static void
4515ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4516 size_t len)
4517{
4518 const struct lto_function_header *header =
4519 (const struct lto_function_header *) data;
4ad9a9de
EB
4520 const int cfg_offset = sizeof (struct lto_function_header);
4521 const int main_offset = cfg_offset + header->cfg_size;
4522 const int string_offset = main_offset + header->main_size;
fb3f88cc 4523 struct data_in *data_in;
fb3f88cc
JH
4524 unsigned int i;
4525 unsigned int count;
4526
207c68cd 4527 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4528 header->main_size, file_data->mode_table);
fb3f88cc
JH
4529
4530 data_in =
4531 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4532 header->string_size, vNULL);
412288f1 4533 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4534
4535 for (i = 0; i < count; i++)
4536 {
4537 unsigned int index;
4538 struct cgraph_node *node;
7380e6ef 4539 lto_symtab_encoder_t encoder;
fb3f88cc 4540
412288f1 4541 index = streamer_read_uhwi (&ib_main);
7380e6ef 4542 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4543 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4544 index));
67348ccc 4545 gcc_assert (node->definition);
fb3f88cc
JH
4546 ipa_read_node_info (&ib_main, node, data_in);
4547 }
4548 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4549 len);
4550 lto_data_in_delete (data_in);
4551}
4552
4553/* Read ipcp jump functions. */
4554
4555void
4556ipa_prop_read_jump_functions (void)
4557{
4558 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4559 struct lto_file_decl_data *file_data;
4560 unsigned int j = 0;
4561
4562 ipa_check_create_node_params ();
4563 ipa_check_create_edge_args ();
4564 ipa_register_cgraph_hooks ();
4565
4566 while ((file_data = file_data_vec[j++]))
4567 {
4568 size_t len;
4569 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4570
4571 if (data)
4572 ipa_prop_read_section (file_data, data, len);
4573 }
4574}
4575
2c9561b5 4576void
04be694e 4577write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
4578{
4579 int node_ref;
4580 unsigned int count = 0;
4581 lto_symtab_encoder_t encoder;
4582 struct ipa_agg_replacement_value *aggvals, *av;
4583
4584 aggvals = ipa_get_agg_replacements_for_node (node);
4585 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4586 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4587 streamer_write_uhwi (ob, node_ref);
4588
4589 for (av = aggvals; av; av = av->next)
4590 count++;
4591 streamer_write_uhwi (ob, count);
4592
4593 for (av = aggvals; av; av = av->next)
4594 {
7b920a9a
MJ
4595 struct bitpack_d bp;
4596
2c9561b5
MJ
4597 streamer_write_uhwi (ob, av->offset);
4598 streamer_write_uhwi (ob, av->index);
4599 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4600
4601 bp = bitpack_create (ob->main_stream);
4602 bp_pack_value (&bp, av->by_ref, 1);
4603 streamer_write_bitpack (&bp);
2c9561b5 4604 }
04be694e 4605
9d3e0adc 4606 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
4607 if (ts && vec_safe_length (ts->m_vr) > 0)
4608 {
4609 count = ts->m_vr->length ();
4610 streamer_write_uhwi (ob, count);
4611 for (unsigned i = 0; i < count; ++i)
4612 {
4613 struct bitpack_d bp;
4614 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4615 bp = bitpack_create (ob->main_stream);
4616 bp_pack_value (&bp, parm_vr->known, 1);
4617 streamer_write_bitpack (&bp);
4618 if (parm_vr->known)
4619 {
4620 streamer_write_enum (ob->main_stream, value_rang_type,
4621 VR_LAST, parm_vr->type);
4622 streamer_write_wide_int (ob, parm_vr->min);
4623 streamer_write_wide_int (ob, parm_vr->max);
4624 }
4625 }
4626 }
4627 else
4628 streamer_write_uhwi (ob, 0);
4629
209ca542
PK
4630 if (ts && vec_safe_length (ts->bits) > 0)
4631 {
4632 count = ts->bits->length ();
4633 streamer_write_uhwi (ob, count);
4634
4635 for (unsigned i = 0; i < count; ++i)
4636 {
86cd0334 4637 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 4638 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 4639 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 4640 streamer_write_bitpack (&bp);
86cd0334 4641 if (bits_jfunc)
209ca542 4642 {
86cd0334
MJ
4643 streamer_write_widest_int (ob, bits_jfunc->value);
4644 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
4645 }
4646 }
4647 }
4648 else
4649 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
4650}
4651
4652/* Stream in the aggregate value replacement chain for NODE from IB. */
4653
4654static void
04be694e
MJ
4655read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4656 data_in *data_in)
2c9561b5
MJ
4657{
4658 struct ipa_agg_replacement_value *aggvals = NULL;
4659 unsigned int count, i;
4660
4661 count = streamer_read_uhwi (ib);
4662 for (i = 0; i <count; i++)
4663 {
4664 struct ipa_agg_replacement_value *av;
7b920a9a 4665 struct bitpack_d bp;
2c9561b5 4666
766090c2 4667 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
4668 av->offset = streamer_read_uhwi (ib);
4669 av->index = streamer_read_uhwi (ib);
4670 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4671 bp = streamer_read_bitpack (ib);
4672 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4673 av->next = aggvals;
4674 aggvals = av;
4675 }
4676 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 4677
209ca542
PK
4678 count = streamer_read_uhwi (ib);
4679 if (count > 0)
4680 {
9d3e0adc
ML
4681 ipcp_transformation_initialize ();
4682 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
8bc5448f
KV
4683 vec_safe_grow_cleared (ts->m_vr, count);
4684 for (i = 0; i < count; i++)
4685 {
4686 ipa_vr *parm_vr;
4687 parm_vr = &(*ts->m_vr)[i];
4688 struct bitpack_d bp;
4689 bp = streamer_read_bitpack (ib);
4690 parm_vr->known = bp_unpack_value (&bp, 1);
4691 if (parm_vr->known)
4692 {
54994253 4693 parm_vr->type = streamer_read_enum (ib, value_range_kind,
8bc5448f
KV
4694 VR_LAST);
4695 parm_vr->min = streamer_read_wide_int (ib);
4696 parm_vr->max = streamer_read_wide_int (ib);
4697 }
4698 }
4699 }
4700 count = streamer_read_uhwi (ib);
4701 if (count > 0)
4702 {
9d3e0adc
ML
4703 ipcp_transformation_initialize ();
4704 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
209ca542
PK
4705 vec_safe_grow_cleared (ts->bits, count);
4706
4707 for (i = 0; i < count; i++)
4708 {
209ca542 4709 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
4710 bool known = bp_unpack_value (&bp, 1);
4711 if (known)
209ca542 4712 {
86cd0334
MJ
4713 ipa_bits *bits
4714 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4715 streamer_read_widest_int (ib));
4716 (*ts->bits)[i] = bits;
209ca542
PK
4717 }
4718 }
4719 }
2c9561b5
MJ
4720}
4721
4722/* Write all aggregate replacement for nodes in set. */
4723
4724void
04be694e 4725ipcp_write_transformation_summaries (void)
2c9561b5
MJ
4726{
4727 struct cgraph_node *node;
4728 struct output_block *ob;
4729 unsigned int count = 0;
4730 lto_symtab_encoder_iterator lsei;
4731 lto_symtab_encoder_t encoder;
4732
2c9561b5
MJ
4733 ob = create_output_block (LTO_section_ipcp_transform);
4734 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 4735 ob->symbol = NULL;
2c9561b5
MJ
4736 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4737 lsei_next_function_in_partition (&lsei))
4738 {
4739 node = lsei_cgraph_node (lsei);
04be694e 4740 if (node->has_gimple_body_p ())
2c9561b5
MJ
4741 count++;
4742 }
4743
4744 streamer_write_uhwi (ob, count);
4745
4746 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4747 lsei_next_function_in_partition (&lsei))
4748 {
4749 node = lsei_cgraph_node (lsei);
04be694e
MJ
4750 if (node->has_gimple_body_p ())
4751 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
4752 }
4753 streamer_write_char_stream (ob->main_stream, 0);
4754 produce_asm (ob, NULL);
4755 destroy_output_block (ob);
4756}
4757
4758/* Read replacements section in file FILE_DATA of length LEN with data
4759 DATA. */
4760
4761static void
4762read_replacements_section (struct lto_file_decl_data *file_data,
4763 const char *data,
4764 size_t len)
4765{
4766 const struct lto_function_header *header =
4767 (const struct lto_function_header *) data;
4768 const int cfg_offset = sizeof (struct lto_function_header);
4769 const int main_offset = cfg_offset + header->cfg_size;
4770 const int string_offset = main_offset + header->main_size;
4771 struct data_in *data_in;
2c9561b5
MJ
4772 unsigned int i;
4773 unsigned int count;
4774
207c68cd 4775 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4776 header->main_size, file_data->mode_table);
2c9561b5
MJ
4777
4778 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4779 header->string_size, vNULL);
2c9561b5
MJ
4780 count = streamer_read_uhwi (&ib_main);
4781
4782 for (i = 0; i < count; i++)
4783 {
4784 unsigned int index;
4785 struct cgraph_node *node;
4786 lto_symtab_encoder_t encoder;
4787
4788 index = streamer_read_uhwi (&ib_main);
4789 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4790 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4791 index));
67348ccc 4792 gcc_assert (node->definition);
04be694e 4793 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
4794 }
4795 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4796 len);
4797 lto_data_in_delete (data_in);
4798}
4799
4800/* Read IPA-CP aggregate replacements. */
4801
4802void
04be694e 4803ipcp_read_transformation_summaries (void)
2c9561b5
MJ
4804{
4805 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4806 struct lto_file_decl_data *file_data;
4807 unsigned int j = 0;
4808
4809 while ((file_data = file_data_vec[j++]))
4810 {
4811 size_t len;
4812 const char *data = lto_get_section_data (file_data,
4813 LTO_section_ipcp_transform,
4814 NULL, &len);
4815 if (data)
4816 read_replacements_section (file_data, data, len);
4817 }
4818}
4819
4820/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4821 NODE. */
4822
4823static void
4824adjust_agg_replacement_values (struct cgraph_node *node,
4825 struct ipa_agg_replacement_value *aggval)
4826{
4827 struct ipa_agg_replacement_value *v;
4828 int i, c = 0, d = 0, *adj;
4829
4830 if (!node->clone.combined_args_to_skip)
4831 return;
4832
4833 for (v = aggval; v; v = v->next)
4834 {
4835 gcc_assert (v->index >= 0);
4836 if (c < v->index)
4837 c = v->index;
4838 }
4839 c++;
4840
4841 adj = XALLOCAVEC (int, c);
4842 for (i = 0; i < c; i++)
4843 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4844 {
4845 adj[i] = -1;
4846 d++;
4847 }
4848 else
4849 adj[i] = i - d;
4850
4851 for (v = aggval; v; v = v->next)
4852 v->index = adj[v->index];
4853}
4854
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI holds function-body analysis state, DESCS the parameter
     descriptors of the walked function, AV the chain of aggregate
     replacement values; SC and CC are out-flags reporting that a
     statement, respectively the CFG, was changed.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body analysis info of the walked function.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the walked function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Chain of aggregate replacement values to substitute into loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags: something changed / the CFG changed.  */
  bool *m_something_changed, *m_cfg_changed;
};
4875
3daacdcd 4876edge
8aab5218
MJ
4877ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4878{
4879 gimple_stmt_iterator gsi;
4880 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4881 {
4882 struct ipa_agg_replacement_value *v;
355fe088 4883 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
4884 tree rhs, val, t;
4885 HOST_WIDE_INT offset, size;
4886 int index;
4887 bool by_ref, vce;
4888
4889 if (!gimple_assign_load_p (stmt))
4890 continue;
4891 rhs = gimple_assign_rhs1 (stmt);
4892 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4893 continue;
2c9561b5 4894
8aab5218
MJ
4895 vce = false;
4896 t = rhs;
4897 while (handled_component_p (t))
4898 {
4899 /* V_C_E can do things like convert an array of integers to one
4900 bigger integer and similar things we do not handle below. */
4901 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4902 {
4903 vce = true;
4904 break;
4905 }
4906 t = TREE_OPERAND (t, 0);
4907 }
4908 if (vce)
4909 continue;
4910
ff302741
PB
4911 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4912 &offset, &size, &by_ref))
8aab5218
MJ
4913 continue;
4914 for (v = m_aggval; v; v = v->next)
4915 if (v->index == index
4916 && v->offset == offset)
4917 break;
4918 if (!v
4919 || v->by_ref != by_ref
4920 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4921 continue;
4922
4923 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4924 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4925 {
4926 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4927 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4928 else if (TYPE_SIZE (TREE_TYPE (rhs))
4929 == TYPE_SIZE (TREE_TYPE (v->value)))
4930 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4931 else
4932 {
4933 if (dump_file)
4934 {
4935 fprintf (dump_file, " const ");
ef6cb4c7 4936 print_generic_expr (dump_file, v->value);
8aab5218 4937 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 4938 print_generic_expr (dump_file, rhs);
8aab5218
MJ
4939 fprintf (dump_file, "\n");
4940 }
4941 continue;
4942 }
4943 }
4944 else
4945 val = v->value;
4946
4947 if (dump_file && (dump_flags & TDF_DETAILS))
4948 {
4949 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 4950 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4951 }
4952 gimple_assign_set_rhs_from_tree (&gsi, val);
4953 update_stmt (stmt);
4954
4955 if (dump_file && (dump_flags & TDF_DETAILS))
4956 {
4957 fprintf (dump_file, "into:\n ");
ef6cb4c7 4958 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4959 fprintf (dump_file, "\n");
4960 }
4961
4962 *m_something_changed = true;
4963 if (maybe_clean_eh_stmt (stmt)
4964 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4965 *m_cfg_changed = true;
4966 }
3daacdcd 4967 return NULL;
8aab5218
MJ
4968}
4969
/* Update bits info of formal parameters as described in
   ipcp_transformation.  For integral parameters this sets the SSA
   default-def's nonzero-bits info; for pointer parameters it derives and
   records alignment information instead.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  /* Walk the recorded bits in parallel with the PARM_DECL chain; entries
     for parameters skipped in this clone consume a bits slot but no
     PARM_DECL.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only integral and pointer SSA parameters with known bits are
	 interesting.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in either the mask (unknown) or the value may be
	     nonzero.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the mask bounds the
	     guaranteed alignment; the value's low bits give the
	     misalignment within it.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment info that is already stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5061
/* Update value range of formal parameters as described in
   ipcp_transformation.  Integral parameters get range info on their SSA
   default-def; pointer parameters known to exclude only NULL get a
   non-null mark.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  /* Walk the recorded ranges in parallel with the PARM_DECL chain;
     parameters skipped in this clone consume a slot but no PARM_DECL.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  /* An anti-range of exactly [0, 0] on a pointer means the
	     parameter is known to be non-NULL.  */
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5124
/* IPCP transformation phase doing propagation of aggregate values.
   Applies the stored bits, value-range and aggregate replacement
   information to the body of NODE (which must be set up as cfun).
   Returns a TODO mask for the pass manager: 0 when nothing changed,
   otherwise SSA-update flags, plus CFG cleanup if EH purging changed
   the CFG.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  /* Bits and value-range updates happen unconditionally; the rest of the
     function only deals with aggregate replacements.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
      return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to account for parameters removed in this
     clone.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function-body analysis context for the dominator walk.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* The summary has been consumed; clear it so it is not applied again.
     (The data itself is GC-allocated.)  */
  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
86cd0334
MJ
5189
5190#include "gt-ipa-prop.h"