]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Convert ipa-pure-const.c to symbol_summary.
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
85ec4feb 2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 62
86cd0334
MJ
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  /* Combine the hashes of the known-bits value and its mask.  */
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  /* Two entries are interchangeable iff both value and mask are equal.  */
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  /* NULL marks an empty slot; the address 1 marks a deleted one.  */
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
104
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  /* Hash kind and the min/max tree pointers; the equiv member must be NULL
     (asserted), so it does not participate.  */
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    inchash::hash hstate (p->type);
    hstate.add_ptr (p->min);
    hstate.add_ptr (p->max);
    return hstate.end ();
  }
  /* Pointer equality of min/max is enough because the bounds are shared
     trees.  */
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  /* NULL marks an empty slot; the address 1 marks a deleted one.  */
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
150
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 153
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 170
5fe8e757
MJ
171/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
173
174static bool
175ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176{
67348ccc 177 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
178
179 if (!fs_opts)
180 return false;
2bf86c84 181 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
182}
183
be95e2b9
MJ
184/* Return index of the formal whose tree is PTREE in function which corresponds
185 to INFO. */
186
d044dd17 187static int
f65f1ae3
MJ
188ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 tree ptree)
518dc859
RL
190{
191 int i, count;
192
f65f1ae3 193 count = vec_safe_length (descriptors);
518dc859 194 for (i = 0; i < count; i++)
f65f1ae3 195 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
196 return i;
197
198 return -1;
199}
200
d044dd17
MJ
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if it is not found.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
209
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  Also record the estimated move cost of each parameter's type.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  /* DECL_ARGUMENTS is only trustworthy when the body is available.  */
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
234
3f84bf08
MJ
235/* Return how many formal parameters FNDECL has. */
236
fd29c024 237int
310bc633 238count_formal_params (tree fndecl)
3f84bf08
MJ
239{
240 tree parm;
241 int count = 0;
0e8853ee 242 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 243
910ad8de 244 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
245 count++;
246
247 return count;
248}
249
0e8853ee
JH
/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE: its index and, when known, its
   declaration.  (The previous comment here described a getter and was
   stale.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
264
159f01f8
MJ
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  /* Allocate only when the descriptors do not exist yet and there is
     actually something to describe.  */
  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
281
f8e2a1ed
MJ
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  /* Only populate the descriptors when this call actually allocated them;
     otherwise they were filled in earlier.  */
  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
295
749aa96d
MJ
296/* Print the jump functions associated with call graph edge CS to file F. */
297
298static void
299ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
300{
301 int i, count;
302
303 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
304 for (i = 0; i < count; i++)
305 {
306 struct ipa_jump_func *jump_func;
307 enum jump_func_type type;
308
309 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
310 type = jump_func->type;
311
312 fprintf (f, " param %d: ", i);
313 if (type == IPA_JF_UNKNOWN)
314 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
315 else if (type == IPA_JF_CONST)
316 {
4502fe8d 317 tree val = jump_func->value.constant.value;
749aa96d 318 fprintf (f, "CONST: ");
ef6cb4c7 319 print_generic_expr (f, val);
749aa96d
MJ
320 if (TREE_CODE (val) == ADDR_EXPR
321 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
322 {
323 fprintf (f, " -> ");
ef6cb4c7 324 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
325 }
326 fprintf (f, "\n");
327 }
749aa96d
MJ
328 else if (type == IPA_JF_PASS_THROUGH)
329 {
330 fprintf (f, "PASS THROUGH: ");
8b7773a4 331 fprintf (f, "%d, op %s",
749aa96d 332 jump_func->value.pass_through.formal_id,
5806f481 333 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 334 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
335 {
336 fprintf (f, " ");
ef6cb4c7 337 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
338 }
339 if (jump_func->value.pass_through.agg_preserved)
340 fprintf (f, ", agg_preserved");
3ea6239f 341 fprintf (f, "\n");
749aa96d
MJ
342 }
343 else if (type == IPA_JF_ANCESTOR)
344 {
345 fprintf (f, "ANCESTOR: ");
16998094 346 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
347 jump_func->value.ancestor.formal_id,
348 jump_func->value.ancestor.offset);
8b7773a4
MJ
349 if (jump_func->value.ancestor.agg_preserved)
350 fprintf (f, ", agg_preserved");
3ea6239f 351 fprintf (f, "\n");
749aa96d 352 }
8b7773a4
MJ
353
354 if (jump_func->agg.items)
355 {
356 struct ipa_agg_jf_item *item;
357 int j;
358
359 fprintf (f, " Aggregate passed by %s:\n",
360 jump_func->agg.by_ref ? "reference" : "value");
9771b263 361 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
362 {
363 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
364 item->offset);
365 if (TYPE_P (item->value))
366 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 367 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
368 else
369 {
370 fprintf (f, "cst: ");
ef6cb4c7 371 print_generic_expr (f, item->value);
8b7773a4
MJ
372 }
373 fprintf (f, "\n");
374 }
375 }
44210a96
MJ
376
377 struct ipa_polymorphic_call_context *ctx
378 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
379 if (ctx && !ctx->useless_p ())
380 {
381 fprintf (f, " Context: ");
382 ctx->dump (dump_file);
383 }
04be694e 384
86cd0334 385 if (jump_func->bits)
209ca542 386 {
86cd0334
MJ
387 fprintf (f, " value: ");
388 print_hex (jump_func->bits->value, f);
389 fprintf (f, ", mask: ");
390 print_hex (jump_func->bits->mask, f);
209ca542
PK
391 fprintf (f, "\n");
392 }
393 else
394 fprintf (f, " Unknown bits\n");
8bc5448f 395
86cd0334 396 if (jump_func->m_vr)
8bc5448f
KV
397 {
398 fprintf (f, " VR ");
399 fprintf (f, "%s[",
86cd0334 400 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
8e6cdc90 401 print_decs (wi::to_wide (jump_func->m_vr->min), f);
8bc5448f 402 fprintf (f, ", ");
8e6cdc90 403 print_decs (wi::to_wide (jump_func->m_vr->max), f);
8bc5448f
KV
404 fprintf (f, "]\n");
405 }
406 else
407 fprintf (f, " Unknown VR\n");
749aa96d
MJ
408 }
409}
410
411
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Both direct callees and indirect call sites are
   covered.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* Calls loaded from aggregate (or member pointer) contents are
	 described differently from plain/polymorphic indirect calls.  */
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
463
464/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 465
3e293154
MJ
466void
467ipa_print_all_jump_functions (FILE *f)
468{
469 struct cgraph_node *node;
470
ca30a539 471 fprintf (f, "\nJump functions:\n");
65c70e6b 472 FOR_EACH_FUNCTION (node)
3e293154
MJ
473 {
474 ipa_print_node_jump_functions (f, node);
475 }
476}
477
04be694e
MJ
478/* Set jfunc to be a know-really nothing jump function. */
479
480static void
481ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
482{
483 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
484 jfunc->bits = NULL;
485 jfunc->m_vr = NULL;
04be694e
MJ
486}
487
b8f6e610
MJ
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Copying the whole constant payload also copies the rdesc pointer, so
     both jump functions refer to the same reference description.  */
  dst->value.constant = src->value.constant;
}
500
7b872d9e
MJ
/* Set JFUNC to be a constant jmp function.  CS is the edge the jump function
   belongs to; a reference description is recorded for it when CONSTANT is
   the address of a function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  /* Addresses of functions need a reference description so that the
     reference can be tracked/removed when edges are redirected or
     cloned.  */
  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
524
525/* Set JFUNC to be a simple pass-through jump function. */
526static void
8b7773a4 527ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 528 bool agg_preserved)
7b872d9e
MJ
529{
530 jfunc->type = IPA_JF_PASS_THROUGH;
531 jfunc->value.pass_through.operand = NULL_TREE;
532 jfunc->value.pass_through.formal_id = formal_id;
533 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 534 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
535}
536
a2b4c188
KV
537/* Set JFUNC to be an unary pass through jump function. */
538
539static void
540ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
541 enum tree_code operation)
542{
543 jfunc->type = IPA_JF_PASS_THROUGH;
544 jfunc->value.pass_through.operand = NULL_TREE;
545 jfunc->value.pass_through.formal_id = formal_id;
546 jfunc->value.pass_through.operation = operation;
547 jfunc->value.pass_through.agg_preserved = false;
548}
7b872d9e
MJ
549/* Set JFUNC to be an arithmetic pass through jump function. */
550
551static void
552ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
553 tree operand, enum tree_code operation)
554{
555 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 556 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
557 jfunc->value.pass_through.formal_id = formal_id;
558 jfunc->value.pass_through.operation = operation;
8b7773a4 559 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
560}
561
562/* Set JFUNC to be an ancestor jump function. */
563
564static void
565ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 566 int formal_id, bool agg_preserved)
7b872d9e
MJ
567{
568 jfunc->type = IPA_JF_ANCESTOR;
569 jfunc->value.ancestor.formal_id = formal_id;
570 jfunc->value.ancestor.offset = offset;
8b7773a4 571 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
572}
573
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
583
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
598
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Non-aggregate stores can be filtered further; aggregate stores may
	 always overwrite an embedded vptr and conservatively return true.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
663
3b97a5c7
MJ
664/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
665 to check whether a particular statement may modify the virtual table
666 pointerIt stores its result into DATA, which points to a
11478306 667 prop_type_change_info structure. */
f65cf2b7
MJ
668
669static bool
670check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
671{
355fe088 672 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 673 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
674
675 if (stmt_may_be_vtbl_ptr_store (stmt))
676 {
677 tci->type_maybe_changed = true;
678 return true;
679 }
680 else
681 return false;
682}
683
058d0a90
JH
/* See if ARG is PARM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 737
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Build an alias-oracle reference describing just the vptr-sized slot at
     OFFSET within the object so the vdef walk only sees relevant stores.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  /* A potential vtable store was found; all type knowledge is invalid.  */
  ipa_set_jf_unknown (jfunc);
  return true;
}
791
058d0a90
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* When the base is a dereferenced parameter whose pointed-to type is
     known not to change in this function, skip the expensive scan.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
813
f65cf2b7
MJ
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build a zero-offset dereference of ARG to feed the memory-write scan.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
836
fdb0e1b4
MJ
837/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
838 boolean variable pointed to by DATA. */
839
840static bool
841mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
842 void *data)
843{
844 bool *b = (bool *) data;
845 *b = true;
846 return true;
847}
848
8aab5218
MJ
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  The budget is controlled by the
   ipa-max-aa-steps parameter.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
858
859/* Find the nearest valid aa status for parameter specified by INDEX that
860 dominates BB. */
861
56b40062
MJ
862static struct ipa_param_aa_status *
863find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
864 int index)
865{
866 while (true)
867 {
868 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
869 if (!bb)
870 return NULL;
871 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
872 if (!bi->param_aa_statuses.is_empty ()
873 && bi->param_aa_statuses[index].valid)
874 return &bi->param_aa_statuses[index];
875 }
876}
877
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating status if there is one; otherwise
	 this freshly cleared status simply becomes valid.  */
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
906
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A read-only parameter can never be modified.  */
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  /* Charge the walk against the per-function alias-analysis budget and cache
     a positive modification result for later queries.  */
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
951
a2b4c188
KV
952/* If STMT is an assignment that loads a value from an parameter declaration,
953 return the index of the parameter in ipa_node_params which has not been
954 modified. Otherwise return -1. */
955
956static int
957load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 958 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
959 gimple *stmt)
960{
bda2bc48
MJ
961 int index;
962 tree op1;
963
a2b4c188
KV
964 if (!gimple_assign_single_p (stmt))
965 return -1;
966
bda2bc48
MJ
967 op1 = gimple_assign_rhs1 (stmt);
968 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
969 return -1;
970
bda2bc48
MJ
971 index = ipa_get_param_decl_index_1 (descriptors, op1);
972 if (index < 0
973 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
974 return -1;
975
bda2bc48 976 return index;
a2b4c188
KV
977}
978
8aab5218
MJ
979/* Return true if memory reference REF (which must be a load through parameter
980 with INDEX) loads data that are known to be unmodified in this function
981 before reaching statement STMT. */
8b7773a4
MJ
982
983static bool
56b40062 984parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 985 int index, gimple *stmt, tree ref)
8b7773a4 986{
56b40062 987 struct ipa_param_aa_status *paa;
8b7773a4
MJ
988 bool modified = false;
989 ao_ref refd;
990
8aab5218
MJ
991 /* FIXME: FBI can be NULL if we are being called from outside
992 ipa_node_analysis or ipcp_transform_function, which currently happens
993 during inlining analysis. It would be great to extend fbi's lifetime and
994 always have it. Currently, we are just not afraid of too much walking in
995 that case. */
996 if (fbi)
997 {
998 if (aa_overwalked (fbi))
999 return false;
1000 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1001 if (paa->ref_modified)
1002 return false;
1003 }
1004 else
1005 paa = NULL;
8b7773a4 1006
8aab5218 1007 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1008 ao_ref_init (&refd, ref);
8aab5218
MJ
1009 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1010 &modified, NULL);
1011 if (fbi)
1012 fbi->aa_walked += walked;
1013 if (paa && modified)
1014 paa->ref_modified = true;
8b7773a4
MJ
1015 return !modified;
1016}
1017
8aab5218
MJ
1018/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1019 is known to be unmodified in this function before reaching call statement
1020 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1021
1022static bool
56b40062 1023parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1024 gimple *call, tree parm)
8b7773a4
MJ
1025{
1026 bool modified = false;
1027 ao_ref refd;
1028
1029 /* It's unnecessary to calculate anything about memory contnets for a const
1030 function because it is not goin to use it. But do not cache the result
1031 either. Also, no such calculations for non-pointers. */
1032 if (!gimple_vuse (call)
8aab5218
MJ
1033 || !POINTER_TYPE_P (TREE_TYPE (parm))
1034 || aa_overwalked (fbi))
8b7773a4
MJ
1035 return false;
1036
56b40062
MJ
1037 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1038 gimple_bb (call),
1039 index);
8aab5218 1040 if (paa->pt_modified)
8b7773a4
MJ
1041 return false;
1042
1043 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
1044 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1045 &modified, NULL);
1046 fbi->aa_walked += walked;
8b7773a4 1047 if (modified)
8aab5218 1048 paa->pt_modified = true;
8b7773a4
MJ
1049 return !modified;
1050}
1051
91bb9f80
MJ
1052/* Return true if we can prove that OP is a memory reference loading
1053 data from an aggregate passed as a parameter.
1054
1055 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1056 false if it cannot prove that the value has not been modified before the
1057 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1058 if it cannot prove the value has not been modified, in that case it will
1059 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1060
8b7773a4
MJ
1061 INFO and PARMS_AINFO describe parameters of the current function (but the
1062 latter can be NULL), STMT is the load statement. If function returns true,
1063 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1064 within the aggregate and whether it is a load from a value passed by
1065 reference respectively. */
1066
ff302741 1067bool
56b40062 1068ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1069 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1070 gimple *stmt, tree op, int *index_p,
ff302741 1071 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
91bb9f80 1072 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1073{
1074 int index;
588db50c 1075 HOST_WIDE_INT size;
ee45a32d 1076 bool reverse;
588db50c 1077 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
8b7773a4 1078
588db50c 1079 if (!base)
8b7773a4
MJ
1080 return false;
1081
1082 if (DECL_P (base))
1083 {
d044dd17 1084 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1085 if (index >= 0
8aab5218 1086 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1087 {
1088 *index_p = index;
1089 *by_ref_p = false;
3ff2ca23
JJ
1090 if (size_p)
1091 *size_p = size;
91bb9f80
MJ
1092 if (guaranteed_unmodified)
1093 *guaranteed_unmodified = true;
8b7773a4
MJ
1094 return true;
1095 }
1096 return false;
1097 }
1098
1099 if (TREE_CODE (base) != MEM_REF
1100 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1101 || !integer_zerop (TREE_OPERAND (base, 1)))
1102 return false;
1103
1104 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1105 {
1106 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1107 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1108 }
1109 else
1110 {
1111 /* This branch catches situations where a pointer parameter is not a
1112 gimple register, for example:
1113
1114 void hip7(S*) (struct S * p)
1115 {
1116 void (*<T2e4>) (struct S *) D.1867;
1117 struct S * p.1;
1118
1119 <bb 2>:
1120 p.1_1 = p;
1121 D.1867_2 = p.1_1->f;
1122 D.1867_2 ();
1123 gdp = &p;
1124 */
1125
355fe088 1126 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1127 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1128 }
1129
91bb9f80 1130 if (index >= 0)
8b7773a4 1131 {
91bb9f80
MJ
1132 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1133 if (!data_preserved && !guaranteed_unmodified)
1134 return false;
1135
8b7773a4
MJ
1136 *index_p = index;
1137 *by_ref_p = true;
3ff2ca23
JJ
1138 if (size_p)
1139 *size_p = size;
91bb9f80
MJ
1140 if (guaranteed_unmodified)
1141 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1142 return true;
1143 }
1144 return false;
1145}
1146
b258210c 1147/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1148 of an assignment statement STMT, try to determine whether we are actually
1149 handling any of the following cases and construct an appropriate jump
1150 function into JFUNC if so:
1151
1152 1) The passed value is loaded from a formal parameter which is not a gimple
1153 register (most probably because it is addressable, the value has to be
1154 scalar) and we can guarantee the value has not changed. This case can
1155 therefore be described by a simple pass-through jump function. For example:
1156
1157 foo (int a)
1158 {
1159 int a.0;
1160
1161 a.0_2 = a;
1162 bar (a.0_2);
1163
1164 2) The passed value can be described by a simple arithmetic pass-through
1165 jump function. E.g.
1166
1167 foo (int a)
1168 {
1169 int D.2064;
1170
1171 D.2064_4 = a.1(D) + 4;
1172 bar (D.2064_4);
1173
1174 This case can also occur in combination of the previous one, e.g.:
1175
1176 foo (int a, int z)
1177 {
1178 int a.0;
1179 int D.2064;
1180
1181 a.0_3 = a;
1182 D.2064_4 = a.0_3 + 4;
1183 foo (D.2064_4);
1184
1185 3) The passed value is an address of an object within another one (which
1186 also passed by reference). Such situations are described by an ancestor
1187 jump function and describe situations such as:
1188
1189 B::foo() (struct B * const this)
1190 {
1191 struct A * D.1845;
1192
1193 D.1845_2 = &this_1(D)->D.1748;
1194 A::bar (D.1845_2);
1195
1196 INFO is the structure describing individual parameters access different
1197 stages of IPA optimizations. PARMS_AINFO contains the information that is
1198 only needed for intraprocedural analysis. */
685b0d13
MJ
1199
1200static void
56b40062 1201compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1202 struct ipa_node_params *info,
b258210c 1203 struct ipa_jump_func *jfunc,
355fe088 1204 gcall *call, gimple *stmt, tree name,
06d65050 1205 tree param_type)
685b0d13 1206{
588db50c 1207 HOST_WIDE_INT offset, size;
fdb0e1b4 1208 tree op1, tc_ssa, base, ssa;
ee45a32d 1209 bool reverse;
685b0d13 1210 int index;
685b0d13 1211
685b0d13 1212 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1213
fdb0e1b4 1214 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1215 {
fdb0e1b4
MJ
1216 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1217 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1218 else
bda2bc48
MJ
1219 index = load_from_unmodified_param (fbi, info->descriptors,
1220 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1221 tc_ssa = op1;
1222 }
1223 else
1224 {
bda2bc48 1225 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1226 tc_ssa = gimple_assign_lhs (stmt);
1227 }
1228
1229 if (index >= 0)
1230 {
a77af182 1231 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1232 {
a77af182
RB
1233 case GIMPLE_BINARY_RHS:
1234 {
1235 tree op2 = gimple_assign_rhs2 (stmt);
1236 if (!is_gimple_ip_invariant (op2)
1237 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1238 != tcc_comparison)
1239 && !useless_type_conversion_p (TREE_TYPE (name),
1240 TREE_TYPE (op1))))
1241 return;
1242
1243 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1244 gimple_assign_rhs_code (stmt));
1245 break;
1246 }
1247 case GIMPLE_SINGLE_RHS:
1248 {
1249 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1250 tc_ssa);
1251 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1252 break;
1253 }
1254 case GIMPLE_UNARY_RHS:
bda2bc48
MJ
1255 if (is_gimple_assign (stmt)
1256 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1257 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1258 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1259 gimple_assign_rhs_code (stmt));
a77af182 1260 default:;
8b7773a4 1261 }
685b0d13
MJ
1262 return;
1263 }
1264
1265 if (TREE_CODE (op1) != ADDR_EXPR)
1266 return;
1267 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1268 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1269 return;
588db50c 1270 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
aca52e6f
RS
1271 offset_int mem_offset;
1272 if (!base
1273 || TREE_CODE (base) != MEM_REF
1274 || !mem_ref_offset (base).is_constant (&mem_offset))
685b0d13 1275 return;
aca52e6f 1276 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1277 ssa = TREE_OPERAND (base, 0);
1278 if (TREE_CODE (ssa) != SSA_NAME
1279 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1280 || offset < 0)
685b0d13
MJ
1281 return;
1282
b8f6e610 1283 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1284 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1285 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1286 ipa_set_ancestor_jf (jfunc, offset, index,
1287 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1288}
1289
40591473
MJ
1290/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1291 it looks like:
1292
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1294
1295 The base of the MEM_REF must be a default definition SSA NAME of a
1296 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1297 whole MEM_REF expression is returned and the offset calculated from any
1298 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1299 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1300
1301static tree
355fe088 1302get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473 1303{
588db50c 1304 HOST_WIDE_INT size;
40591473 1305 tree expr, parm, obj;
ee45a32d 1306 bool reverse;
40591473
MJ
1307
1308 if (!gimple_assign_single_p (assign))
1309 return NULL_TREE;
1310 expr = gimple_assign_rhs1 (assign);
1311
1312 if (TREE_CODE (expr) != ADDR_EXPR)
1313 return NULL_TREE;
1314 expr = TREE_OPERAND (expr, 0);
1315 obj = expr;
588db50c 1316 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
40591473 1317
aca52e6f
RS
1318 offset_int mem_offset;
1319 if (!expr
1320 || TREE_CODE (expr) != MEM_REF
1321 || !mem_ref_offset (expr).is_constant (&mem_offset))
40591473
MJ
1322 return NULL_TREE;
1323 parm = TREE_OPERAND (expr, 0);
1324 if (TREE_CODE (parm) != SSA_NAME
1325 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1326 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1327 return NULL_TREE;
1328
aca52e6f 1329 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1330 *obj_p = obj;
1331 return expr;
1332}
1333
685b0d13 1334
b258210c
MJ
1335/* Given that an actual argument is an SSA_NAME that is a result of a phi
1336 statement PHI, try to find out whether NAME is in fact a
1337 multiple-inheritance typecast from a descendant into an ancestor of a formal
1338 parameter and thus can be described by an ancestor jump function and if so,
1339 write the appropriate function into JFUNC.
1340
1341 Essentially we want to match the following pattern:
1342
1343 if (obj_2(D) != 0B)
1344 goto <bb 3>;
1345 else
1346 goto <bb 4>;
1347
1348 <bb 3>:
1349 iftmp.1_3 = &obj_2(D)->D.1762;
1350
1351 <bb 4>:
1352 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1353 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1354 return D.1879_6; */
1355
1356static void
56b40062 1357compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1358 struct ipa_node_params *info,
b258210c 1359 struct ipa_jump_func *jfunc,
538dd0b7 1360 gcall *call, gphi *phi)
b258210c 1361{
40591473 1362 HOST_WIDE_INT offset;
355fe088 1363 gimple *assign, *cond;
b258210c 1364 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1365 tree tmp, parm, expr, obj;
b258210c
MJ
1366 int index, i;
1367
54e348cb 1368 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1369 return;
1370
54e348cb
MJ
1371 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1372 tmp = PHI_ARG_DEF (phi, 0);
1373 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1374 tmp = PHI_ARG_DEF (phi, 1);
1375 else
1376 return;
b258210c
MJ
1377 if (TREE_CODE (tmp) != SSA_NAME
1378 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1379 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1380 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1381 return;
1382
1383 assign = SSA_NAME_DEF_STMT (tmp);
1384 assign_bb = gimple_bb (assign);
40591473 1385 if (!single_pred_p (assign_bb))
b258210c 1386 return;
40591473
MJ
1387 expr = get_ancestor_addr_info (assign, &obj, &offset);
1388 if (!expr)
b258210c
MJ
1389 return;
1390 parm = TREE_OPERAND (expr, 0);
b258210c 1391 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1392 if (index < 0)
1393 return;
b258210c
MJ
1394
1395 cond_bb = single_pred (assign_bb);
1396 cond = last_stmt (cond_bb);
69610617
SB
1397 if (!cond
1398 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1399 || gimple_cond_code (cond) != NE_EXPR
1400 || gimple_cond_lhs (cond) != parm
1401 || !integer_zerop (gimple_cond_rhs (cond)))
1402 return;
1403
b258210c
MJ
1404 phi_bb = gimple_bb (phi);
1405 for (i = 0; i < 2; i++)
1406 {
1407 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1408 if (pred != assign_bb && pred != cond_bb)
1409 return;
1410 }
1411
3b97a5c7
MJ
1412 ipa_set_ancestor_jf (jfunc, offset, index,
1413 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1414}
1415
be95e2b9
MJ
1416/* Inspect the given TYPE and return true iff it has the same structure (the
1417 same number of fields of the same types) as a C++ member pointer. If
1418 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1419 corresponding fields there. */
1420
3e293154
MJ
1421static bool
1422type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1423{
1424 tree fld;
1425
1426 if (TREE_CODE (type) != RECORD_TYPE)
1427 return false;
1428
1429 fld = TYPE_FIELDS (type);
1430 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1431 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1432 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1433 return false;
1434
1435 if (method_ptr)
1436 *method_ptr = fld;
1437
910ad8de 1438 fld = DECL_CHAIN (fld);
8b7773a4 1439 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1440 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1441 return false;
1442 if (delta)
1443 *delta = fld;
1444
910ad8de 1445 if (DECL_CHAIN (fld))
3e293154
MJ
1446 return false;
1447
1448 return true;
1449}
1450
61502ca8 1451/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1452 return the rhs of its defining statement. Otherwise return RHS as it
1453 is. */
7ec49257
MJ
1454
1455static inline tree
1456get_ssa_def_if_simple_copy (tree rhs)
1457{
1458 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1459 {
355fe088 1460 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1461
1462 if (gimple_assign_single_p (def_stmt))
1463 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1464 else
1465 break;
7ec49257
MJ
1466 }
1467 return rhs;
1468}
1469
8b7773a4
MJ
1470/* Simple linked list, describing known contents of an aggregate beforere
1471 call. */
1472
1473struct ipa_known_agg_contents_list
1474{
1475 /* Offset and size of the described part of the aggregate. */
1476 HOST_WIDE_INT offset, size;
1477 /* Known constant value or NULL if the contents is known to be unknown. */
1478 tree constant;
1479 /* Pointer to the next structure in the list. */
1480 struct ipa_known_agg_contents_list *next;
1481};
3e293154 1482
0d48ee34
MJ
1483/* Find the proper place in linked list of ipa_known_agg_contents_list
1484 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1485 unless there is a partial overlap, in which case return NULL, or such
1486 element is already there, in which case set *ALREADY_THERE to true. */
1487
1488static struct ipa_known_agg_contents_list **
1489get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1490 HOST_WIDE_INT lhs_offset,
1491 HOST_WIDE_INT lhs_size,
1492 bool *already_there)
1493{
1494 struct ipa_known_agg_contents_list **p = list;
1495 while (*p && (*p)->offset < lhs_offset)
1496 {
1497 if ((*p)->offset + (*p)->size > lhs_offset)
1498 return NULL;
1499 p = &(*p)->next;
1500 }
1501
1502 if (*p && (*p)->offset < lhs_offset + lhs_size)
1503 {
1504 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1505 /* We already know this value is subsequently overwritten with
1506 something else. */
1507 *already_there = true;
1508 else
1509 /* Otherwise this is a partial overlap which we cannot
1510 represent. */
1511 return NULL;
1512 }
1513 return p;
1514}
1515
1516/* Build aggregate jump function from LIST, assuming there are exactly
1517 CONST_COUNT constant entries there and that th offset of the passed argument
1518 is ARG_OFFSET and store it into JFUNC. */
1519
1520static void
1521build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1522 int const_count, HOST_WIDE_INT arg_offset,
1523 struct ipa_jump_func *jfunc)
1524{
1525 vec_alloc (jfunc->agg.items, const_count);
1526 while (list)
1527 {
1528 if (list->constant)
1529 {
1530 struct ipa_agg_jf_item item;
1531 item.offset = list->offset - arg_offset;
1532 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1533 item.value = unshare_expr_without_location (list->constant);
1534 jfunc->agg.items->quick_push (item);
1535 }
1536 list = list->next;
1537 }
1538}
1539
8b7773a4
MJ
1540/* Traverse statements from CALL backwards, scanning whether an aggregate given
1541 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1542 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1543 aggregate. JFUNC is the jump function into which the constants are
1544 subsequently stored. */
be95e2b9 1545
3e293154 1546static void
538dd0b7
DM
1547determine_locally_known_aggregate_parts (gcall *call, tree arg,
1548 tree arg_type,
0d48ee34 1549 struct ipa_jump_func *jfunc)
3e293154 1550{
8b7773a4
MJ
1551 struct ipa_known_agg_contents_list *list = NULL;
1552 int item_count = 0, const_count = 0;
1553 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1554 gimple_stmt_iterator gsi;
8b7773a4
MJ
1555 tree arg_base;
1556 bool check_ref, by_ref;
1557 ao_ref r;
3e293154 1558
29799e9d
MJ
1559 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1560 return;
1561
8b7773a4
MJ
1562 /* The function operates in three stages. First, we prepare check_ref, r,
1563 arg_base and arg_offset based on what is actually passed as an actual
1564 argument. */
3e293154 1565
85942f45 1566 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1567 {
1568 by_ref = true;
1569 if (TREE_CODE (arg) == SSA_NAME)
1570 {
1571 tree type_size;
85942f45 1572 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1573 return;
1574 check_ref = true;
1575 arg_base = arg;
1576 arg_offset = 0;
85942f45 1577 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1578 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1579 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1580 }
1581 else if (TREE_CODE (arg) == ADDR_EXPR)
1582 {
ee45a32d 1583 bool reverse;
8b7773a4
MJ
1584
1585 arg = TREE_OPERAND (arg, 0);
588db50c
RS
1586 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1587 &arg_size, &reverse);
1588 if (!arg_base)
8b7773a4
MJ
1589 return;
1590 if (DECL_P (arg_base))
1591 {
8b7773a4 1592 check_ref = false;
0d48ee34 1593 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1594 }
1595 else
1596 return;
1597 }
1598 else
1599 return;
1600 }
1601 else
1602 {
ee45a32d 1603 bool reverse;
8b7773a4
MJ
1604
1605 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1606
1607 by_ref = false;
1608 check_ref = false;
588db50c
RS
1609 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1610 &arg_size, &reverse);
1611 if (!arg_base)
8b7773a4
MJ
1612 return;
1613
1614 ao_ref_init (&r, arg);
1615 }
1616
1617 /* Second stage walks back the BB, looks at individual statements and as long
1618 as it is confident of how the statements affect contents of the
1619 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1620 describing it. */
1621 gsi = gsi_for_stmt (call);
726a989a
RB
1622 gsi_prev (&gsi);
1623 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1624 {
8b7773a4 1625 struct ipa_known_agg_contents_list *n, **p;
355fe088 1626 gimple *stmt = gsi_stmt (gsi);
588db50c 1627 HOST_WIDE_INT lhs_offset, lhs_size;
8b7773a4 1628 tree lhs, rhs, lhs_base;
ee45a32d 1629 bool reverse;
3e293154 1630
8b7773a4 1631 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1632 continue;
8b75fc9b 1633 if (!gimple_assign_single_p (stmt))
8b7773a4 1634 break;
3e293154 1635
726a989a
RB
1636 lhs = gimple_assign_lhs (stmt);
1637 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1638 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1639 || TREE_CODE (lhs) == BIT_FIELD_REF
1640 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1641 break;
3e293154 1642
588db50c
RS
1643 lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1644 &lhs_size, &reverse);
1645 if (!lhs_base)
8b7773a4 1646 break;
3e293154 1647
8b7773a4 1648 if (check_ref)
518dc859 1649 {
8b7773a4
MJ
1650 if (TREE_CODE (lhs_base) != MEM_REF
1651 || TREE_OPERAND (lhs_base, 0) != arg_base
1652 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1653 break;
3e293154 1654 }
8b7773a4 1655 else if (lhs_base != arg_base)
774b8a55
MJ
1656 {
1657 if (DECL_P (lhs_base))
1658 continue;
1659 else
1660 break;
1661 }
3e293154 1662
0d48ee34
MJ
1663 bool already_there = false;
1664 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1665 &already_there);
1666 if (!p)
8b7773a4 1667 break;
0d48ee34
MJ
1668 if (already_there)
1669 continue;
3e293154 1670
8b7773a4
MJ
1671 rhs = get_ssa_def_if_simple_copy (rhs);
1672 n = XALLOCA (struct ipa_known_agg_contents_list);
1673 n->size = lhs_size;
1674 n->offset = lhs_offset;
1675 if (is_gimple_ip_invariant (rhs))
1676 {
1677 n->constant = rhs;
1678 const_count++;
1679 }
1680 else
1681 n->constant = NULL_TREE;
1682 n->next = *p;
1683 *p = n;
3e293154 1684
8b7773a4 1685 item_count++;
dfea20f1
MJ
1686 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1687 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1688 break;
1689 }
be95e2b9 1690
8b7773a4
MJ
1691 /* Third stage just goes over the list and creates an appropriate vector of
1692 ipa_agg_jf_item structures out of it, of sourse only if there are
1693 any known constants to begin with. */
3e293154 1694
8b7773a4 1695 if (const_count)
3e293154 1696 {
8b7773a4 1697 jfunc->agg.by_ref = by_ref;
0d48ee34 1698 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1699 }
1700}
1701
5d5f1e95
KV
1702/* Return the Ith param type of callee associated with call graph
1703 edge E. */
1704
1705tree
06d65050
JH
1706ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1707{
1708 int n;
1709 tree type = (e->callee
67348ccc 1710 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1711 : gimple_call_fntype (e->call_stmt));
1712 tree t = TYPE_ARG_TYPES (type);
1713
1714 for (n = 0; n < i; n++)
1715 {
1716 if (!t)
1717 break;
1718 t = TREE_CHAIN (t);
1719 }
1720 if (t)
1721 return TREE_VALUE (t);
1722 if (!e->callee)
1723 return NULL;
67348ccc 1724 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1725 for (n = 0; n < i; n++)
1726 {
1727 if (!t)
1728 return NULL;
1729 t = TREE_CHAIN (t);
1730 }
1731 if (t)
1732 return TREE_TYPE (t);
1733 return NULL;
1734}
1735
86cd0334
MJ
1736/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1737 allocated structure or a previously existing one shared with other jump
1738 functions and/or transformation summaries. */
1739
1740ipa_bits *
1741ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1742{
1743 ipa_bits tmp;
1744 tmp.value = value;
1745 tmp.mask = mask;
1746
1747 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1748 if (*slot)
1749 return *slot;
1750
1751 ipa_bits *res = ggc_alloc<ipa_bits> ();
1752 res->value = value;
1753 res->mask = mask;
1754 *slot = res;
1755
1756 return res;
1757}
1758
1759/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1760 table in order to avoid creating multiple same ipa_bits structures. */
1761
1762static void
1763ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1764 const widest_int &mask)
1765{
1766 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1767}
1768
1769/* Return a pointer to a value_range just like *TMP, but either find it in
1770 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1771
1772static value_range *
1773ipa_get_value_range (value_range *tmp)
1774{
1775 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1776 if (*slot)
1777 return *slot;
1778
1779 value_range *vr = ggc_alloc<value_range> ();
1780 *vr = *tmp;
1781 *slot = vr;
1782
1783 return vr;
1784}
1785
1786/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1787 equiv set. Use hash table in order to avoid creating multiple same copies of
1788 value_ranges. */
1789
1790static value_range *
1791ipa_get_value_range (enum value_range_type type, tree min, tree max)
1792{
1793 value_range tmp;
1794 tmp.type = type;
1795 tmp.min = min;
1796 tmp.max = max;
1797 tmp.equiv = NULL;
1798 return ipa_get_value_range (&tmp);
1799}
1800
1801/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1802 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1803 same value_range structures. */
1804
1805static void
1806ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1807 tree min, tree max)
1808{
1809 jf->m_vr = ipa_get_value_range (type, min, max);
1810}
1811
1812/* Assign to JF a pointer to a value_range just liek TMP but either fetch a
1813 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1814
1815static void
1816ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1817{
1818 jf->m_vr = ipa_get_value_range (tmp);
1819}
1820
3e293154
MJ
1821/* Compute jump function for all arguments of callsite CS and insert the
1822 information in the jump_functions array in the ipa_edge_args corresponding
1823 to this callsite. */
be95e2b9 1824
749aa96d 1825static void
56b40062 1826ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
062c604f 1827 struct cgraph_edge *cs)
3e293154
MJ
1828{
1829 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09 1830 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
538dd0b7 1831 gcall *call = cs->call_stmt;
8b7773a4 1832 int n, arg_num = gimple_call_num_args (call);
5ce97055 1833 bool useful_context = false;
3e293154 1834
606d9a09 1835 if (arg_num == 0 || args->jump_functions)
3e293154 1836 return;
9771b263 1837 vec_safe_grow_cleared (args->jump_functions, arg_num);
5ce97055
JH
1838 if (flag_devirtualize)
1839 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
3e293154 1840
96e24d49
JJ
1841 if (gimple_call_internal_p (call))
1842 return;
5fe8e757
MJ
1843 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1844 return;
1845
8b7773a4
MJ
1846 for (n = 0; n < arg_num; n++)
1847 {
1848 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1849 tree arg = gimple_call_arg (call, n);
06d65050 1850 tree param_type = ipa_get_callee_param_type (cs, n);
5ce97055
JH
1851 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1852 {
049e6d36 1853 tree instance;
5ce97055
JH
1854 struct ipa_polymorphic_call_context context (cs->caller->decl,
1855 arg, cs->call_stmt,
049e6d36
JH
1856 &instance);
1857 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
5ce97055
JH
1858 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1859 if (!context.useless_p ())
1860 useful_context = true;
1861 }
3e293154 1862
718625ad
KV
1863 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1864 {
f7503699
KV
1865 bool addr_nonzero = false;
1866 bool strict_overflow = false;
1867
718625ad
KV
1868 if (TREE_CODE (arg) == SSA_NAME
1869 && param_type
1870 && get_ptr_nonnull (arg))
f7503699
KV
1871 addr_nonzero = true;
1872 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1873 addr_nonzero = true;
1874
1875 if (addr_nonzero)
718625ad 1876 {
86cd0334
MJ
1877 tree z = build_int_cst (TREE_TYPE (arg), 0);
1878 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
718625ad
KV
1879 }
1880 else
86cd0334 1881 gcc_assert (!jfunc->m_vr);
718625ad
KV
1882 }
1883 else
8bc5448f
KV
1884 {
1885 wide_int min, max;
1886 value_range_type type;
1887 if (TREE_CODE (arg) == SSA_NAME
1888 && param_type
1889 && (type = get_range_info (arg, &min, &max))
3a4228ba 1890 && (type == VR_RANGE || type == VR_ANTI_RANGE))
8bc5448f 1891 {
86cd0334
MJ
1892 value_range tmpvr,resvr;
1893
1894 tmpvr.type = type;
1895 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1896 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1897 tmpvr.equiv = NULL;
1898 memset (&resvr, 0, sizeof (resvr));
1899 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1900 &tmpvr, TREE_TYPE (arg));
1901 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1902 ipa_set_jfunc_vr (jfunc, &resvr);
3a4228ba 1903 else
86cd0334 1904 gcc_assert (!jfunc->m_vr);
8bc5448f
KV
1905 }
1906 else
86cd0334 1907 gcc_assert (!jfunc->m_vr);
8bc5448f 1908 }
04be694e 1909
209ca542
PK
1910 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1911 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1912 {
209ca542 1913 if (TREE_CODE (arg) == SSA_NAME)
86cd0334
MJ
1914 ipa_set_jfunc_bits (jfunc, 0,
1915 widest_int::from (get_nonzero_bits (arg),
1916 TYPE_SIGN (TREE_TYPE (arg))));
209ca542 1917 else
86cd0334 1918 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
209ca542 1919 }
67b97478
PK
1920 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1921 {
1922 unsigned HOST_WIDE_INT bitpos;
1923 unsigned align;
1924
67b97478 1925 get_pointer_alignment_1 (arg, &align, &bitpos);
7b27cb4b
RS
1926 widest_int mask = wi::bit_and_not
1927 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1928 align / BITS_PER_UNIT - 1);
86cd0334
MJ
1929 widest_int value = bitpos / BITS_PER_UNIT;
1930 ipa_set_jfunc_bits (jfunc, value, mask);
67b97478 1931 }
209ca542 1932 else
86cd0334 1933 gcc_assert (!jfunc->bits);
209ca542 1934
04643334 1935 if (is_gimple_ip_invariant (arg)
8813a647 1936 || (VAR_P (arg)
04643334
MJ
1937 && is_global_var (arg)
1938 && TREE_READONLY (arg)))
4502fe8d 1939 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1940 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1941 && TREE_CODE (arg) == PARM_DECL)
1942 {
1943 int index = ipa_get_param_decl_index (info, arg);
1944
1945 gcc_assert (index >=0);
1946 /* Aggregate passed by value, check for pass-through, otherwise we
1947 will attempt to fill in aggregate contents later in this
1948 for cycle. */
8aab5218 1949 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
8b7773a4 1950 {
3b97a5c7 1951 ipa_set_jf_simple_pass_through (jfunc, index, false);
8b7773a4
MJ
1952 continue;
1953 }
1954 }
1955 else if (TREE_CODE (arg) == SSA_NAME)
1956 {
1957 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1958 {
1959 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1960 if (index >= 0)
8b7773a4 1961 {
3b97a5c7 1962 bool agg_p;
8aab5218 1963 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
3b97a5c7 1964 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
8b7773a4
MJ
1965 }
1966 }
1967 else
1968 {
355fe088 1969 gimple *stmt = SSA_NAME_DEF_STMT (arg);
8b7773a4 1970 if (is_gimple_assign (stmt))
8aab5218 1971 compute_complex_assign_jump_func (fbi, info, jfunc,
06d65050 1972 call, stmt, arg, param_type);
8b7773a4 1973 else if (gimple_code (stmt) == GIMPLE_PHI)
8aab5218 1974 compute_complex_ancestor_jump_func (fbi, info, jfunc,
538dd0b7
DM
1975 call,
1976 as_a <gphi *> (stmt));
8b7773a4
MJ
1977 }
1978 }
3e293154 1979
85942f45
JH
1980 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1981 passed (because type conversions are ignored in gimple). Usually we can
1982 safely get type from function declaration, but in case of K&R prototypes or
1983 variadic functions we can try our luck with type of the pointer passed.
1984 TODO: Since we look for actual initialization of the memory object, we may better
1985 work out the type based on the memory stores we find. */
1986 if (!param_type)
1987 param_type = TREE_TYPE (arg);
1988
8b7773a4
MJ
1989 if ((jfunc->type != IPA_JF_PASS_THROUGH
1990 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1991 && (jfunc->type != IPA_JF_ANCESTOR
1992 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1993 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
85942f45 1994 || POINTER_TYPE_P (param_type)))
0d48ee34 1995 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
8b7773a4 1996 }
5ce97055
JH
1997 if (!useful_context)
1998 vec_free (args->polymorphic_call_contexts);
3e293154
MJ
1999}
2000
749aa96d 2001/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2002 from BB. */
749aa96d 2003
062c604f 2004static void
56b40062 2005ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2006{
8aab5218
MJ
2007 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2008 int i;
749aa96d
MJ
2009 struct cgraph_edge *cs;
2010
8aab5218 2011 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2012 {
8aab5218 2013 struct cgraph_node *callee = cs->callee;
749aa96d 2014
8aab5218
MJ
2015 if (callee)
2016 {
d52f5295 2017 callee->ultimate_alias_target ();
8aab5218
MJ
2018 /* We do not need to bother analyzing calls to unknown functions
2019 unless they may become known during lto/whopr. */
2020 if (!callee->definition && !flag_lto)
2021 continue;
2022 }
2023 ipa_compute_jump_functions_for_edge (fbi, cs);
2024 }
749aa96d
MJ
2025}
2026
8b7773a4
MJ
2027/* If STMT looks like a statement loading a value from a member pointer formal
2028 parameter, return that parameter and store the offset of the field to
2029 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2030 might be clobbered). If USE_DELTA, then we look for a use of the delta
2031 field rather than the pfn. */
be95e2b9 2032
3e293154 2033static tree
355fe088 2034ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
8b7773a4 2035 HOST_WIDE_INT *offset_p)
3e293154 2036{
8b7773a4
MJ
2037 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2038
2039 if (!gimple_assign_single_p (stmt))
2040 return NULL_TREE;
3e293154 2041
8b7773a4 2042 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
2043 if (TREE_CODE (rhs) == COMPONENT_REF)
2044 {
2045 ref_field = TREE_OPERAND (rhs, 1);
2046 rhs = TREE_OPERAND (rhs, 0);
2047 }
2048 else
2049 ref_field = NULL_TREE;
d242d063 2050 if (TREE_CODE (rhs) != MEM_REF)
3e293154 2051 return NULL_TREE;
3e293154 2052 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
2053 if (TREE_CODE (rec) != ADDR_EXPR)
2054 return NULL_TREE;
2055 rec = TREE_OPERAND (rec, 0);
3e293154 2056 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 2057 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 2058 return NULL_TREE;
d242d063 2059 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 2060
8b7773a4
MJ
2061 if (use_delta)
2062 fld = delta_field;
2063 else
2064 fld = ptr_field;
2065 if (offset_p)
2066 *offset_p = int_bit_position (fld);
2067
ae788515
EB
2068 if (ref_field)
2069 {
2070 if (integer_nonzerop (ref_offset))
2071 return NULL_TREE;
ae788515
EB
2072 return ref_field == fld ? rec : NULL_TREE;
2073 }
3e293154 2074 else
8b7773a4
MJ
2075 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2076 : NULL_TREE;
3e293154
MJ
2077}
2078
2079/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2080
3e293154
MJ
2081static bool
2082ipa_is_ssa_with_stmt_def (tree t)
2083{
2084 if (TREE_CODE (t) == SSA_NAME
2085 && !SSA_NAME_IS_DEFAULT_DEF (t))
2086 return true;
2087 else
2088 return false;
2089}
2090
40591473
MJ
2091/* Find the indirect call graph edge corresponding to STMT and mark it as a
2092 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2093 indirect call graph edge. */
be95e2b9 2094
40591473 2095static struct cgraph_edge *
538dd0b7
DM
2096ipa_note_param_call (struct cgraph_node *node, int param_index,
2097 gcall *stmt)
3e293154 2098{
e33c6cd6 2099 struct cgraph_edge *cs;
3e293154 2100
d52f5295 2101 cs = node->get_edge (stmt);
b258210c 2102 cs->indirect_info->param_index = param_index;
8b7773a4 2103 cs->indirect_info->agg_contents = 0;
c13bc3d9 2104 cs->indirect_info->member_ptr = 0;
91bb9f80 2105 cs->indirect_info->guaranteed_unmodified = 0;
40591473 2106 return cs;
3e293154
MJ
2107}
2108
e33c6cd6 2109/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 2110 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
2111 intermediate information about each formal parameter. Currently it checks
2112 whether the call calls a pointer that is a formal parameter and if so, the
2113 parameter is marked with the called flag and an indirect call graph edge
2114 describing the call is created. This is very simple for ordinary pointers
2115 represented in SSA but not-so-nice when it comes to member pointers. The
2116 ugly part of this function does nothing more than trying to match the
2117 pattern of such a call. An example of such a pattern is the gimple dump
2118 below, the call is on the last line:
3e293154 2119
ae788515
EB
2120 <bb 2>:
2121 f$__delta_5 = f.__delta;
2122 f$__pfn_24 = f.__pfn;
2123
2124 or
3e293154 2125 <bb 2>:
d242d063
MJ
2126 f$__delta_5 = MEM[(struct *)&f];
2127 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 2128
ae788515 2129 and a few lines below:
8aa29647
MJ
2130
2131 <bb 5>
3e293154
MJ
2132 D.2496_3 = (int) f$__pfn_24;
2133 D.2497_4 = D.2496_3 & 1;
2134 if (D.2497_4 != 0)
2135 goto <bb 3>;
2136 else
2137 goto <bb 4>;
2138
8aa29647 2139 <bb 6>:
3e293154
MJ
2140 D.2500_7 = (unsigned int) f$__delta_5;
2141 D.2501_8 = &S + D.2500_7;
2142 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2143 D.2503_10 = *D.2502_9;
2144 D.2504_12 = f$__pfn_24 + -1;
2145 D.2505_13 = (unsigned int) D.2504_12;
2146 D.2506_14 = D.2503_10 + D.2505_13;
2147 D.2507_15 = *D.2506_14;
2148 iftmp.11_16 = (String:: *) D.2507_15;
2149
8aa29647 2150 <bb 7>:
3e293154
MJ
2151 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2152 D.2500_19 = (unsigned int) f$__delta_5;
2153 D.2508_20 = &S + D.2500_19;
2154 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2155
2156 Such patterns are results of simple calls to a member pointer:
2157
2158 int doprinting (int (MyString::* f)(int) const)
2159 {
2160 MyString S ("somestring");
2161
2162 return (S.*f)(4);
2163 }
8b7773a4
MJ
2164
2165 Moreover, the function also looks for called pointers loaded from aggregates
2166 passed by value or reference. */
3e293154
MJ
2167
2168static void
56b40062 2169ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
8aab5218 2170 tree target)
3e293154 2171{
8aab5218 2172 struct ipa_node_params *info = fbi->info;
8b7773a4
MJ
2173 HOST_WIDE_INT offset;
2174 bool by_ref;
3e293154 2175
3e293154
MJ
2176 if (SSA_NAME_IS_DEFAULT_DEF (target))
2177 {
b258210c 2178 tree var = SSA_NAME_VAR (target);
8aab5218 2179 int index = ipa_get_param_decl_index (info, var);
3e293154 2180 if (index >= 0)
8aab5218 2181 ipa_note_param_call (fbi->node, index, call);
3e293154
MJ
2182 return;
2183 }
2184
8aab5218 2185 int index;
355fe088 2186 gimple *def = SSA_NAME_DEF_STMT (target);
91bb9f80 2187 bool guaranteed_unmodified;
8b7773a4 2188 if (gimple_assign_single_p (def)
ff302741
PB
2189 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2190 gimple_assign_rhs1 (def), &index, &offset,
91bb9f80 2191 NULL, &by_ref, &guaranteed_unmodified))
8b7773a4 2192 {
8aab5218 2193 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2194 cs->indirect_info->offset = offset;
2195 cs->indirect_info->agg_contents = 1;
2196 cs->indirect_info->by_ref = by_ref;
91bb9f80 2197 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
8b7773a4
MJ
2198 return;
2199 }
2200
3e293154
MJ
2201 /* Now we need to try to match the complex pattern of calling a member
2202 pointer. */
8b7773a4
MJ
2203 if (gimple_code (def) != GIMPLE_PHI
2204 || gimple_phi_num_args (def) != 2
2205 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
2206 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2207 return;
2208
3e293154
MJ
2209 /* First, we need to check whether one of these is a load from a member
2210 pointer that is a parameter to this function. */
8aab5218
MJ
2211 tree n1 = PHI_ARG_DEF (def, 0);
2212 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 2213 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 2214 return;
355fe088
TS
2215 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2216 gimple *d2 = SSA_NAME_DEF_STMT (n2);
3e293154 2217
8aab5218
MJ
2218 tree rec;
2219 basic_block bb, virt_bb;
2220 basic_block join = gimple_bb (def);
8b7773a4 2221 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 2222 {
8b7773a4 2223 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
2224 return;
2225
8aa29647 2226 bb = EDGE_PRED (join, 0)->src;
726a989a 2227 virt_bb = gimple_bb (d2);
3e293154 2228 }
8b7773a4 2229 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 2230 {
8aa29647 2231 bb = EDGE_PRED (join, 1)->src;
726a989a 2232 virt_bb = gimple_bb (d1);
3e293154
MJ
2233 }
2234 else
2235 return;
2236
2237 /* Second, we need to check that the basic blocks are laid out in the way
2238 corresponding to the pattern. */
2239
3e293154
MJ
2240 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2241 || single_pred (virt_bb) != bb
2242 || single_succ (virt_bb) != join)
2243 return;
2244
2245 /* Third, let's see that the branching is done depending on the least
2246 significant bit of the pfn. */
2247
355fe088 2248 gimple *branch = last_stmt (bb);
8aa29647 2249 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
2250 return;
2251
12430896
RG
2252 if ((gimple_cond_code (branch) != NE_EXPR
2253 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 2254 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 2255 return;
3e293154 2256
8aab5218 2257 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
2258 if (!ipa_is_ssa_with_stmt_def (cond))
2259 return;
2260
726a989a 2261 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 2262 if (!is_gimple_assign (def)
726a989a
RB
2263 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2264 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 2265 return;
726a989a
RB
2266
2267 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2268 if (!ipa_is_ssa_with_stmt_def (cond))
2269 return;
2270
726a989a 2271 def = SSA_NAME_DEF_STMT (cond);
3e293154 2272
8b75fc9b
MJ
2273 if (is_gimple_assign (def)
2274 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2275 {
726a989a 2276 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2277 if (!ipa_is_ssa_with_stmt_def (cond))
2278 return;
726a989a 2279 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2280 }
2281
8aab5218 2282 tree rec2;
6f7b8b70
RE
2283 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2284 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2285 == ptrmemfunc_vbit_in_delta),
2286 NULL);
3e293154
MJ
2287 if (rec != rec2)
2288 return;
2289
2290 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2291 if (index >= 0
8aab5218 2292 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2293 {
8aab5218 2294 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2295 cs->indirect_info->offset = offset;
2296 cs->indirect_info->agg_contents = 1;
c13bc3d9 2297 cs->indirect_info->member_ptr = 1;
91bb9f80 2298 cs->indirect_info->guaranteed_unmodified = 1;
8b7773a4 2299 }
3e293154
MJ
2300
2301 return;
2302}
2303
b258210c
MJ
2304/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2305 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2306 FBI->node (described by FBI->info), create a call note for the
2307 statement. */
b258210c
MJ
2308
2309static void
56b40062 2310ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
538dd0b7 2311 gcall *call, tree target)
b258210c
MJ
2312{
2313 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2314 int index;
40591473 2315 HOST_WIDE_INT anc_offset;
b258210c 2316
05842ff5
MJ
2317 if (!flag_devirtualize)
2318 return;
2319
40591473 2320 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2321 return;
2322
8aab5218 2323 struct ipa_node_params *info = fbi->info;
40591473
MJ
2324 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2325 {
8aab5218 2326 struct ipa_jump_func jfunc;
40591473
MJ
2327 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2328 return;
b258210c 2329
40591473
MJ
2330 anc_offset = 0;
2331 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2332 gcc_assert (index >= 0);
06d65050
JH
2333 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2334 call, &jfunc))
40591473
MJ
2335 return;
2336 }
2337 else
2338 {
8aab5218 2339 struct ipa_jump_func jfunc;
355fe088 2340 gimple *stmt = SSA_NAME_DEF_STMT (obj);
40591473
MJ
2341 tree expr;
2342
2343 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2344 if (!expr)
2345 return;
2346 index = ipa_get_param_decl_index (info,
2347 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2348 gcc_assert (index >= 0);
06d65050
JH
2349 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2350 call, &jfunc, anc_offset))
40591473
MJ
2351 return;
2352 }
2353
8aab5218
MJ
2354 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2355 struct cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2356 ii->offset = anc_offset;
ae7e9ddd 2357 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2358 ii->otr_type = obj_type_ref_class (target);
40591473 2359 ii->polymorphic = 1;
b258210c
MJ
2360}
2361
2362/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2363 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2364 containing intermediate information about each formal parameter. */
b258210c
MJ
2365
2366static void
56b40062 2367ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2368{
2369 tree target = gimple_call_fn (call);
b786d31f
JH
2370
2371 if (!target
2372 || (TREE_CODE (target) != SSA_NAME
2373 && !virtual_method_call_p (target)))
2374 return;
b258210c 2375
7d0aa05b 2376 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2377 /* If we previously turned the call into a direct call, there is
2378 no need to analyze. */
b786d31f 2379 if (cs && !cs->indirect_unknown_callee)
25583c4f 2380 return;
7d0aa05b 2381
a5b58b28 2382 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2383 {
7d0aa05b
JH
2384 tree instance;
2385 tree target = gimple_call_fn (call);
6f8091fc
JH
2386 ipa_polymorphic_call_context context (current_function_decl,
2387 target, call, &instance);
7d0aa05b 2388
ba392339
JH
2389 gcc_checking_assert (cs->indirect_info->otr_type
2390 == obj_type_ref_class (target));
2391 gcc_checking_assert (cs->indirect_info->otr_token
2392 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2393
29c43c83
JH
2394 cs->indirect_info->vptr_changed
2395 = !context.get_dynamic_type (instance,
2396 OBJ_TYPE_REF_OBJECT (target),
2397 obj_type_ref_class (target), call);
0127c169 2398 cs->indirect_info->context = context;
7d0aa05b
JH
2399 }
2400
b258210c 2401 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2402 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2403 else if (virtual_method_call_p (target))
8aab5218 2404 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2405}
2406
2407
e33c6cd6 2408/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2409 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2410 formal parameters are called. */
be95e2b9 2411
3e293154 2412static void
355fe088 2413ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2414{
726a989a 2415 if (is_gimple_call (stmt))
538dd0b7 2416 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2417}
2418
2419/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2420 If OP is a parameter declaration, mark it as used in the info structure
2421 passed in DATA. */
2422
2423static bool
355fe088 2424visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2425{
2426 struct ipa_node_params *info = (struct ipa_node_params *) data;
2427
2428 op = get_base_address (op);
2429 if (op
2430 && TREE_CODE (op) == PARM_DECL)
2431 {
2432 int index = ipa_get_param_decl_index (info, op);
2433 gcc_assert (index >= 0);
310bc633 2434 ipa_set_param_used (info, index, true);
062c604f
MJ
2435 }
2436
2437 return false;
3e293154
MJ
2438}
2439
8aab5218
MJ
2440/* Scan the statements in BB and inspect the uses of formal parameters. Store
2441 the findings in various structures of the associated ipa_node_params
2442 structure, such as parameter flags, notes etc. FBI holds various data about
2443 the function being analyzed. */
be95e2b9 2444
062c604f 2445static void
56b40062 2446ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2447{
726a989a 2448 gimple_stmt_iterator gsi;
8aab5218
MJ
2449 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2450 {
355fe088 2451 gimple *stmt = gsi_stmt (gsi);
3e293154 2452
8aab5218
MJ
2453 if (is_gimple_debug (stmt))
2454 continue;
3e293154 2455
8aab5218
MJ
2456 ipa_analyze_stmt_uses (fbi, stmt);
2457 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2458 visit_ref_for_mod_analysis,
2459 visit_ref_for_mod_analysis,
2460 visit_ref_for_mod_analysis);
5fe8e757 2461 }
8aab5218
MJ
2462 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2463 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2464 visit_ref_for_mod_analysis,
2465 visit_ref_for_mod_analysis,
2466 visit_ref_for_mod_analysis);
2467}
2468
2469/* Calculate controlled uses of parameters of NODE. */
2470
2471static void
2472ipa_analyze_controlled_uses (struct cgraph_node *node)
2473{
2474 struct ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2475
8aab5218 2476 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2477 {
2478 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2479 int controlled_uses = 0;
2480
062c604f
MJ
2481 /* For SSA regs see if parameter is used. For non-SSA we compute
2482 the flag during modification analysis. */
4502fe8d
MJ
2483 if (is_gimple_reg (parm))
2484 {
67348ccc 2485 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2486 parm);
2487 if (ddef && !has_zero_uses (ddef))
2488 {
2489 imm_use_iterator imm_iter;
2490 use_operand_p use_p;
2491
2492 ipa_set_param_used (info, i, true);
2493 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2494 if (!is_gimple_call (USE_STMT (use_p)))
2495 {
c6de6665
JJ
2496 if (!is_gimple_debug (USE_STMT (use_p)))
2497 {
2498 controlled_uses = IPA_UNDESCRIBED_USE;
2499 break;
2500 }
4502fe8d
MJ
2501 }
2502 else
2503 controlled_uses++;
2504 }
2505 else
2506 controlled_uses = 0;
2507 }
2508 else
2509 controlled_uses = IPA_UNDESCRIBED_USE;
2510 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2511 }
8aab5218 2512}
062c604f 2513
8aab5218 2514/* Free stuff in BI. */
062c604f 2515
8aab5218
MJ
2516static void
2517free_ipa_bb_info (struct ipa_bb_info *bi)
2518{
2519 bi->cg_edges.release ();
2520 bi->param_aa_statuses.release ();
3e293154
MJ
2521}
2522
8aab5218 2523/* Dominator walker driving the analysis. */
2c9561b5 2524
8aab5218 2525class analysis_dom_walker : public dom_walker
2c9561b5 2526{
8aab5218 2527public:
56b40062 2528 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2529 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2530
3daacdcd 2531 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2532
2533private:
56b40062 2534 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2535};
2536
3daacdcd 2537edge
8aab5218
MJ
2538analysis_dom_walker::before_dom_children (basic_block bb)
2539{
2540 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2541 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2542 return NULL;
2c9561b5
MJ
2543}
2544
c3431191
ML
2545/* Release body info FBI. */
2546
2547void
2548ipa_release_body_info (struct ipa_func_body_info *fbi)
2549{
2550 int i;
2551 struct ipa_bb_info *bi;
2552
2553 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2554 free_ipa_bb_info (bi);
2555 fbi->bb_infos.release ();
2556}
2557
026c3cfd 2558/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2559 of NODE, analyze their uses and compute jump functions associated
2560 with actual arguments of calls from within NODE. */
062c604f
MJ
2561
2562void
2563ipa_analyze_node (struct cgraph_node *node)
2564{
56b40062 2565 struct ipa_func_body_info fbi;
57dbdc5a 2566 struct ipa_node_params *info;
062c604f 2567
57dbdc5a
MJ
2568 ipa_check_create_node_params ();
2569 ipa_check_create_edge_args ();
2570 info = IPA_NODE_REF (node);
8aab5218
MJ
2571
2572 if (info->analysis_done)
2573 return;
2574 info->analysis_done = 1;
2575
2576 if (ipa_func_spec_opts_forbid_analysis_p (node))
2577 {
2578 for (int i = 0; i < ipa_get_param_count (info); i++)
2579 {
2580 ipa_set_param_used (info, i, true);
2581 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2582 }
2583 return;
2584 }
2585
2586 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2587 push_cfun (func);
2588 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2589 ipa_initialize_node_params (node);
8aab5218 2590 ipa_analyze_controlled_uses (node);
062c604f 2591
8aab5218
MJ
2592 fbi.node = node;
2593 fbi.info = IPA_NODE_REF (node);
2594 fbi.bb_infos = vNULL;
2595 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2596 fbi.param_count = ipa_get_param_count (info);
2597 fbi.aa_walked = 0;
062c604f 2598
8aab5218
MJ
2599 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2600 {
2601 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2602 bi->cg_edges.safe_push (cs);
2603 }
062c604f 2604
8aab5218
MJ
2605 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2606 {
2607 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2608 bi->cg_edges.safe_push (cs);
2609 }
2610
2611 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2612
c3431191 2613 ipa_release_body_info (&fbi);
8aab5218 2614 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2615 pop_cfun ();
062c604f 2616}
062c604f 2617
be95e2b9 2618/* Update the jump functions associated with call graph edge E when the call
3e293154 2619 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2620 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2621
3e293154
MJ
2622static void
2623update_jump_functions_after_inlining (struct cgraph_edge *cs,
2624 struct cgraph_edge *e)
2625{
2626 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2627 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2628 int count = ipa_get_cs_argument_count (args);
2629 int i;
2630
2631 for (i = 0; i < count; i++)
2632 {
b258210c 2633 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
5ce97055
JH
2634 struct ipa_polymorphic_call_context *dst_ctx
2635 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2636
685b0d13
MJ
2637 if (dst->type == IPA_JF_ANCESTOR)
2638 {
b258210c 2639 struct ipa_jump_func *src;
8b7773a4 2640 int dst_fid = dst->value.ancestor.formal_id;
5ce97055
JH
2641 struct ipa_polymorphic_call_context *src_ctx
2642 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2643
b258210c
MJ
2644 /* Variable number of arguments can cause havoc if we try to access
2645 one that does not exist in the inlined edge. So make sure we
2646 don't. */
8b7773a4 2647 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2648 {
04be694e 2649 ipa_set_jf_unknown (dst);
b258210c
MJ
2650 continue;
2651 }
2652
8b7773a4
MJ
2653 src = ipa_get_ith_jump_func (top, dst_fid);
2654
5ce97055
JH
2655 if (src_ctx && !src_ctx->useless_p ())
2656 {
2657 struct ipa_polymorphic_call_context ctx = *src_ctx;
2658
2659 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2660 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2661 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2662 ctx.offset_by (dst->value.ancestor.offset);
2663 if (!ctx.useless_p ())
2664 {
a7d1f3fe
ML
2665 if (!dst_ctx)
2666 {
2667 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2668 count);
2669 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2670 }
2671
2672 dst_ctx->combine_with (ctx);
5ce97055
JH
2673 }
2674 }
2675
8b7773a4
MJ
2676 if (src->agg.items
2677 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2678 {
2679 struct ipa_agg_jf_item *item;
2680 int j;
2681
2682 /* Currently we do not produce clobber aggregate jump functions,
2683 replace with merging when we do. */
2684 gcc_assert (!dst->agg.items);
2685
9771b263 2686 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2687 dst->agg.by_ref = src->agg.by_ref;
9771b263 2688 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2689 item->offset -= dst->value.ancestor.offset;
2690 }
2691
3b97a5c7
MJ
2692 if (src->type == IPA_JF_PASS_THROUGH
2693 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2694 {
2695 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2696 dst->value.ancestor.agg_preserved &=
2697 src->value.pass_through.agg_preserved;
2698 }
a2b4c188
KV
2699 else if (src->type == IPA_JF_PASS_THROUGH
2700 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2701 {
2702 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2703 dst->value.ancestor.agg_preserved = false;
2704 }
b258210c
MJ
2705 else if (src->type == IPA_JF_ANCESTOR)
2706 {
2707 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2708 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2709 dst->value.ancestor.agg_preserved &=
2710 src->value.ancestor.agg_preserved;
b258210c
MJ
2711 }
2712 else
04be694e 2713 ipa_set_jf_unknown (dst);
b258210c
MJ
2714 }
2715 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2716 {
b258210c
MJ
2717 struct ipa_jump_func *src;
2718 /* We must check range due to calls with variable number of arguments
2719 and we cannot combine jump functions with operations. */
2720 if (dst->value.pass_through.operation == NOP_EXPR
2721 && (dst->value.pass_through.formal_id
2722 < ipa_get_cs_argument_count (top)))
2723 {
8b7773a4
MJ
2724 int dst_fid = dst->value.pass_through.formal_id;
2725 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2726 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
5ce97055
JH
2727 struct ipa_polymorphic_call_context *src_ctx
2728 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2729
5ce97055
JH
2730 if (src_ctx && !src_ctx->useless_p ())
2731 {
2732 struct ipa_polymorphic_call_context ctx = *src_ctx;
2733
2734 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2735 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2736 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2737 if (!ctx.useless_p ())
2738 {
2739 if (!dst_ctx)
2740 {
2741 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2742 count);
2743 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2744 }
2745 dst_ctx->combine_with (ctx);
2746 }
2747 }
b8f6e610
MJ
2748 switch (src->type)
2749 {
2750 case IPA_JF_UNKNOWN:
04be694e 2751 ipa_set_jf_unknown (dst);
b8f6e610 2752 break;
b8f6e610
MJ
2753 case IPA_JF_CONST:
2754 ipa_set_jf_cst_copy (dst, src);
2755 break;
2756
2757 case IPA_JF_PASS_THROUGH:
2758 {
2759 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2760 enum tree_code operation;
2761 operation = ipa_get_jf_pass_through_operation (src);
2762
2763 if (operation == NOP_EXPR)
2764 {
3b97a5c7 2765 bool agg_p;
b8f6e610
MJ
2766 agg_p = dst_agg_p
2767 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2768 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 2769 }
a2b4c188
KV
2770 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2771 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
2772 else
2773 {
2774 tree operand = ipa_get_jf_pass_through_operand (src);
2775 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2776 operation);
2777 }
2778 break;
2779 }
2780 case IPA_JF_ANCESTOR:
2781 {
3b97a5c7 2782 bool agg_p;
b8f6e610
MJ
2783 agg_p = dst_agg_p
2784 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2785 ipa_set_ancestor_jf (dst,
2786 ipa_get_jf_ancestor_offset (src),
b8f6e610 2787 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2788 agg_p);
b8f6e610
MJ
2789 break;
2790 }
2791 default:
2792 gcc_unreachable ();
2793 }
8b7773a4
MJ
2794
2795 if (src->agg.items
b8f6e610 2796 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2797 {
2798 /* Currently we do not produce clobber aggregate jump
2799 functions, replace with merging when we do. */
2800 gcc_assert (!dst->agg.items);
2801
2802 dst->agg.by_ref = src->agg.by_ref;
9771b263 2803 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2804 }
b258210c
MJ
2805 }
2806 else
04be694e 2807 ipa_set_jf_unknown (dst);
3e293154 2808 }
b258210c
MJ
2809 }
2810}
2811
5ce97055
JH
2812/* If TARGET is an addr_expr of a function declaration, make it the
2813 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2814 Otherwise, return NULL. */
b258210c 2815
3949c4a7 2816struct cgraph_edge *
5ce97055
JH
2817ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2818 bool speculative)
b258210c
MJ
2819{
2820 struct cgraph_node *callee;
99353fcf 2821 struct ipa_call_summary *es = ipa_call_summaries->get_create (ie);
48b1474e 2822 bool unreachable = false;
b258210c 2823
ceeffab0
MJ
2824 if (TREE_CODE (target) == ADDR_EXPR)
2825 target = TREE_OPERAND (target, 0);
b258210c 2826 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2827 {
2828 target = canonicalize_constructor_val (target, NULL);
2829 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2830 {
db66bf68
JH
2831 /* Member pointer call that goes through a VMT lookup. */
2832 if (ie->indirect_info->member_ptr
2833 /* Or if target is not an invariant expression and we do not
2834 know if it will evaulate to function at runtime.
2835 This can happen when folding through &VAR, where &VAR
2836 is IP invariant, but VAR itself is not.
2837
2838 TODO: Revisit this when GCC 5 is branched. It seems that
2839 member_ptr check is not needed and that we may try to fold
2840 the expression and see if VAR is readonly. */
2841 || !is_gimple_ip_invariant (target))
2842 {
2843 if (dump_enabled_p ())
2844 {
2845 location_t loc = gimple_location_safe (ie->call_stmt);
2846 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
464d0118
ML
2847 "discovered direct call non-invariant %s\n",
2848 ie->caller->dump_name ());
db66bf68
JH
2849 }
2850 return NULL;
2851 }
2852
c13bc3d9 2853
2b5f0895
XDL
2854 if (dump_enabled_p ())
2855 {
807b7d62
ML
2856 location_t loc = gimple_location_safe (ie->call_stmt);
2857 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
464d0118 2858 "discovered direct call to non-function in %s, "
807b7d62 2859 "making it __builtin_unreachable\n",
464d0118 2860 ie->caller->dump_name ());
2b5f0895 2861 }
3c9e6fca 2862
48b1474e 2863 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2864 callee = cgraph_node::get_create (target);
48b1474e 2865 unreachable = true;
a0a7b611 2866 }
48b1474e 2867 else
d52f5295 2868 callee = cgraph_node::get (target);
a0a7b611 2869 }
48b1474e 2870 else
d52f5295 2871 callee = cgraph_node::get (target);
a0a7b611
JH
2872
2873 /* Because may-edges are not explicitely represented and vtable may be external,
2874 we may create the first reference to the object in the unit. */
2875 if (!callee || callee->global.inlined_to)
2876 {
a0a7b611
JH
2877
2878 /* We are better to ensure we can refer to it.
2879 In the case of static functions we are out of luck, since we already
2880 removed its body. In the case of public functions we may or may
2881 not introduce the reference. */
2882 if (!canonicalize_constructor_val (target, NULL)
2883 || !TREE_PUBLIC (target))
2884 {
2885 if (dump_file)
2886 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
464d0118
ML
2887 "(%s -> %s) but can not refer to it. Giving up.\n",
2888 ie->caller->dump_name (),
2889 ie->callee->dump_name ());
a0a7b611
JH
2890 return NULL;
2891 }
d52f5295 2892 callee = cgraph_node::get_create (target);
a0a7b611 2893 }
2b5f0895 2894
0127c169
JH
2895 /* If the edge is already speculated. */
2896 if (speculative && ie->speculative)
2897 {
2898 struct cgraph_edge *e2;
2899 struct ipa_ref *ref;
2900 ie->speculative_call_info (e2, ie, ref);
2901 if (e2->callee->ultimate_alias_target ()
2902 != callee->ultimate_alias_target ())
2903 {
2904 if (dump_file)
464d0118
ML
2905 fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2906 "target (%s -> %s) but the call is already "
2907 "speculated to %s. Giving up.\n",
2908 ie->caller->dump_name (), callee->dump_name (),
2909 e2->callee->dump_name ());
0127c169
JH
2910 }
2911 else
2912 {
2913 if (dump_file)
2914 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
464d0118
ML
2915 "(%s -> %s) this agree with previous speculation.\n",
2916 ie->caller->dump_name (), callee->dump_name ());
0127c169
JH
2917 }
2918 return NULL;
2919 }
2920
2b5f0895
XDL
2921 if (!dbg_cnt (devirt))
2922 return NULL;
2923
1dbee8c9 2924 ipa_check_create_node_params ();
ceeffab0 2925
81fa35bd
MJ
2926 /* We can not make edges to inline clones. It is bug that someone removed
2927 the cgraph node too early. */
17afc0fe
JH
2928 gcc_assert (!callee->global.inlined_to);
2929
48b1474e 2930 if (dump_file && !unreachable)
b258210c 2931 {
5ce97055 2932 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
464d0118 2933 "(%s -> %s), for stmt ",
b258210c 2934 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2935 speculative ? "speculative" : "known",
464d0118
ML
2936 ie->caller->dump_name (),
2937 callee->dump_name ());
b258210c
MJ
2938 if (ie->call_stmt)
2939 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2940 else
2941 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2942 }
2b5f0895
XDL
2943 if (dump_enabled_p ())
2944 {
807b7d62 2945 location_t loc = gimple_location_safe (ie->call_stmt);
3c9e6fca 2946
807b7d62
ML
2947 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2948 "converting indirect call in %s to direct call to %s\n",
2949 ie->caller->name (), callee->name ());
2b5f0895 2950 }
5ce97055 2951 if (!speculative)
d8d5aef1
JH
2952 {
2953 struct cgraph_edge *orig = ie;
2954 ie = ie->make_direct (callee);
2955 /* If we resolved speculative edge the cost is already up to date
2956 for direct call (adjusted by inline_edge_duplication_hook). */
2957 if (ie == orig)
2958 {
99353fcf 2959 es = ipa_call_summaries->get_create (ie);
d8d5aef1
JH
2960 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2961 - eni_size_weights.call_cost);
2962 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2963 - eni_time_weights.call_cost);
2964 }
2965 }
5ce97055
JH
2966 else
2967 {
2968 if (!callee->can_be_discarded_p ())
2969 {
2970 cgraph_node *alias;
2971 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2972 if (alias)
2973 callee = alias;
2974 }
d8d5aef1 2975 /* make_speculative will update ie's cost to direct call cost. */
5ce97055 2976 ie = ie->make_speculative
1bad9c18 2977 (callee, ie->count.apply_scale (8, 10));
5ce97055 2978 }
749aa96d 2979
b258210c 2980 return ie;
3e293154
MJ
2981}
2982
91bb9f80
MJ
2983/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2984 CONSTRUCTOR and return it. Return NULL if the search fails for some
2985 reason. */
2986
2987static tree
2988find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2989{
2990 tree type = TREE_TYPE (constructor);
2991 if (TREE_CODE (type) != ARRAY_TYPE
2992 && TREE_CODE (type) != RECORD_TYPE)
2993 return NULL;
2994
2995 unsigned ix;
2996 tree index, val;
2997 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2998 {
2999 HOST_WIDE_INT elt_offset;
3000 if (TREE_CODE (type) == ARRAY_TYPE)
3001 {
3002 offset_int off;
3003 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3004 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3005
3006 if (index)
3007 {
db9bbdec
RB
3008 if (TREE_CODE (index) == RANGE_EXPR)
3009 off = wi::to_offset (TREE_OPERAND (index, 0));
3010 else
3011 off = wi::to_offset (index);
91bb9f80
MJ
3012 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3013 {
3014 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3015 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3016 off = wi::sext (off - wi::to_offset (low_bound),
3017 TYPE_PRECISION (TREE_TYPE (index)));
3018 }
3019 off *= wi::to_offset (unit_size);
db9bbdec
RB
3020 /* ??? Handle more than just the first index of a
3021 RANGE_EXPR. */
91bb9f80
MJ
3022 }
3023 else
3024 off = wi::to_offset (unit_size) * ix;
3025
3026 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3027 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3028 continue;
3029 elt_offset = off.to_shwi ();
3030 }
3031 else if (TREE_CODE (type) == RECORD_TYPE)
3032 {
3033 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3034 if (DECL_BIT_FIELD (index))
3035 continue;
3036 elt_offset = int_bit_position (index);
3037 }
3038 else
3039 gcc_unreachable ();
3040
3041 if (elt_offset > req_offset)
3042 return NULL;
3043
3044 if (TREE_CODE (val) == CONSTRUCTOR)
3045 return find_constructor_constant_at_offset (val,
3046 req_offset - elt_offset);
3047
3048 if (elt_offset == req_offset
3049 && is_gimple_reg_type (TREE_TYPE (val))
3050 && is_gimple_ip_invariant (val))
3051 return val;
3052 }
3053 return NULL;
3054}
3055
3056/* Check whether SCALAR could be used to look up an aggregate interprocedural
3057 invariant from a static constructor and if so, return it. Otherwise return
3058 NULL. */
3059
3060static tree
3061ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3062{
3063 if (by_ref)
3064 {
3065 if (TREE_CODE (scalar) != ADDR_EXPR)
3066 return NULL;
3067 scalar = TREE_OPERAND (scalar, 0);
3068 }
3069
8813a647 3070 if (!VAR_P (scalar)
91bb9f80
MJ
3071 || !is_global_var (scalar)
3072 || !TREE_READONLY (scalar)
3073 || !DECL_INITIAL (scalar)
3074 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3075 return NULL;
3076
3077 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3078}
3079
3080/* Retrieve value from aggregate jump function AGG or static initializer of
3081 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3082 none. BY_REF specifies whether the value has to be passed by reference or
3083 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3084 to is set to true if the value comes from an initializer of a constant. */
8b7773a4
MJ
3085
3086tree
91bb9f80
MJ
3087ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3088 HOST_WIDE_INT offset, bool by_ref,
3089 bool *from_global_constant)
8b7773a4
MJ
3090{
3091 struct ipa_agg_jf_item *item;
3092 int i;
3093
91bb9f80
MJ
3094 if (scalar)
3095 {
3096 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3097 if (res)
3098 {
3099 if (from_global_constant)
3100 *from_global_constant = true;
3101 return res;
3102 }
3103 }
3104
3105 if (!agg
3106 || by_ref != agg->by_ref)
8b7773a4
MJ
3107 return NULL;
3108
9771b263 3109 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
3110 if (item->offset == offset)
3111 {
3112 /* Currently we do not have clobber values, return NULL for them once
3113 we do. */
3114 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3115 if (from_global_constant)
3116 *from_global_constant = false;
2c9561b5
MJ
3117 return item->value;
3118 }
8b7773a4
MJ
3119 return NULL;
3120}
3121
4502fe8d 3122/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3123 reference description RDESC. Return true if the reference has been
3124 successfully found and removed. */
4502fe8d 3125
568cda29 3126static bool
5e20cdc9 3127remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3128{
3129 struct ipa_ref *to_del;
3130 struct cgraph_edge *origin;
3131
3132 origin = rdesc->cs;
a854f856
MJ
3133 if (!origin)
3134 return false;
d122681a
ML
3135 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3136 origin->lto_stmt_uid);
568cda29
MJ
3137 if (!to_del)
3138 return false;
3139
d122681a 3140 to_del->remove_reference ();
4502fe8d 3141 if (dump_file)
464d0118
ML
3142 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3143 origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
568cda29 3144 return true;
4502fe8d
MJ
3145}
3146
3147/* If JFUNC has a reference description with refcount different from
3148 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3149 NULL. JFUNC must be a constant jump function. */
3150
3151static struct ipa_cst_ref_desc *
3152jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3153{
3154 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3155 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3156 return rdesc;
3157 else
3158 return NULL;
3159}
3160
568cda29
MJ
3161/* If the value of constant jump function JFUNC is an address of a function
3162 declaration, return the associated call graph node. Otherwise return
3163 NULL. */
3164
3165static cgraph_node *
3166cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3167{
3168 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3169 tree cst = ipa_get_jf_constant (jfunc);
3170 if (TREE_CODE (cst) != ADDR_EXPR
3171 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3172 return NULL;
3173
d52f5295 3174 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3175}
3176
3177
3178/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3179 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3180 the edge specified in the rdesc. Return false if either the symbol or the
3181 reference could not be found, otherwise return true. */
3182
3183static bool
3184try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3185{
3186 struct ipa_cst_ref_desc *rdesc;
3187 if (jfunc->type == IPA_JF_CONST
3188 && (rdesc = jfunc_rdesc_usable (jfunc))
3189 && --rdesc->refcount == 0)
3190 {
5e20cdc9 3191 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3192 if (!symbol)
3193 return false;
3194
3195 return remove_described_reference (symbol, rdesc);
3196 }
3197 return true;
3198}
3199
b258210c
MJ
3200/* Try to find a destination for indirect edge IE that corresponds to a simple
3201 call or a call of a member function pointer and where the destination is a
e5cf5e11
PK
3202 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3203 the type of the parameter to which the result of JFUNC is passed. If it can
3204 be determined, return the newly direct edge, otherwise return NULL.
d250540a 3205 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 3206
b258210c
MJ
3207static struct cgraph_edge *
3208try_make_edge_direct_simple_call (struct cgraph_edge *ie,
e5cf5e11 3209 struct ipa_jump_func *jfunc, tree target_type,
d250540a 3210 struct ipa_node_params *new_root_info)
b258210c 3211{
4502fe8d 3212 struct cgraph_edge *cs;
b258210c 3213 tree target;
042ae7d2 3214 bool agg_contents = ie->indirect_info->agg_contents;
e5cf5e11 3215 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
91bb9f80
MJ
3216 if (agg_contents)
3217 {
3218 bool from_global_constant;
3219 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3220 ie->indirect_info->offset,
3221 ie->indirect_info->by_ref,
3222 &from_global_constant);
3223 if (target
3224 && !from_global_constant
3225 && !ie->indirect_info->guaranteed_unmodified)
3226 return NULL;
3227 }
b258210c 3228 else
91bb9f80 3229 target = scalar;
d250540a
MJ
3230 if (!target)
3231 return NULL;
4502fe8d
MJ
3232 cs = ipa_make_edge_direct_to_target (ie, target);
3233
a12cd2db 3234 if (cs && !agg_contents)
568cda29
MJ
3235 {
3236 bool ok;
3237 gcc_checking_assert (cs->callee
ae6d0907
MJ
3238 && (cs != ie
3239 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3240 || !cgraph_node_for_jfunc (jfunc)
3241 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3242 ok = try_decrement_rdesc_refcount (jfunc);
3243 gcc_checking_assert (ok);
3244 }
4502fe8d
MJ
3245
3246 return cs;
b258210c
MJ
3247}
3248
bec81025
MJ
3249/* Return the target to be used in cases of impossible devirtualization. IE
3250 and target (the latter can be NULL) are dumped when dumping is enabled. */
3251
72972c22
MJ
3252tree
3253ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3254{
3255 if (dump_file)
3256 {
3257 if (target)
3258 fprintf (dump_file,
464d0118
ML
3259 "Type inconsistent devirtualization: %s->%s\n",
3260 ie->caller->dump_name (),
bec81025
MJ
3261 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3262 else
3263 fprintf (dump_file,
464d0118
ML
3264 "No devirtualization target in %s\n",
3265 ie->caller->dump_name ());
bec81025
MJ
3266 }
3267 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3268 cgraph_node::get_create (new_target);
bec81025
MJ
3269 return new_target;
3270}
3271
d250540a
MJ
3272/* Try to find a destination for indirect edge IE that corresponds to a virtual
3273 call based on a formal parameter which is described by jump function JFUNC
3274 and if it can be determined, make it direct and return the direct edge.
44210a96
MJ
3275 Otherwise, return NULL. CTX describes the polymorphic context that the
3276 parameter the call is based on brings along with it. */
b258210c
MJ
3277
3278static struct cgraph_edge *
3279try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a 3280 struct ipa_jump_func *jfunc,
44210a96 3281 struct ipa_polymorphic_call_context ctx)
3e293154 3282{
44210a96 3283 tree target = NULL;
5ce97055 3284 bool speculative = false;
85942f45 3285
2bf86c84 3286 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
85942f45 3287 return NULL;
b258210c 3288
44210a96 3289 gcc_assert (!ie->indirect_info->by_ref);
5ce97055
JH
3290
3291 /* Try to do lookup via known virtual table pointer value. */
2bf86c84
JH
3292 if (!ie->indirect_info->vptr_changed
3293 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
85942f45 3294 {
9de2f554
JH
3295 tree vtable;
3296 unsigned HOST_WIDE_INT offset;
91bb9f80
MJ
3297 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3298 : NULL;
3299 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
85942f45
JH
3300 ie->indirect_info->offset,
3301 true);
9de2f554
JH
3302 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3303 {
2994ab20 3304 bool can_refer;
0127c169 3305 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2994ab20
JH
3306 vtable, offset, &can_refer);
3307 if (can_refer)
9de2f554 3308 {
2994ab20
JH
3309 if (!t
3310 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3311 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
9de2f554 3312 || !possible_polymorphic_call_target_p
0127c169
JH
3313 (ie, cgraph_node::get (t)))
3314 {
33c3b6be 3315 /* Do not speculate builtin_unreachable, it is stupid! */
0127c169
JH
3316 if (!ie->indirect_info->vptr_changed)
3317 target = ipa_impossible_devirt_target (ie, target);
2994ab20
JH
3318 else
3319 target = NULL;
0127c169
JH
3320 }
3321 else
3322 {
3323 target = t;
3324 speculative = ie->indirect_info->vptr_changed;
3325 }
9de2f554
JH
3326 }
3327 }
85942f45
JH
3328 }
3329
44210a96
MJ
3330 ipa_polymorphic_call_context ie_context (ie);
3331 vec <cgraph_node *>targets;
3332 bool final;
d250540a 3333
44210a96
MJ
3334 ctx.offset_by (ie->indirect_info->offset);
3335 if (ie->indirect_info->vptr_changed)
3336 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3337 ie->indirect_info->otr_type);
3338 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3339 targets = possible_polymorphic_call_targets
3340 (ie->indirect_info->otr_type,
3341 ie->indirect_info->otr_token,
3342 ctx, &final);
3343 if (final && targets.length () <= 1)
5ce97055 3344 {
33c3b6be 3345 speculative = false;
44210a96
MJ
3346 if (targets.length () == 1)
3347 target = targets[0]->decl;
3348 else
3349 target = ipa_impossible_devirt_target (ie, NULL_TREE);
5ce97055 3350 }
2bf86c84 3351 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
44210a96 3352 && !ie->speculative && ie->maybe_hot_p ())
5bccb77a 3353 {
44210a96
MJ
3354 cgraph_node *n;
3355 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3356 ie->indirect_info->otr_token,
3357 ie->indirect_info->context);
3358 if (n)
5ce97055 3359 {
44210a96
MJ
3360 target = n->decl;
3361 speculative = true;
5ce97055 3362 }
5bccb77a 3363 }
b258210c
MJ
3364
3365 if (target)
450ad0cd 3366 {
44210a96
MJ
3367 if (!possible_polymorphic_call_target_p
3368 (ie, cgraph_node::get_create (target)))
0127c169 3369 {
29c43c83 3370 if (speculative)
0127c169
JH
3371 return NULL;
3372 target = ipa_impossible_devirt_target (ie, target);
3373 }
5ce97055 3374 return ipa_make_edge_direct_to_target (ie, target, speculative);
450ad0cd 3375 }
b258210c
MJ
3376 else
3377 return NULL;
3e293154
MJ
3378}
3379
3380/* Update the param called notes associated with NODE when CS is being inlined,
3381 assuming NODE is (potentially indirectly) inlined into CS->callee.
3382 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 3383 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 3384 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 3385
f8e2a1ed 3386static bool
e33c6cd6
MJ
3387update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3388 struct cgraph_node *node,
d52f5295 3389 vec<cgraph_edge *> *new_edges)
3e293154 3390{
9e97ff61 3391 struct ipa_edge_args *top;
b258210c 3392 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
e5cf5e11 3393 struct ipa_node_params *new_root_info, *inlined_node_info;
f8e2a1ed 3394 bool res = false;
3e293154 3395
e33c6cd6 3396 ipa_check_create_edge_args ();
9e97ff61 3397 top = IPA_EDGE_REF (cs);
d250540a
MJ
3398 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3399 ? cs->caller->global.inlined_to
3400 : cs->caller);
e5cf5e11 3401 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
e33c6cd6
MJ
3402
3403 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 3404 {
e33c6cd6 3405 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 3406 struct ipa_jump_func *jfunc;
8b7773a4 3407 int param_index;
3ff29913 3408 cgraph_node *spec_target = NULL;
3e293154 3409
e33c6cd6 3410 next_ie = ie->next_callee;
3e293154 3411
5f902d76
JH
3412 if (ici->param_index == -1)
3413 continue;
e33c6cd6 3414
3e293154 3415 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 3416 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 3417 {
5ee53a06 3418 ici->param_index = -1;
3e293154
MJ
3419 continue;
3420 }
3421
8b7773a4
MJ
3422 param_index = ici->param_index;
3423 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06 3424
3ff29913
JH
3425 if (ie->speculative)
3426 {
3427 struct cgraph_edge *de;
3428 struct ipa_ref *ref;
3429 ie->speculative_call_info (de, ie, ref);
3430 spec_target = de->callee;
3431 }
3432
2bf86c84 3433 if (!opt_for_fn (node->decl, flag_indirect_inlining))
36b72910
JH
3434 new_direct_edge = NULL;
3435 else if (ici->polymorphic)
5ce97055 3436 {
44210a96
MJ
3437 ipa_polymorphic_call_context ctx;
3438 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3439 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
5ce97055 3440 }
b258210c 3441 else
e5cf5e11
PK
3442 {
3443 tree target_type = ipa_get_type (inlined_node_info, param_index);
3444 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3445 target_type,
3446 new_root_info);
3447 }
3448
042ae7d2 3449 /* If speculation was removed, then we need to do nothing. */
3ff29913
JH
3450 if (new_direct_edge && new_direct_edge != ie
3451 && new_direct_edge->callee == spec_target)
042ae7d2
JH
3452 {
3453 new_direct_edge->indirect_inlining_edge = 1;
3454 top = IPA_EDGE_REF (cs);
3455 res = true;
73d098df
JH
3456 if (!new_direct_edge->speculative)
3457 continue;
042ae7d2
JH
3458 }
3459 else if (new_direct_edge)
685b0d13 3460 {
b258210c 3461 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
3462 if (new_direct_edge->call_stmt)
3463 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
3464 = !gimple_check_call_matching_types (
3465 new_direct_edge->call_stmt,
67348ccc 3466 new_direct_edge->callee->decl, false);
b258210c
MJ
3467 if (new_edges)
3468 {
9771b263 3469 new_edges->safe_push (new_direct_edge);
b258210c
MJ
3470 res = true;
3471 }
042ae7d2 3472 top = IPA_EDGE_REF (cs);
3ff29913
JH
3473 /* If speculative edge was introduced we still need to update
3474 call info of the indirect edge. */
3475 if (!new_direct_edge->speculative)
3476 continue;
685b0d13 3477 }
3ff29913
JH
3478 if (jfunc->type == IPA_JF_PASS_THROUGH
3479 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
36b72910 3480 {
d0502276
JH
3481 if (ici->agg_contents
3482 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3483 && !ici->polymorphic)
36b72910
JH
3484 ici->param_index = -1;
3485 else
d0502276
JH
3486 {
3487 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3488 if (ici->polymorphic
3489 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3490 ici->vptr_changed = true;
3491 }
36b72910
JH
3492 }
3493 else if (jfunc->type == IPA_JF_ANCESTOR)
3494 {
d0502276
JH
3495 if (ici->agg_contents
3496 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3497 && !ici->polymorphic)
36b72910
JH
3498 ici->param_index = -1;
3499 else
3500 {
3501 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3502 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
d0502276
JH
3503 if (ici->polymorphic
3504 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3505 ici->vptr_changed = true;
36b72910
JH
3506 }
3507 }
3508 else
3509 /* Either we can find a destination for this edge now or never. */
3510 ici->param_index = -1;
3e293154 3511 }
e33c6cd6 3512
f8e2a1ed 3513 return res;
3e293154
MJ
3514}
3515
3516/* Recursively traverse subtree of NODE (including node) made of inlined
3517 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3518 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3519 update_jump_functions_after_inlining on all non-inlined edges that lead out
3520 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3521 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3522 created. */
be95e2b9 3523
f8e2a1ed 3524static bool
3e293154
MJ
3525propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3526 struct cgraph_node *node,
d52f5295 3527 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3528{
3529 struct cgraph_edge *e;
f8e2a1ed 3530 bool res;
3e293154 3531
e33c6cd6 3532 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3533
3534 for (e = node->callees; e; e = e->next_callee)
3535 if (!e->inline_failed)
f8e2a1ed 3536 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3537 else
3538 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3539 for (e = node->indirect_calls; e; e = e->next_callee)
3540 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3541
3542 return res;
3e293154
MJ
3543}
3544
4502fe8d
MJ
3545/* Combine two controlled uses counts as done during inlining. */
3546
3547static int
3548combine_controlled_uses_counters (int c, int d)
3549{
3550 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3551 return IPA_UNDESCRIBED_USE;
3552 else
3553 return c + d - 1;
3554}
3555
3556/* Propagate number of controlled users from CS->caleee to the new root of the
3557 tree of inlined nodes. */
3558
3559static void
3560propagate_controlled_uses (struct cgraph_edge *cs)
3561{
3562 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3563 struct cgraph_node *new_root = cs->caller->global.inlined_to
3564 ? cs->caller->global.inlined_to : cs->caller;
3565 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3566 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3567 int count, i;
3568
3569 count = MIN (ipa_get_cs_argument_count (args),
3570 ipa_get_param_count (old_root_info));
3571 for (i = 0; i < count; i++)
3572 {
3573 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3574 struct ipa_cst_ref_desc *rdesc;
3575
3576 if (jf->type == IPA_JF_PASS_THROUGH)
3577 {
3578 int src_idx, c, d;
3579 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3580 c = ipa_get_controlled_uses (new_root_info, src_idx);
3581 d = ipa_get_controlled_uses (old_root_info, i);
3582
3583 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3584 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3585 c = combine_controlled_uses_counters (c, d);
3586 ipa_set_controlled_uses (new_root_info, src_idx, c);
3587 if (c == 0 && new_root_info->ipcp_orig_node)
3588 {
3589 struct cgraph_node *n;
3590 struct ipa_ref *ref;
44210a96 3591 tree t = new_root_info->known_csts[src_idx];
4502fe8d
MJ
3592
3593 if (t && TREE_CODE (t) == ADDR_EXPR
3594 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
d52f5295 3595 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
d122681a 3596 && (ref = new_root->find_reference (n, NULL, 0)))
4502fe8d
MJ
3597 {
3598 if (dump_file)
3599 fprintf (dump_file, "ipa-prop: Removing cloning-created "
464d0118
ML
3600 "reference from %s to %s.\n",
3601 new_root->dump_name (),
3602 n->dump_name ());
d122681a 3603 ref->remove_reference ();
4502fe8d
MJ
3604 }
3605 }
3606 }
3607 else if (jf->type == IPA_JF_CONST
3608 && (rdesc = jfunc_rdesc_usable (jf)))
3609 {
3610 int d = ipa_get_controlled_uses (old_root_info, i);
3611 int c = rdesc->refcount;
3612 rdesc->refcount = combine_controlled_uses_counters (c, d);
3613 if (rdesc->refcount == 0)
3614 {
3615 tree cst = ipa_get_jf_constant (jf);
3616 struct cgraph_node *n;
3617 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3618 && TREE_CODE (TREE_OPERAND (cst, 0))
3619 == FUNCTION_DECL);
d52f5295 3620 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4502fe8d
MJ
3621 if (n)
3622 {
3623 struct cgraph_node *clone;
568cda29 3624 bool ok;
67348ccc 3625 ok = remove_described_reference (n, rdesc);
568cda29 3626 gcc_checking_assert (ok);
4502fe8d
MJ
3627
3628 clone = cs->caller;
3629 while (clone->global.inlined_to
3630 && clone != rdesc->cs->caller
3631 && IPA_NODE_REF (clone)->ipcp_orig_node)
3632 {
3633 struct ipa_ref *ref;
d122681a 3634 ref = clone->find_reference (n, NULL, 0);
4502fe8d
MJ
3635 if (ref)
3636 {
3637 if (dump_file)
3638 fprintf (dump_file, "ipa-prop: Removing "
3639 "cloning-created reference "
464d0118
ML
3640 "from %s to %s.\n",
3641 clone->dump_name (),
3642 n->dump_name ());
d122681a 3643 ref->remove_reference ();
4502fe8d
MJ
3644 }
3645 clone = clone->callers->caller;
3646 }
3647 }
3648 }
3649 }
3650 }
3651
3652 for (i = ipa_get_param_count (old_root_info);
3653 i < ipa_get_cs_argument_count (args);
3654 i++)
3655 {
3656 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3657
3658 if (jf->type == IPA_JF_CONST)
3659 {
3660 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3661 if (rdesc)
3662 rdesc->refcount = IPA_UNDESCRIBED_USE;
3663 }
3664 else if (jf->type == IPA_JF_PASS_THROUGH)
3665 ipa_set_controlled_uses (new_root_info,
3666 jf->value.pass_through.formal_id,
3667 IPA_UNDESCRIBED_USE);
3668 }
3669}
3670
3e293154
MJ
3671/* Update jump functions and call note functions on inlining the call site CS.
3672 CS is expected to lead to a node already cloned by
3673 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3674 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3675 created. */
be95e2b9 3676
f8e2a1ed 3677bool
3e293154 3678ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3679 vec<cgraph_edge *> *new_edges)
3e293154 3680{
5ee53a06 3681 bool changed;
f8e2a1ed
MJ
3682 /* Do nothing if the preparation phase has not been carried out yet
3683 (i.e. during early inlining). */
dd912cb8 3684 if (!ipa_node_params_sum)
f8e2a1ed 3685 return false;
6fe906a3 3686 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3687
4502fe8d 3688 propagate_controlled_uses (cs);
5ee53a06
JH
3689 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3690
5ee53a06 3691 return changed;
518dc859
RL
3692}
3693
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    /* Placement-new the summary into GC-allocated, zero-initialized
       storage so it survives garbage collection.  */
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  /* 37 is just a reasonable prime for the initial hash table size.  */
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
3710
771578a0 3711/* Free all ipa_edge structures. */
be95e2b9 3712
518dc859 3713void
771578a0 3714ipa_free_all_edge_args (void)
518dc859 3715{
6fe906a3 3716 if (!ipa_edge_args_sum)
9771b263
DN
3717 return;
3718
6fe906a3
MJ
3719 ipa_edge_args_sum->release ();
3720 ipa_edge_args_sum = NULL;
518dc859
RL
3721}
3722
771578a0 3723/* Free all ipa_node_params structures. */
be95e2b9 3724
518dc859 3725void
771578a0 3726ipa_free_all_node_params (void)
518dc859 3727{
a0a348b1 3728 ipa_node_params_sum->release ();
dd912cb8 3729 ipa_node_params_sum = NULL;
771578a0
MJ
3730}
3731
/* Grow ipcp_transformations if necessary.  Also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  /* The vector is indexed by cgraph uid, so it must cover every uid
     handed out so far; newly added slots are zero-initialized.  */
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
3746
/* Set the aggregate replacements of NODE to be AGGVALS.  Grows the
   transformation vector first so NODE's uid is a valid index.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
3756
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* If the reference descriptor still points at the edge being
	     removed, clear the back-pointer so it does not dangle.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
3778
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow vec copy above shared the agg items; deep-copy them
	 so the two edges do not alias.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. call statement
		 redirection): clone the IPA reference and give the new
		 edge its own, independent descriptor.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC owns the descriptor: create a duplicate for DST and
	         thread it onto the descriptor's duplicate chain.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A duplicated pass-through means one more controlled use of the
	     corresponding formal parameter of the inline tree root.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
3872
dd912cb8 3873/* Analyze newly added function into callgraph. */
be95e2b9 3874
771578a0 3875static void
dd912cb8 3876ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3877{
dd912cb8
ML
3878 if (node->has_gimple_body_p ())
3879 ipa_analyze_node (node);
3880}
771578a0 3881
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are propagation scratch state and are not copied.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the aggregate replacement chain.  NOTE(review): head
	 insertion reverses the list order relative to SRC — presumably
	 the order of the chain does not matter to consumers; confirm.  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans
    = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_grow_transformations_if_necessary ();
      /* Re-fetch SRC's summary: growing the vector may have moved it.  */
      src_trans = ipcp_get_transformation_summary (src);
      ipcp_transformation_summary *dst_trans
	= ipcp_get_transformation_summary (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
	= ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
	{
	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
	  for (unsigned i = 0; i < src_vr->length (); ++i)
	    dst_vr->quick_push ((*src_vr)[i]);
	}
    }
}
3941
/* Register our cgraph hooks if they are not already there.  Also makes sure
   the node-params and edge-args summaries exist.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
518dc859 3953
/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  /* Clear the holder so a later re-registration starts from scratch.  */
  function_insertion_hook_holder = NULL;
}
3962
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  /* When optimizing (or in LTO), later passes still need these summaries,
     so only free them when neither applies.  */
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
3981
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the post-ipa-cp variant,
   this frees unconditionally.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
3997
/* Print the parameter descriptors (use/controlled-use information) of
   NODE to F.  Does nothing for declarations without a definition.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
dcd416e3 4028
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
3f84bf08 4041
/* Dump the AV linked list of aggregate replacement values to F, as a
   comma-separated "index[offset]=value" list on one line.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "    Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
4058
/* Stream out jump function JUMP_FUNC to OB.  The write order here defines
   the on-disk LTO format and must stay in sync with
   ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved flag.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	/* Unary operations have no extra operand to stream.  */
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  /* Arithmetic pass-through: second operand first, then the id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function part: count, by_ref flag, then the items.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-bits information, guarded by a presence bit.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  /* Value-range information, likewise guarded by a presence bit.  */
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->type);
      stream_write_tree (ob, jump_func->m_vr->min, true);
      stream_write_tree (ob, jump_func->m_vr->max, true);
    }
}
4139
/* Read in jump function JUMP_FUNC from IB.  Must mirror the write order in
   ipa_write_jump_function exactly.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  /* Aggregate part: count, optional by_ref flag, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  /* Known-bits information, guarded by a presence bit.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  /* Value-range information, guarded by a presence bit.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_type type = streamer_read_enum (ib, value_range_type,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
4233
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must stay in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful for aggregate or polymorphic calls;
     otherwise assert it is zero rather than waste stream space.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4265
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirrors
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4296
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* The argument count is doubled and its lowest bit encodes whether
	 polymorphic call contexts are present.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
4359
/* Stream in NODE info from IB.  Mirrors ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    {
      ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
      (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* The lowest bit of COUNT says whether polymorphic call contexts
	 were streamed; the rest is the argument count.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4431
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if the IPA summaries were never computed.  */
  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes we will emit, so the reader knows how
     many records to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4473
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4514
/* Read ipcp jump functions.  Iterates over all LTO input files and decodes
   their jump-function sections, creating the summaries first.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);

      /* A file may have no jump-function section at all.  */
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
4537
2c9561b5 4538void
04be694e 4539write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
4540{
4541 int node_ref;
4542 unsigned int count = 0;
4543 lto_symtab_encoder_t encoder;
4544 struct ipa_agg_replacement_value *aggvals, *av;
4545
4546 aggvals = ipa_get_agg_replacements_for_node (node);
4547 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4548 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4549 streamer_write_uhwi (ob, node_ref);
4550
4551 for (av = aggvals; av; av = av->next)
4552 count++;
4553 streamer_write_uhwi (ob, count);
4554
4555 for (av = aggvals; av; av = av->next)
4556 {
7b920a9a
MJ
4557 struct bitpack_d bp;
4558
2c9561b5
MJ
4559 streamer_write_uhwi (ob, av->offset);
4560 streamer_write_uhwi (ob, av->index);
4561 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4562
4563 bp = bitpack_create (ob->main_stream);
4564 bp_pack_value (&bp, av->by_ref, 1);
4565 streamer_write_bitpack (&bp);
2c9561b5 4566 }
04be694e
MJ
4567
4568 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
4569 if (ts && vec_safe_length (ts->m_vr) > 0)
4570 {
4571 count = ts->m_vr->length ();
4572 streamer_write_uhwi (ob, count);
4573 for (unsigned i = 0; i < count; ++i)
4574 {
4575 struct bitpack_d bp;
4576 ipa_vr *parm_vr = &(*ts->m_vr)[i];
4577 bp = bitpack_create (ob->main_stream);
4578 bp_pack_value (&bp, parm_vr->known, 1);
4579 streamer_write_bitpack (&bp);
4580 if (parm_vr->known)
4581 {
4582 streamer_write_enum (ob->main_stream, value_rang_type,
4583 VR_LAST, parm_vr->type);
4584 streamer_write_wide_int (ob, parm_vr->min);
4585 streamer_write_wide_int (ob, parm_vr->max);
4586 }
4587 }
4588 }
4589 else
4590 streamer_write_uhwi (ob, 0);
4591
209ca542
PK
4592 if (ts && vec_safe_length (ts->bits) > 0)
4593 {
4594 count = ts->bits->length ();
4595 streamer_write_uhwi (ob, count);
4596
4597 for (unsigned i = 0; i < count; ++i)
4598 {
86cd0334 4599 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 4600 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 4601 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 4602 streamer_write_bitpack (&bp);
86cd0334 4603 if (bits_jfunc)
209ca542 4604 {
86cd0334
MJ
4605 streamer_write_widest_int (ob, bits_jfunc->value);
4606 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
4607 }
4608 }
4609 }
4610 else
4611 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
4612}
4613
/* Stream in the IPA-CP transformation summary for NODE from IB: the
   aggregate value replacement chain, then the per-parameter value ranges
   and known-bits vectors.  Must mirror write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Records are prepended, so the in-memory chain ends up in reverse
	 stream order.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Value ranges; a count of zero means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_type,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Known bits; a count of zero means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	  /* Absent entries stay NULL from vec_safe_grow_cleared.  */
	}
    }
}
4685
/* Write all aggregate replacement for nodes in set.  Produces the
   LTO_section_ipcp_transform section: a count of function nodes with
   gimple bodies in the partition followed by one transformation record
   per such node.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First walk only counts the nodes so the reader knows how many
     records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second walk streams the actual per-node records; the filter must
     match the counting walk above.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4721
4722/* Read replacements section in file FILE_DATA of length LEN with data
4723 DATA. */
4724
4725static void
4726read_replacements_section (struct lto_file_decl_data *file_data,
4727 const char *data,
4728 size_t len)
4729{
4730 const struct lto_function_header *header =
4731 (const struct lto_function_header *) data;
4732 const int cfg_offset = sizeof (struct lto_function_header);
4733 const int main_offset = cfg_offset + header->cfg_size;
4734 const int string_offset = main_offset + header->main_size;
4735 struct data_in *data_in;
2c9561b5
MJ
4736 unsigned int i;
4737 unsigned int count;
4738
207c68cd 4739 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 4740 header->main_size, file_data->mode_table);
2c9561b5
MJ
4741
4742 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4743 header->string_size, vNULL);
2c9561b5
MJ
4744 count = streamer_read_uhwi (&ib_main);
4745
4746 for (i = 0; i < count; i++)
4747 {
4748 unsigned int index;
4749 struct cgraph_node *node;
4750 lto_symtab_encoder_t encoder;
4751
4752 index = streamer_read_uhwi (&ib_main);
4753 encoder = file_data->symtab_node_encoder;
d52f5295
ML
4754 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4755 index));
67348ccc 4756 gcc_assert (node->definition);
04be694e 4757 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
4758 }
4759 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4760 len);
4761 lto_data_in_delete (data_in);
4762}
4763
4764/* Read IPA-CP aggregate replacements. */
4765
4766void
04be694e 4767ipcp_read_transformation_summaries (void)
2c9561b5
MJ
4768{
4769 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4770 struct lto_file_decl_data *file_data;
4771 unsigned int j = 0;
4772
4773 while ((file_data = file_data_vec[j++]))
4774 {
4775 size_t len;
4776 const char *data = lto_get_section_data (file_data,
4777 LTO_section_ipcp_transform,
4778 NULL, &len);
4779 if (data)
4780 read_replacements_section (file_data, data, len);
4781 }
4782}
4783
4784/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4785 NODE. */
4786
4787static void
4788adjust_agg_replacement_values (struct cgraph_node *node,
4789 struct ipa_agg_replacement_value *aggval)
4790{
4791 struct ipa_agg_replacement_value *v;
4792 int i, c = 0, d = 0, *adj;
4793
4794 if (!node->clone.combined_args_to_skip)
4795 return;
4796
4797 for (v = aggval; v; v = v->next)
4798 {
4799 gcc_assert (v->index >= 0);
4800 if (c < v->index)
4801 c = v->index;
4802 }
4803 c++;
4804
4805 adj = XALLOCAVEC (int, c);
4806 for (i = 0; i < c; i++)
4807 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4808 {
4809 adj[i] = -1;
4810 d++;
4811 }
4812 else
4813 adj[i] = i - d;
4814
4815 for (v = aggval; v; v = v->next)
4816 v->index = adj[v->index];
4817}
4818
8aab5218
MJ
4819/* Dominator walker driving the ipcp modification phase. */
4820
4821class ipcp_modif_dom_walker : public dom_walker
4822{
4823public:
56b40062 4824 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
f65f1ae3 4825 vec<ipa_param_descriptor, va_gc> *descs,
8aab5218
MJ
4826 struct ipa_agg_replacement_value *av,
4827 bool *sc, bool *cc)
4828 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4829 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4830
3daacdcd 4831 virtual edge before_dom_children (basic_block);
8aab5218
MJ
4832
4833private:
56b40062 4834 struct ipa_func_body_info *m_fbi;
f65f1ae3 4835 vec<ipa_param_descriptor, va_gc> *m_descriptors;
8aab5218
MJ
4836 struct ipa_agg_replacement_value *m_aggval;
4837 bool *m_something_changed, *m_cfg_changed;
4838};
4839
3daacdcd 4840edge
8aab5218
MJ
4841ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4842{
4843 gimple_stmt_iterator gsi;
4844 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4845 {
4846 struct ipa_agg_replacement_value *v;
355fe088 4847 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
4848 tree rhs, val, t;
4849 HOST_WIDE_INT offset, size;
4850 int index;
4851 bool by_ref, vce;
4852
4853 if (!gimple_assign_load_p (stmt))
4854 continue;
4855 rhs = gimple_assign_rhs1 (stmt);
4856 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4857 continue;
2c9561b5 4858
8aab5218
MJ
4859 vce = false;
4860 t = rhs;
4861 while (handled_component_p (t))
4862 {
4863 /* V_C_E can do things like convert an array of integers to one
4864 bigger integer and similar things we do not handle below. */
4865 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4866 {
4867 vce = true;
4868 break;
4869 }
4870 t = TREE_OPERAND (t, 0);
4871 }
4872 if (vce)
4873 continue;
4874
ff302741
PB
4875 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4876 &offset, &size, &by_ref))
8aab5218
MJ
4877 continue;
4878 for (v = m_aggval; v; v = v->next)
4879 if (v->index == index
4880 && v->offset == offset)
4881 break;
4882 if (!v
4883 || v->by_ref != by_ref
4884 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4885 continue;
4886
4887 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4888 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4889 {
4890 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4891 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4892 else if (TYPE_SIZE (TREE_TYPE (rhs))
4893 == TYPE_SIZE (TREE_TYPE (v->value)))
4894 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4895 else
4896 {
4897 if (dump_file)
4898 {
4899 fprintf (dump_file, " const ");
ef6cb4c7 4900 print_generic_expr (dump_file, v->value);
8aab5218 4901 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 4902 print_generic_expr (dump_file, rhs);
8aab5218
MJ
4903 fprintf (dump_file, "\n");
4904 }
4905 continue;
4906 }
4907 }
4908 else
4909 val = v->value;
4910
4911 if (dump_file && (dump_flags & TDF_DETAILS))
4912 {
4913 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 4914 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4915 }
4916 gimple_assign_set_rhs_from_tree (&gsi, val);
4917 update_stmt (stmt);
4918
4919 if (dump_file && (dump_flags & TDF_DETAILS))
4920 {
4921 fprintf (dump_file, "into:\n ");
ef6cb4c7 4922 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
4923 fprintf (dump_file, "\n");
4924 }
4925
4926 *m_something_changed = true;
4927 if (maybe_clean_eh_stmt (stmt)
4928 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4929 *m_cfg_changed = true;
4930 }
3daacdcd 4931 return NULL;
8aab5218
MJ
4932}
4933
209ca542
PK
4934/* Update bits info of formal parameters as described in
4935 ipcp_transformation_summary. */
4936
4937static void
4938ipcp_update_bits (struct cgraph_node *node)
4939{
4940 tree parm = DECL_ARGUMENTS (node->decl);
4941 tree next_parm = parm;
4942 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4943
4944 if (!ts || vec_safe_length (ts->bits) == 0)
4945 return;
4946
86cd0334 4947 vec<ipa_bits *, va_gc> &bits = *ts->bits;
209ca542
PK
4948 unsigned count = bits.length ();
4949
4950 for (unsigned i = 0; i < count; ++i, parm = next_parm)
4951 {
4952 if (node->clone.combined_args_to_skip
4953 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4954 continue;
4955
4956 gcc_checking_assert (parm);
4957 next_parm = DECL_CHAIN (parm);
4958
86cd0334
MJ
4959 if (!bits[i]
4960 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4961 || POINTER_TYPE_P (TREE_TYPE (parm)))
209ca542 4962 || !is_gimple_reg (parm))
86cd0334 4963 continue;
209ca542
PK
4964
4965 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4966 if (!ddef)
4967 continue;
4968
4969 if (dump_file)
4970 {
86cd0334
MJ
4971 fprintf (dump_file, "Adjusting mask for param %u to ", i);
4972 print_hex (bits[i]->mask, dump_file);
209ca542
PK
4973 fprintf (dump_file, "\n");
4974 }
4975
67b97478
PK
4976 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4977 {
4978 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4979 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4980
86cd0334
MJ
4981 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
4982 | wide_int::from (bits[i]->value, prec, sgn);
67b97478
PK
4983 set_nonzero_bits (ddef, nonzero_bits);
4984 }
4985 else
4986 {
86cd0334
MJ
4987 unsigned tem = bits[i]->mask.to_uhwi ();
4988 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
67b97478
PK
4989 unsigned align = tem & -tem;
4990 unsigned misalign = bitpos & (align - 1);
209ca542 4991
67b97478
PK
4992 if (align > 1)
4993 {
4994 if (dump_file)
4995 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
4996
4997 unsigned old_align, old_misalign;
4998 struct ptr_info_def *pi = get_ptr_info (ddef);
4999 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5000
5001 if (old_known
5002 && old_align > align)
5003 {
5004 if (dump_file)
5005 {
5006 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5007 if ((old_misalign & (align - 1)) != misalign)
5008 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5009 old_misalign, misalign);
5010 }
5011 continue;
5012 }
5013
5014 if (old_known
5015 && ((misalign & (old_align - 1)) != old_misalign)
5016 && dump_file)
5017 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5018 old_misalign, misalign);
5019
5020 set_ptr_info_alignment (pi, align, misalign);
5021 }
5022 }
209ca542
PK
5023 }
5024}
5025
8bc5448f
KV
5026/* Update value range of formal parameters as described in
5027 ipcp_transformation_summary. */
5028
5029static void
5030ipcp_update_vr (struct cgraph_node *node)
5031{
5032 tree fndecl = node->decl;
5033 tree parm = DECL_ARGUMENTS (fndecl);
5034 tree next_parm = parm;
5035 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5036 if (!ts || vec_safe_length (ts->m_vr) == 0)
5037 return;
5038 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5039 unsigned count = vr.length ();
5040
5041 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5042 {
5043 if (node->clone.combined_args_to_skip
5044 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5045 continue;
5046 gcc_checking_assert (parm);
5047 next_parm = DECL_CHAIN (parm);
5048 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5049
5050 if (!ddef || !is_gimple_reg (parm))
5051 continue;
5052
5053 if (vr[i].known
8bc5448f
KV
5054 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5055 {
5056 tree type = TREE_TYPE (ddef);
5057 unsigned prec = TYPE_PRECISION (type);
718625ad
KV
5058 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5059 {
5060 if (dump_file)
5061 {
5062 fprintf (dump_file, "Setting value range of param %u ", i);
5063 fprintf (dump_file, "%s[",
5064 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5065 print_decs (vr[i].min, dump_file);
5066 fprintf (dump_file, ", ");
5067 print_decs (vr[i].max, dump_file);
5068 fprintf (dump_file, "]\n");
5069 }
5070 set_range_info (ddef, vr[i].type,
5071 wide_int_storage::from (vr[i].min, prec,
5072 TYPE_SIGN (type)),
5073 wide_int_storage::from (vr[i].max, prec,
5074 TYPE_SIGN (type)));
5075 }
5076 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5077 && vr[i].type == VR_ANTI_RANGE
5078 && wi::eq_p (vr[i].min, 0)
5079 && wi::eq_p (vr[i].max, 0))
8bc5448f 5080 {
718625ad
KV
5081 if (dump_file)
5082 fprintf (dump_file, "Setting nonnull for %u\n", i);
5083 set_ptr_nonnull (ddef);
8bc5448f 5084 }
8bc5448f
KV
5085 }
5086 }
5087}
5088
8aab5218 5089/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5090
5091unsigned int
5092ipcp_transform_function (struct cgraph_node *node)
5093{
f65f1ae3 5094 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
56b40062 5095 struct ipa_func_body_info fbi;
2c9561b5 5096 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5097 int param_count;
5098 bool cfg_changed = false, something_changed = false;
5099
5100 gcc_checking_assert (cfun);
5101 gcc_checking_assert (current_function_decl);
5102
5103 if (dump_file)
464d0118
ML
5104 fprintf (dump_file, "Modification phase of node %s\n",
5105 node->dump_name ());
2c9561b5 5106
209ca542 5107 ipcp_update_bits (node);
8bc5448f 5108 ipcp_update_vr (node);
2c9561b5
MJ
5109 aggval = ipa_get_agg_replacements_for_node (node);
5110 if (!aggval)
5111 return 0;
67348ccc 5112 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5113 if (param_count == 0)
5114 return 0;
5115 adjust_agg_replacement_values (node, aggval);
5116 if (dump_file)
5117 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5118
8aab5218
MJ
5119 fbi.node = node;
5120 fbi.info = NULL;
5121 fbi.bb_infos = vNULL;
5122 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5123 fbi.param_count = param_count;
5124 fbi.aa_walked = 0;
2c9561b5 5125
f65f1ae3
MJ
5126 vec_safe_grow_cleared (descriptors, param_count);
5127 ipa_populate_param_decls (node, *descriptors);
8aab5218
MJ
5128 calculate_dominance_info (CDI_DOMINATORS);
5129 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5130 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5131
8aab5218
MJ
5132 int i;
5133 struct ipa_bb_info *bi;
5134 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5135 free_ipa_bb_info (bi);
5136 fbi.bb_infos.release ();
5137 free_dominance_info (CDI_DOMINATORS);
04be694e 5138 (*ipcp_transformations)[node->uid].agg_values = NULL;
676b4899
PK
5139 (*ipcp_transformations)[node->uid].bits = NULL;
5140 (*ipcp_transformations)[node->uid].m_vr = NULL;
5141
f65f1ae3 5142 vec_free (descriptors);
2c9561b5
MJ
5143
5144 if (!something_changed)
5145 return 0;
5146 else if (cfg_changed)
5147 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5148 else
5149 return TODO_update_ssa_only_virtuals;
5150}
86cd0334
MJ
5151
5152#include "gt-ipa-prop.h"