/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
56/* Function summary where the parameter infos are actually stored. */
57ipa_node_params_t *ipa_node_params_sum = NULL;
04be694e
MJ
58/* Vector of IPA-CP transformation data for each clone. */
59vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
6fe906a3
MJ
60/* Edge summary for IPA-CP edge information. */
61ipa_edge_args_sum_t *ipa_edge_args_sum;
771578a0 62
86cd0334
MJ
63/* Traits for a hash table for reusing already existing ipa_bits. */
64
65struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
66{
67 typedef ipa_bits *value_type;
68 typedef ipa_bits *compare_type;
69 static hashval_t
70 hash (const ipa_bits *p)
71 {
72 hashval_t t = (hashval_t) p->value.to_shwi ();
73 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
74 }
75 static bool
76 equal (const ipa_bits *a, const ipa_bits *b)
77 {
78 return a->value == b->value && a->mask == b->mask;
79 }
80 static void
81 mark_empty (ipa_bits *&p)
82 {
83 p = NULL;
84 }
85 static bool
86 is_empty (const ipa_bits *p)
87 {
88 return p == NULL;
89 }
90 static bool
91 is_deleted (const ipa_bits *p)
92 {
93 return p == reinterpret_cast<const ipa_bits *> (1);
94 }
95 static void
96 mark_deleted (ipa_bits *&p)
97 {
98 p = reinterpret_cast<ipa_bits *> (1);
99 }
100};
101
102/* Hash table for avoid repeated allocations of equal ipa_bits. */
103static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
104
105/* Traits for a hash table for reusing value_ranges used for IPA. Note that
106 the equiv bitmap is not hashed and is expected to be NULL. */
107
108struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
109{
110 typedef value_range *value_type;
111 typedef value_range *compare_type;
112 static hashval_t
113 hash (const value_range *p)
114 {
115 gcc_checking_assert (!p->equiv);
116 hashval_t t = (hashval_t) p->type;
117 t = iterative_hash_expr (p->min, t);
118 return iterative_hash_expr (p->max, t);
119 }
120 static bool
121 equal (const value_range *a, const value_range *b)
122 {
123 return a->type == b->type && a->min == b->min && a->max == b->max;
124 }
125 static void
126 mark_empty (value_range *&p)
127 {
128 p = NULL;
129 }
130 static bool
131 is_empty (const value_range *p)
132 {
133 return p == NULL;
134 }
135 static bool
136 is_deleted (const value_range *p)
137 {
138 return p == reinterpret_cast<const value_range *> (1);
139 }
140 static void
141 mark_deleted (value_range *&p)
142 {
143 p = reinterpret_cast<value_range *> (1);
144 }
145};
146
147/* Hash table for avoid repeated allocations of equal value_ranges. */
148static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
149
771578a0 150/* Holders of ipa cgraph hooks: */
40982661 151static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 152
4502fe8d
MJ
153/* Description of a reference to an IPA constant. */
154struct ipa_cst_ref_desc
155{
156 /* Edge that corresponds to the statement which took the reference. */
157 struct cgraph_edge *cs;
158 /* Linked list of duplicates created when call graph edges are cloned. */
159 struct ipa_cst_ref_desc *next_duplicate;
160 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
161 if out of control. */
162 int refcount;
163};
164
165/* Allocation pool for reference descriptions. */
166
fb0b2914 167static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
fcb87c50 168 ("IPA-PROP ref descriptions");
4502fe8d 169
5fe8e757
MJ
170/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
171 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
172
173static bool
174ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
175{
67348ccc 176 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
177
178 if (!fs_opts)
179 return false;
2bf86c84 180 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
181}
182
be95e2b9
MJ
183/* Return index of the formal whose tree is PTREE in function which corresponds
184 to INFO. */
185
d044dd17 186static int
f65f1ae3
MJ
187ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
188 tree ptree)
518dc859
RL
189{
190 int i, count;
191
f65f1ae3 192 count = vec_safe_length (descriptors);
518dc859 193 for (i = 0; i < count; i++)
f65f1ae3 194 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
195 return i;
196
197 return -1;
198}
199
d044dd17
MJ
200/* Return index of the formal whose tree is PTREE in function which corresponds
201 to INFO. */
202
203int
204ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
205{
206 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
207}
208
209/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
210 NODE. */
be95e2b9 211
f8e2a1ed
MJ
212static void
213ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 214 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
215{
216 tree fndecl;
217 tree fnargs;
218 tree parm;
219 int param_num;
3e293154 220
67348ccc 221 fndecl = node->decl;
0e8853ee 222 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
223 fnargs = DECL_ARGUMENTS (fndecl);
224 param_num = 0;
910ad8de 225 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 226 {
209ca542 227 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
228 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
229 true);
518dc859
RL
230 param_num++;
231 }
232}
233
3f84bf08
MJ
234/* Return how many formal parameters FNDECL has. */
235
fd29c024 236int
310bc633 237count_formal_params (tree fndecl)
3f84bf08
MJ
238{
239 tree parm;
240 int count = 0;
0e8853ee 241 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 242
910ad8de 243 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
244 count++;
245
246 return count;
247}
248
0e8853ee
JH
249/* Return the declaration of Ith formal parameter of the function corresponding
250 to INFO. Note there is no setter function as this array is built just once
251 using ipa_initialize_node_params. */
252
253void
254ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
255{
256 fprintf (file, "param #%i", i);
f65f1ae3 257 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
258 {
259 fprintf (file, " ");
ef6cb4c7 260 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
261 }
262}
263
159f01f8
MJ
264/* If necessary, allocate vector of parameter descriptors in info of NODE.
265 Return true if they were allocated, false if not. */
0e8853ee 266
159f01f8 267static bool
0e8853ee
JH
268ipa_alloc_node_params (struct cgraph_node *node, int param_count)
269{
270 struct ipa_node_params *info = IPA_NODE_REF (node);
271
f65f1ae3 272 if (!info->descriptors && param_count)
159f01f8
MJ
273 {
274 vec_safe_grow_cleared (info->descriptors, param_count);
275 return true;
276 }
277 else
278 return false;
0e8853ee
JH
279}
280
f8e2a1ed
MJ
281/* Initialize the ipa_node_params structure associated with NODE by counting
282 the function parameters, creating the descriptors and populating their
283 param_decls. */
be95e2b9 284
f8e2a1ed
MJ
285void
286ipa_initialize_node_params (struct cgraph_node *node)
287{
288 struct ipa_node_params *info = IPA_NODE_REF (node);
289
159f01f8
MJ
290 if (!info->descriptors
291 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
292 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
293}
294
749aa96d
MJ
295/* Print the jump functions associated with call graph edge CS to file F. */
296
297static void
298ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
299{
300 int i, count;
301
302 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
303 for (i = 0; i < count; i++)
304 {
305 struct ipa_jump_func *jump_func;
306 enum jump_func_type type;
307
308 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
309 type = jump_func->type;
310
311 fprintf (f, " param %d: ", i);
312 if (type == IPA_JF_UNKNOWN)
313 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
314 else if (type == IPA_JF_CONST)
315 {
4502fe8d 316 tree val = jump_func->value.constant.value;
749aa96d 317 fprintf (f, "CONST: ");
ef6cb4c7 318 print_generic_expr (f, val);
749aa96d
MJ
319 if (TREE_CODE (val) == ADDR_EXPR
320 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
321 {
322 fprintf (f, " -> ");
ef6cb4c7 323 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
324 }
325 fprintf (f, "\n");
326 }
749aa96d
MJ
327 else if (type == IPA_JF_PASS_THROUGH)
328 {
329 fprintf (f, "PASS THROUGH: ");
8b7773a4 330 fprintf (f, "%d, op %s",
749aa96d 331 jump_func->value.pass_through.formal_id,
5806f481 332 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 333 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
334 {
335 fprintf (f, " ");
ef6cb4c7 336 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
337 }
338 if (jump_func->value.pass_through.agg_preserved)
339 fprintf (f, ", agg_preserved");
3ea6239f 340 fprintf (f, "\n");
749aa96d
MJ
341 }
342 else if (type == IPA_JF_ANCESTOR)
343 {
344 fprintf (f, "ANCESTOR: ");
16998094 345 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
346 jump_func->value.ancestor.formal_id,
347 jump_func->value.ancestor.offset);
8b7773a4
MJ
348 if (jump_func->value.ancestor.agg_preserved)
349 fprintf (f, ", agg_preserved");
3ea6239f 350 fprintf (f, "\n");
749aa96d 351 }
8b7773a4
MJ
352
353 if (jump_func->agg.items)
354 {
355 struct ipa_agg_jf_item *item;
356 int j;
357
358 fprintf (f, " Aggregate passed by %s:\n",
359 jump_func->agg.by_ref ? "reference" : "value");
9771b263 360 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
361 {
362 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
363 item->offset);
364 if (TYPE_P (item->value))
365 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 366 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
367 else
368 {
369 fprintf (f, "cst: ");
ef6cb4c7 370 print_generic_expr (f, item->value);
8b7773a4
MJ
371 }
372 fprintf (f, "\n");
373 }
374 }
44210a96
MJ
375
376 struct ipa_polymorphic_call_context *ctx
377 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
378 if (ctx && !ctx->useless_p ())
379 {
380 fprintf (f, " Context: ");
381 ctx->dump (dump_file);
382 }
04be694e 383
86cd0334 384 if (jump_func->bits)
209ca542 385 {
86cd0334
MJ
386 fprintf (f, " value: ");
387 print_hex (jump_func->bits->value, f);
388 fprintf (f, ", mask: ");
389 print_hex (jump_func->bits->mask, f);
209ca542
PK
390 fprintf (f, "\n");
391 }
392 else
393 fprintf (f, " Unknown bits\n");
8bc5448f 394
86cd0334 395 if (jump_func->m_vr)
8bc5448f
KV
396 {
397 fprintf (f, " VR ");
398 fprintf (f, "%s[",
86cd0334
MJ
399 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
400 print_decs (jump_func->m_vr->min, f);
8bc5448f 401 fprintf (f, ", ");
86cd0334 402 print_decs (jump_func->m_vr->max, f);
8bc5448f
KV
403 fprintf (f, "]\n");
404 }
405 else
406 fprintf (f, " Unknown VR\n");
749aa96d
MJ
407 }
408}
409
410
be95e2b9
MJ
411/* Print the jump functions of all arguments on all call graph edges going from
412 NODE to file F. */
413
518dc859 414void
3e293154 415ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 416{
3e293154 417 struct cgraph_edge *cs;
518dc859 418
464d0118 419 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
3e293154
MJ
420 for (cs = node->callees; cs; cs = cs->next_callee)
421 {
422 if (!ipa_edge_args_info_available_for_edge_p (cs))
423 continue;
424
464d0118
ML
425 fprintf (f, " callsite %s -> %s : \n",
426 node->dump_name (),
427 cs->callee->dump_name ());
749aa96d
MJ
428 ipa_print_node_jump_functions_for_edge (f, cs);
429 }
518dc859 430
9de04252 431 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 432 {
9de04252 433 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
434 if (!ipa_edge_args_info_available_for_edge_p (cs))
435 continue;
3e293154 436
9de04252
MJ
437 ii = cs->indirect_info;
438 if (ii->agg_contents)
c13bc3d9 439 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 440 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 441 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
442 ii->param_index, ii->offset,
443 ii->by_ref ? "by reference" : "by_value");
444 else
85942f45
JH
445 fprintf (f, " indirect %s callsite, calling param %i, "
446 "offset " HOST_WIDE_INT_PRINT_DEC,
447 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
448 ii->offset);
9de04252 449
749aa96d
MJ
450 if (cs->call_stmt)
451 {
9de04252 452 fprintf (f, ", for stmt ");
749aa96d 453 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 454 }
749aa96d 455 else
9de04252 456 fprintf (f, "\n");
ba392339
JH
457 if (ii->polymorphic)
458 ii->context.dump (f);
749aa96d 459 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
460 }
461}
462
463/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 464
3e293154
MJ
465void
466ipa_print_all_jump_functions (FILE *f)
467{
468 struct cgraph_node *node;
469
ca30a539 470 fprintf (f, "\nJump functions:\n");
65c70e6b 471 FOR_EACH_FUNCTION (node)
3e293154
MJ
472 {
473 ipa_print_node_jump_functions (f, node);
474 }
475}
476
04be694e
MJ
477/* Set jfunc to be a know-really nothing jump function. */
478
479static void
480ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
481{
482 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
483 jfunc->bits = NULL;
484 jfunc->m_vr = NULL;
04be694e
MJ
485}
486
b8f6e610
MJ
487/* Set JFUNC to be a copy of another jmp (to be used by jump function
488 combination code). The two functions will share their rdesc. */
489
490static void
491ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
492 struct ipa_jump_func *src)
493
494{
495 gcc_checking_assert (src->type == IPA_JF_CONST);
496 dst->type = IPA_JF_CONST;
497 dst->value.constant = src->value.constant;
498}
499
7b872d9e
MJ
500/* Set JFUNC to be a constant jmp function. */
501
502static void
4502fe8d
MJ
503ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
504 struct cgraph_edge *cs)
7b872d9e
MJ
505{
506 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
507 jfunc->value.constant.value = unshare_expr_without_location (constant);
508
509 if (TREE_CODE (constant) == ADDR_EXPR
510 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
511 {
512 struct ipa_cst_ref_desc *rdesc;
4502fe8d 513
601f3293 514 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
515 rdesc->cs = cs;
516 rdesc->next_duplicate = NULL;
517 rdesc->refcount = 1;
518 jfunc->value.constant.rdesc = rdesc;
519 }
520 else
521 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
522}
523
524/* Set JFUNC to be a simple pass-through jump function. */
525static void
8b7773a4 526ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 527 bool agg_preserved)
7b872d9e
MJ
528{
529 jfunc->type = IPA_JF_PASS_THROUGH;
530 jfunc->value.pass_through.operand = NULL_TREE;
531 jfunc->value.pass_through.formal_id = formal_id;
532 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 533 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
534}
535
a2b4c188
KV
536/* Set JFUNC to be an unary pass through jump function. */
537
538static void
539ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
540 enum tree_code operation)
541{
542 jfunc->type = IPA_JF_PASS_THROUGH;
543 jfunc->value.pass_through.operand = NULL_TREE;
544 jfunc->value.pass_through.formal_id = formal_id;
545 jfunc->value.pass_through.operation = operation;
546 jfunc->value.pass_through.agg_preserved = false;
547}
7b872d9e
MJ
548/* Set JFUNC to be an arithmetic pass through jump function. */
549
550static void
551ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
552 tree operand, enum tree_code operation)
553{
554 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 555 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
556 jfunc->value.pass_through.formal_id = formal_id;
557 jfunc->value.pass_through.operation = operation;
8b7773a4 558 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
559}
560
561/* Set JFUNC to be an ancestor jump function. */
562
563static void
564ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 565 int formal_id, bool agg_preserved)
7b872d9e
MJ
566{
567 jfunc->type = IPA_JF_ANCESTOR;
568 jfunc->value.ancestor.formal_id = formal_id;
569 jfunc->value.ancestor.offset = offset;
8b7773a4 570 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
571}
572
8aab5218
MJ
573/* Get IPA BB information about the given BB. FBI is the context of analyzis
574 of this function body. */
575
576static struct ipa_bb_info *
56b40062 577ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
578{
579 gcc_checking_assert (fbi);
580 return &fbi->bb_infos[bb->index];
581}
582
f65cf2b7
MJ
583/* Structure to be passed in between detect_type_change and
584 check_stmt_for_type_change. */
585
11478306 586struct prop_type_change_info
f65cf2b7 587{
290ebcb7
MJ
588 /* Offset into the object where there is the virtual method pointer we are
589 looking for. */
590 HOST_WIDE_INT offset;
591 /* The declaration or SSA_NAME pointer of the base that we are checking for
592 type change. */
593 tree object;
f65cf2b7
MJ
594 /* Set to true if dynamic type change has been detected. */
595 bool type_maybe_changed;
596};
597
598/* Return true if STMT can modify a virtual method table pointer.
599
600 This function makes special assumptions about both constructors and
601 destructors which are all the functions that are allowed to alter the VMT
602 pointers. It assumes that destructors begin with assignment into all VMT
603 pointers and that constructors essentially look in the following way:
604
605 1) The very first thing they do is that they call constructors of ancestor
606 sub-objects that have them.
607
608 2) Then VMT pointers of this and all its ancestors is set to new values
609 corresponding to the type corresponding to the constructor.
610
611 3) Only afterwards, other stuff such as constructor of member sub-objects
612 and the code written by the user is run. Only this may include calling
613 virtual functions, directly or indirectly.
614
615 There is no way to call a constructor of an ancestor sub-object in any
616 other way.
617
618 This means that we do not have to care whether constructors get the correct
619 type information because they will always change it (in fact, if we define
620 the type to be given by the VMT pointer, it is undefined).
621
622 The most important fact to derive from the above is that if, for some
623 statement in the section 3, we try to detect whether the dynamic type has
624 changed, we can safely ignore all calls as we examine the function body
625 backwards until we reach statements in section 2 because these calls cannot
626 be ancestor constructors or destructors (if the input is not bogus) and so
627 do not change the dynamic type (this holds true only for automatically
628 allocated objects but at the moment we devirtualize only these). We then
629 must detect that statements in section 2 change the dynamic type and can try
630 to derive the new type. That is enough and we can stop, we will never see
631 the calls into constructors of sub-objects in this code. Therefore we can
632 safely ignore all call statements that we traverse.
633 */
634
635static bool
355fe088 636stmt_may_be_vtbl_ptr_store (gimple *stmt)
f65cf2b7
MJ
637{
638 if (is_gimple_call (stmt))
639 return false;
70f633c5
JH
640 if (gimple_clobber_p (stmt))
641 return false;
f65cf2b7
MJ
642 else if (is_gimple_assign (stmt))
643 {
644 tree lhs = gimple_assign_lhs (stmt);
645
0004f992
MJ
646 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
647 {
648 if (flag_strict_aliasing
649 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
650 return false;
651
652 if (TREE_CODE (lhs) == COMPONENT_REF
653 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 654 return false;
0004f992
MJ
655 /* In the future we might want to use get_base_ref_and_offset to find
656 if there is a field corresponding to the offset and if so, proceed
657 almost like if it was a component ref. */
658 }
f65cf2b7
MJ
659 }
660 return true;
661}
662
3b97a5c7
MJ
663/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
664 to check whether a particular statement may modify the virtual table
665 pointerIt stores its result into DATA, which points to a
11478306 666 prop_type_change_info structure. */
f65cf2b7
MJ
667
668static bool
669check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
670{
355fe088 671 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 672 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
673
674 if (stmt_may_be_vtbl_ptr_store (stmt))
675 {
676 tci->type_maybe_changed = true;
677 return true;
678 }
679 else
680 return false;
681}
682
058d0a90
JH
683/* See if ARG is PARAM_DECl describing instance passed by pointer
684 or reference in FUNCTION. Return false if the dynamic type may change
685 in between beggining of the function until CALL is invoked.
290ebcb7 686
058d0a90
JH
687 Generally functions are not allowed to change type of such instances,
688 but they call destructors. We assume that methods can not destroy the THIS
689 pointer. Also as a special cases, constructor and destructors may change
690 type of the THIS pointer. */
691
692static bool
355fe088 693param_type_may_change_p (tree function, tree arg, gimple *call)
058d0a90
JH
694{
695 /* Pure functions can not do any changes on the dynamic type;
696 that require writting to memory. */
697 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
698 return false;
699 /* We need to check if we are within inlined consturctor
700 or destructor (ideally we would have way to check that the
701 inline cdtor is actually working on ARG, but we don't have
702 easy tie on this, so punt on all non-pure cdtors.
703 We may also record the types of cdtors and once we know type
704 of the instance match them.
705
706 Also code unification optimizations may merge calls from
707 different blocks making return values unreliable. So
708 do nothing during late optimization. */
709 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
710 return true;
711 if (TREE_CODE (arg) == SSA_NAME
712 && SSA_NAME_IS_DEFAULT_DEF (arg)
713 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
714 {
715 /* Normal (non-THIS) argument. */
716 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
717 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
026c3cfd 718 /* THIS pointer of an method - here we want to watch constructors
058d0a90
JH
719 and destructors as those definitely may change the dynamic
720 type. */
721 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
722 && !DECL_CXX_CONSTRUCTOR_P (function)
723 && !DECL_CXX_DESTRUCTOR_P (function)
724 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
725 {
726 /* Walk the inline stack and watch out for ctors/dtors. */
727 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
728 block = BLOCK_SUPERCONTEXT (block))
00a0ea64
JJ
729 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
730 return true;
058d0a90
JH
731 return false;
732 }
733 }
734 return true;
735}
290ebcb7 736
06d65050
JH
737/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
738 callsite CALL) by looking for assignments to its virtual table pointer. If
739 it is, return true and fill in the jump function JFUNC with relevant type
740 information or set it to unknown. ARG is the object itself (not a pointer
741 to it, unless dereferenced). BASE is the base of the memory access as
058d0a90
JH
742 returned by get_ref_base_and_extent, as is the offset.
743
744 This is helper function for detect_type_change and detect_type_change_ssa
745 that does the heavy work which is usually unnecesary. */
f65cf2b7
MJ
746
747static bool
058d0a90 748detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
538dd0b7 749 gcall *call, struct ipa_jump_func *jfunc,
058d0a90 750 HOST_WIDE_INT offset)
f65cf2b7 751{
11478306 752 struct prop_type_change_info tci;
f65cf2b7 753 ao_ref ao;
70f633c5 754 bool entry_reached = false;
f65cf2b7
MJ
755
756 gcc_checking_assert (DECL_P (arg)
757 || TREE_CODE (arg) == MEM_REF
758 || handled_component_p (arg));
f65cf2b7 759
b49407f8
JH
760 comp_type = TYPE_MAIN_VARIANT (comp_type);
761
d570d364
JH
762 /* Const calls cannot call virtual methods through VMT and so type changes do
763 not matter. */
764 if (!flag_devirtualize || !gimple_vuse (call)
765 /* Be sure expected_type is polymorphic. */
766 || !comp_type
767 || TREE_CODE (comp_type) != RECORD_TYPE
768 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
769 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
770 return true;
4bf2a588 771
dd887943 772 ao_ref_init (&ao, arg);
f65cf2b7
MJ
773 ao.base = base;
774 ao.offset = offset;
775 ao.size = POINTER_SIZE;
776 ao.max_size = ao.size;
f65cf2b7 777
290ebcb7
MJ
778 tci.offset = offset;
779 tci.object = get_base_address (arg);
290ebcb7 780 tci.type_maybe_changed = false;
290ebcb7 781
f65cf2b7 782 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
70f633c5 783 &tci, NULL, &entry_reached);
f65cf2b7
MJ
784 if (!tci.type_maybe_changed)
785 return false;
786
04be694e 787 ipa_set_jf_unknown (jfunc);
f65cf2b7
MJ
788 return true;
789}
790
058d0a90
JH
791/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
792 If it is, return true and fill in the jump function JFUNC with relevant type
793 information or set it to unknown. ARG is the object itself (not a pointer
794 to it, unless dereferenced). BASE is the base of the memory access as
795 returned by get_ref_base_and_extent, as is the offset. */
796
797static bool
538dd0b7 798detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
799 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
800{
801 if (!flag_devirtualize)
802 return false;
803
804 if (TREE_CODE (base) == MEM_REF
805 && !param_type_may_change_p (current_function_decl,
806 TREE_OPERAND (base, 0),
807 call))
808 return false;
809 return detect_type_change_from_memory_writes (arg, base, comp_type,
810 call, jfunc, offset);
811}
812
f65cf2b7
MJ
813/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
814 SSA name (its dereference will become the base and the offset is assumed to
815 be zero). */
816
817static bool
06d65050 818detect_type_change_ssa (tree arg, tree comp_type,
538dd0b7 819 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
820{
821 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 822 if (!flag_devirtualize
06d65050 823 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
824 return false;
825
058d0a90
JH
826 if (!param_type_may_change_p (current_function_decl, arg, call))
827 return false;
828
f65cf2b7 829 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 830 build_int_cst (ptr_type_node, 0));
f65cf2b7 831
058d0a90
JH
832 return detect_type_change_from_memory_writes (arg, arg, comp_type,
833 call, jfunc, 0);
f65cf2b7
MJ
834}
835
fdb0e1b4
MJ
836/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
837 boolean variable pointed to by DATA. */
838
839static bool
840mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
841 void *data)
842{
843 bool *b = (bool *) data;
844 *b = true;
845 return true;
846}
847
8aab5218
MJ
848/* Return true if we have already walked so many statements in AA that we
849 should really just start giving up. */
850
851static bool
56b40062 852aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
853{
854 gcc_checking_assert (fbi);
855 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
856}
857
858/* Find the nearest valid aa status for parameter specified by INDEX that
859 dominates BB. */
860
56b40062
MJ
861static struct ipa_param_aa_status *
862find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
863 int index)
864{
865 while (true)
866 {
867 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
868 if (!bb)
869 return NULL;
870 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
871 if (!bi->param_aa_statuses.is_empty ()
872 && bi->param_aa_statuses[index].valid)
873 return &bi->param_aa_statuses[index];
874 }
875}
876
877/* Get AA status structure for the given BB and parameter with INDEX. Allocate
878 structures and/or intialize the result with a dominating description as
879 necessary. */
880
56b40062
MJ
881static struct ipa_param_aa_status *
882parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
883 int index)
884{
885 gcc_checking_assert (fbi);
886 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
887 if (bi->param_aa_statuses.is_empty ())
888 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
56b40062 889 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
8aab5218
MJ
890 if (!paa->valid)
891 {
892 gcc_checking_assert (!paa->parm_modified
893 && !paa->ref_modified
894 && !paa->pt_modified);
56b40062 895 struct ipa_param_aa_status *dom_paa;
8aab5218
MJ
896 dom_paa = find_dominating_aa_status (fbi, bb, index);
897 if (dom_paa)
898 *paa = *dom_paa;
899 else
900 paa->valid = true;
901 }
902
903 return paa;
904}
905
688010ba 906/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 907 a value known not to be modified in this function before reaching the
8aab5218
MJ
908 statement STMT. FBI holds information about the function we have so far
909 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
910
911static bool
56b40062 912parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 913 gimple *stmt, tree parm_load)
fdb0e1b4 914{
56b40062 915 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
916 bool modified = false;
917 ao_ref refd;
918
776e4fe2
MJ
919 tree base = get_base_address (parm_load);
920 gcc_assert (TREE_CODE (base) == PARM_DECL);
921 if (TREE_READONLY (base))
922 return true;
923
8aab5218
MJ
924 /* FIXME: FBI can be NULL if we are being called from outside
925 ipa_node_analysis or ipcp_transform_function, which currently happens
926 during inlining analysis. It would be great to extend fbi's lifetime and
927 always have it. Currently, we are just not afraid of too much walking in
928 that case. */
929 if (fbi)
930 {
931 if (aa_overwalked (fbi))
932 return false;
933 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
934 if (paa->parm_modified)
935 return false;
936 }
937 else
938 paa = NULL;
fdb0e1b4
MJ
939
940 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 941 ao_ref_init (&refd, parm_load);
8aab5218
MJ
942 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
943 &modified, NULL);
944 if (fbi)
945 fbi->aa_walked += walked;
946 if (paa && modified)
947 paa->parm_modified = true;
8b7773a4 948 return !modified;
fdb0e1b4
MJ
949}
950
a2b4c188
KV
951/* If STMT is an assignment that loads a value from an parameter declaration,
952 return the index of the parameter in ipa_node_params which has not been
953 modified. Otherwise return -1. */
954
955static int
956load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 957 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
958 gimple *stmt)
959{
bda2bc48
MJ
960 int index;
961 tree op1;
962
a2b4c188
KV
963 if (!gimple_assign_single_p (stmt))
964 return -1;
965
bda2bc48
MJ
966 op1 = gimple_assign_rhs1 (stmt);
967 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
968 return -1;
969
bda2bc48
MJ
970 index = ipa_get_param_decl_index_1 (descriptors, op1);
971 if (index < 0
972 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
973 return -1;
974
bda2bc48 975 return index;
a2b4c188
KV
976}
977
8aab5218
MJ
978/* Return true if memory reference REF (which must be a load through parameter
979 with INDEX) loads data that are known to be unmodified in this function
980 before reaching statement STMT. */
8b7773a4
MJ
981
982static bool
56b40062 983parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 984 int index, gimple *stmt, tree ref)
8b7773a4 985{
56b40062 986 struct ipa_param_aa_status *paa;
8b7773a4
MJ
987 bool modified = false;
988 ao_ref refd;
989
8aab5218
MJ
990 /* FIXME: FBI can be NULL if we are being called from outside
991 ipa_node_analysis or ipcp_transform_function, which currently happens
992 during inlining analysis. It would be great to extend fbi's lifetime and
993 always have it. Currently, we are just not afraid of too much walking in
994 that case. */
995 if (fbi)
996 {
997 if (aa_overwalked (fbi))
998 return false;
999 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1000 if (paa->ref_modified)
1001 return false;
1002 }
1003 else
1004 paa = NULL;
8b7773a4 1005
8aab5218 1006 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 1007 ao_ref_init (&refd, ref);
8aab5218
MJ
1008 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1009 &modified, NULL);
1010 if (fbi)
1011 fbi->aa_walked += walked;
1012 if (paa && modified)
1013 paa->ref_modified = true;
8b7773a4
MJ
1014 return !modified;
1015}
1016
8aab5218
MJ
1017/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1018 is known to be unmodified in this function before reaching call statement
1019 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
1020
1021static bool
56b40062 1022parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 1023 gimple *call, tree parm)
8b7773a4
MJ
1024{
1025 bool modified = false;
1026 ao_ref refd;
1027
1028 /* It's unnecessary to calculate anything about memory contnets for a const
1029 function because it is not goin to use it. But do not cache the result
1030 either. Also, no such calculations for non-pointers. */
1031 if (!gimple_vuse (call)
8aab5218
MJ
1032 || !POINTER_TYPE_P (TREE_TYPE (parm))
1033 || aa_overwalked (fbi))
8b7773a4
MJ
1034 return false;
1035
56b40062
MJ
1036 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1037 gimple_bb (call),
1038 index);
8aab5218 1039 if (paa->pt_modified)
8b7773a4
MJ
1040 return false;
1041
1042 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
1043 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1044 &modified, NULL);
1045 fbi->aa_walked += walked;
8b7773a4 1046 if (modified)
8aab5218 1047 paa->pt_modified = true;
8b7773a4
MJ
1048 return !modified;
1049}
1050
91bb9f80
MJ
1051/* Return true if we can prove that OP is a memory reference loading
1052 data from an aggregate passed as a parameter.
1053
1054 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1055 false if it cannot prove that the value has not been modified before the
1056 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1057 if it cannot prove the value has not been modified, in that case it will
1058 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1059
8b7773a4
MJ
1060 INFO and PARMS_AINFO describe parameters of the current function (but the
1061 latter can be NULL), STMT is the load statement. If function returns true,
1062 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1063 within the aggregate and whether it is a load from a value passed by
1064 reference respectively. */
1065
ff302741 1066bool
56b40062 1067ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
f65f1ae3 1068 vec<ipa_param_descriptor, va_gc> *descriptors,
355fe088 1069 gimple *stmt, tree op, int *index_p,
ff302741 1070 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
91bb9f80 1071 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
1072{
1073 int index;
1074 HOST_WIDE_INT size, max_size;
ee45a32d
EB
1075 bool reverse;
1076 tree base
1077 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
8b7773a4
MJ
1078
1079 if (max_size == -1 || max_size != size || *offset_p < 0)
1080 return false;
1081
1082 if (DECL_P (base))
1083 {
d044dd17 1084 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1085 if (index >= 0
8aab5218 1086 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1087 {
1088 *index_p = index;
1089 *by_ref_p = false;
3ff2ca23
JJ
1090 if (size_p)
1091 *size_p = size;
91bb9f80
MJ
1092 if (guaranteed_unmodified)
1093 *guaranteed_unmodified = true;
8b7773a4
MJ
1094 return true;
1095 }
1096 return false;
1097 }
1098
1099 if (TREE_CODE (base) != MEM_REF
1100 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1101 || !integer_zerop (TREE_OPERAND (base, 1)))
1102 return false;
1103
1104 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1105 {
1106 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1107 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1108 }
1109 else
1110 {
1111 /* This branch catches situations where a pointer parameter is not a
1112 gimple register, for example:
1113
1114 void hip7(S*) (struct S * p)
1115 {
1116 void (*<T2e4>) (struct S *) D.1867;
1117 struct S * p.1;
1118
1119 <bb 2>:
1120 p.1_1 = p;
1121 D.1867_2 = p.1_1->f;
1122 D.1867_2 ();
1123 gdp = &p;
1124 */
1125
355fe088 1126 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1127 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1128 }
1129
91bb9f80 1130 if (index >= 0)
8b7773a4 1131 {
91bb9f80
MJ
1132 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1133 if (!data_preserved && !guaranteed_unmodified)
1134 return false;
1135
8b7773a4
MJ
1136 *index_p = index;
1137 *by_ref_p = true;
3ff2ca23
JJ
1138 if (size_p)
1139 *size_p = size;
91bb9f80
MJ
1140 if (guaranteed_unmodified)
1141 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1142 return true;
1143 }
1144 return false;
1145}
1146
b258210c 1147/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1148 of an assignment statement STMT, try to determine whether we are actually
1149 handling any of the following cases and construct an appropriate jump
1150 function into JFUNC if so:
1151
1152 1) The passed value is loaded from a formal parameter which is not a gimple
1153 register (most probably because it is addressable, the value has to be
1154 scalar) and we can guarantee the value has not changed. This case can
1155 therefore be described by a simple pass-through jump function. For example:
1156
1157 foo (int a)
1158 {
1159 int a.0;
1160
1161 a.0_2 = a;
1162 bar (a.0_2);
1163
1164 2) The passed value can be described by a simple arithmetic pass-through
1165 jump function. E.g.
1166
1167 foo (int a)
1168 {
1169 int D.2064;
1170
1171 D.2064_4 = a.1(D) + 4;
1172 bar (D.2064_4);
1173
1174 This case can also occur in combination of the previous one, e.g.:
1175
1176 foo (int a, int z)
1177 {
1178 int a.0;
1179 int D.2064;
1180
1181 a.0_3 = a;
1182 D.2064_4 = a.0_3 + 4;
1183 foo (D.2064_4);
1184
1185 3) The passed value is an address of an object within another one (which
1186 also passed by reference). Such situations are described by an ancestor
1187 jump function and describe situations such as:
1188
1189 B::foo() (struct B * const this)
1190 {
1191 struct A * D.1845;
1192
1193 D.1845_2 = &this_1(D)->D.1748;
1194 A::bar (D.1845_2);
1195
1196 INFO is the structure describing individual parameters access different
1197 stages of IPA optimizations. PARMS_AINFO contains the information that is
1198 only needed for intraprocedural analysis. */
685b0d13
MJ
1199
1200static void
56b40062 1201compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1202 struct ipa_node_params *info,
b258210c 1203 struct ipa_jump_func *jfunc,
355fe088 1204 gcall *call, gimple *stmt, tree name,
06d65050 1205 tree param_type)
685b0d13
MJ
1206{
1207 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1208 tree op1, tc_ssa, base, ssa;
ee45a32d 1209 bool reverse;
685b0d13 1210 int index;
685b0d13 1211
685b0d13 1212 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1213
fdb0e1b4 1214 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1215 {
fdb0e1b4
MJ
1216 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1217 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1218 else
bda2bc48
MJ
1219 index = load_from_unmodified_param (fbi, info->descriptors,
1220 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1221 tc_ssa = op1;
1222 }
1223 else
1224 {
bda2bc48 1225 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1226 tc_ssa = gimple_assign_lhs (stmt);
1227 }
1228
1229 if (index >= 0)
1230 {
a77af182 1231 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1232 {
a77af182
RB
1233 case GIMPLE_BINARY_RHS:
1234 {
1235 tree op2 = gimple_assign_rhs2 (stmt);
1236 if (!is_gimple_ip_invariant (op2)
1237 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1238 != tcc_comparison)
1239 && !useless_type_conversion_p (TREE_TYPE (name),
1240 TREE_TYPE (op1))))
1241 return;
1242
1243 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1244 gimple_assign_rhs_code (stmt));
1245 break;
1246 }
1247 case GIMPLE_SINGLE_RHS:
1248 {
1249 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1250 tc_ssa);
1251 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1252 break;
1253 }
1254 case GIMPLE_UNARY_RHS:
bda2bc48
MJ
1255 if (is_gimple_assign (stmt)
1256 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1257 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1258 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1259 gimple_assign_rhs_code (stmt));
a77af182 1260 default:;
8b7773a4 1261 }
685b0d13
MJ
1262 return;
1263 }
1264
1265 if (TREE_CODE (op1) != ADDR_EXPR)
1266 return;
1267 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1268 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1269 return;
ee45a32d 1270 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
32aa622c 1271 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1272 /* If this is a varying address, punt. */
1273 || max_size == -1
1274 || max_size != size)
685b0d13 1275 return;
807e902e 1276 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1277 ssa = TREE_OPERAND (base, 0);
1278 if (TREE_CODE (ssa) != SSA_NAME
1279 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1280 || offset < 0)
685b0d13
MJ
1281 return;
1282
b8f6e610 1283 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1284 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1285 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1286 ipa_set_ancestor_jf (jfunc, offset, index,
1287 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1288}
1289
40591473
MJ
1290/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1291 it looks like:
1292
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1294
1295 The base of the MEM_REF must be a default definition SSA NAME of a
1296 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1297 whole MEM_REF expression is returned and the offset calculated from any
1298 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1299 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1300
1301static tree
355fe088 1302get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473
MJ
1303{
1304 HOST_WIDE_INT size, max_size;
1305 tree expr, parm, obj;
ee45a32d 1306 bool reverse;
40591473
MJ
1307
1308 if (!gimple_assign_single_p (assign))
1309 return NULL_TREE;
1310 expr = gimple_assign_rhs1 (assign);
1311
1312 if (TREE_CODE (expr) != ADDR_EXPR)
1313 return NULL_TREE;
1314 expr = TREE_OPERAND (expr, 0);
1315 obj = expr;
ee45a32d 1316 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
40591473
MJ
1317
1318 if (TREE_CODE (expr) != MEM_REF
1319 /* If this is a varying address, punt. */
1320 || max_size == -1
1321 || max_size != size
1322 || *offset < 0)
1323 return NULL_TREE;
1324 parm = TREE_OPERAND (expr, 0);
1325 if (TREE_CODE (parm) != SSA_NAME
1326 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1327 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1328 return NULL_TREE;
1329
807e902e 1330 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1331 *obj_p = obj;
1332 return expr;
1333}
1334
685b0d13 1335
b258210c
MJ
1336/* Given that an actual argument is an SSA_NAME that is a result of a phi
1337 statement PHI, try to find out whether NAME is in fact a
1338 multiple-inheritance typecast from a descendant into an ancestor of a formal
1339 parameter and thus can be described by an ancestor jump function and if so,
1340 write the appropriate function into JFUNC.
1341
1342 Essentially we want to match the following pattern:
1343
1344 if (obj_2(D) != 0B)
1345 goto <bb 3>;
1346 else
1347 goto <bb 4>;
1348
1349 <bb 3>:
1350 iftmp.1_3 = &obj_2(D)->D.1762;
1351
1352 <bb 4>:
1353 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1354 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1355 return D.1879_6; */
1356
1357static void
56b40062 1358compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1359 struct ipa_node_params *info,
b258210c 1360 struct ipa_jump_func *jfunc,
538dd0b7 1361 gcall *call, gphi *phi)
b258210c 1362{
40591473 1363 HOST_WIDE_INT offset;
355fe088 1364 gimple *assign, *cond;
b258210c 1365 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1366 tree tmp, parm, expr, obj;
b258210c
MJ
1367 int index, i;
1368
54e348cb 1369 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1370 return;
1371
54e348cb
MJ
1372 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1373 tmp = PHI_ARG_DEF (phi, 0);
1374 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1375 tmp = PHI_ARG_DEF (phi, 1);
1376 else
1377 return;
b258210c
MJ
1378 if (TREE_CODE (tmp) != SSA_NAME
1379 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1380 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1381 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1382 return;
1383
1384 assign = SSA_NAME_DEF_STMT (tmp);
1385 assign_bb = gimple_bb (assign);
40591473 1386 if (!single_pred_p (assign_bb))
b258210c 1387 return;
40591473
MJ
1388 expr = get_ancestor_addr_info (assign, &obj, &offset);
1389 if (!expr)
b258210c
MJ
1390 return;
1391 parm = TREE_OPERAND (expr, 0);
b258210c 1392 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1393 if (index < 0)
1394 return;
b258210c
MJ
1395
1396 cond_bb = single_pred (assign_bb);
1397 cond = last_stmt (cond_bb);
69610617
SB
1398 if (!cond
1399 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1400 || gimple_cond_code (cond) != NE_EXPR
1401 || gimple_cond_lhs (cond) != parm
1402 || !integer_zerop (gimple_cond_rhs (cond)))
1403 return;
1404
b258210c
MJ
1405 phi_bb = gimple_bb (phi);
1406 for (i = 0; i < 2; i++)
1407 {
1408 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1409 if (pred != assign_bb && pred != cond_bb)
1410 return;
1411 }
1412
3b97a5c7
MJ
1413 ipa_set_ancestor_jf (jfunc, offset, index,
1414 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1415}
1416
be95e2b9
MJ
1417/* Inspect the given TYPE and return true iff it has the same structure (the
1418 same number of fields of the same types) as a C++ member pointer. If
1419 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1420 corresponding fields there. */
1421
3e293154
MJ
1422static bool
1423type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1424{
1425 tree fld;
1426
1427 if (TREE_CODE (type) != RECORD_TYPE)
1428 return false;
1429
1430 fld = TYPE_FIELDS (type);
1431 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1432 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1433 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1434 return false;
1435
1436 if (method_ptr)
1437 *method_ptr = fld;
1438
910ad8de 1439 fld = DECL_CHAIN (fld);
8b7773a4 1440 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1441 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1442 return false;
1443 if (delta)
1444 *delta = fld;
1445
910ad8de 1446 if (DECL_CHAIN (fld))
3e293154
MJ
1447 return false;
1448
1449 return true;
1450}
1451
61502ca8 1452/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1453 return the rhs of its defining statement. Otherwise return RHS as it
1454 is. */
7ec49257
MJ
1455
1456static inline tree
1457get_ssa_def_if_simple_copy (tree rhs)
1458{
1459 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1460 {
355fe088 1461 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1462
1463 if (gimple_assign_single_p (def_stmt))
1464 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1465 else
1466 break;
7ec49257
MJ
1467 }
1468 return rhs;
1469}
1470
8b7773a4
MJ
1471/* Simple linked list, describing known contents of an aggregate beforere
1472 call. */
1473
1474struct ipa_known_agg_contents_list
1475{
1476 /* Offset and size of the described part of the aggregate. */
1477 HOST_WIDE_INT offset, size;
1478 /* Known constant value or NULL if the contents is known to be unknown. */
1479 tree constant;
1480 /* Pointer to the next structure in the list. */
1481 struct ipa_known_agg_contents_list *next;
1482};
3e293154 1483
0d48ee34
MJ
1484/* Find the proper place in linked list of ipa_known_agg_contents_list
1485 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1486 unless there is a partial overlap, in which case return NULL, or such
1487 element is already there, in which case set *ALREADY_THERE to true. */
1488
1489static struct ipa_known_agg_contents_list **
1490get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1491 HOST_WIDE_INT lhs_offset,
1492 HOST_WIDE_INT lhs_size,
1493 bool *already_there)
1494{
1495 struct ipa_known_agg_contents_list **p = list;
1496 while (*p && (*p)->offset < lhs_offset)
1497 {
1498 if ((*p)->offset + (*p)->size > lhs_offset)
1499 return NULL;
1500 p = &(*p)->next;
1501 }
1502
1503 if (*p && (*p)->offset < lhs_offset + lhs_size)
1504 {
1505 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1506 /* We already know this value is subsequently overwritten with
1507 something else. */
1508 *already_there = true;
1509 else
1510 /* Otherwise this is a partial overlap which we cannot
1511 represent. */
1512 return NULL;
1513 }
1514 return p;
1515}
1516
1517/* Build aggregate jump function from LIST, assuming there are exactly
1518 CONST_COUNT constant entries there and that th offset of the passed argument
1519 is ARG_OFFSET and store it into JFUNC. */
1520
1521static void
1522build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1523 int const_count, HOST_WIDE_INT arg_offset,
1524 struct ipa_jump_func *jfunc)
1525{
1526 vec_alloc (jfunc->agg.items, const_count);
1527 while (list)
1528 {
1529 if (list->constant)
1530 {
1531 struct ipa_agg_jf_item item;
1532 item.offset = list->offset - arg_offset;
1533 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1534 item.value = unshare_expr_without_location (list->constant);
1535 jfunc->agg.items->quick_push (item);
1536 }
1537 list = list->next;
1538 }
1539}
1540
8b7773a4
MJ
1541/* Traverse statements from CALL backwards, scanning whether an aggregate given
1542 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1543 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1544 aggregate. JFUNC is the jump function into which the constants are
1545 subsequently stored. */
be95e2b9 1546
3e293154 1547static void
538dd0b7
DM
1548determine_locally_known_aggregate_parts (gcall *call, tree arg,
1549 tree arg_type,
0d48ee34 1550 struct ipa_jump_func *jfunc)
3e293154 1551{
8b7773a4
MJ
1552 struct ipa_known_agg_contents_list *list = NULL;
1553 int item_count = 0, const_count = 0;
1554 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1555 gimple_stmt_iterator gsi;
8b7773a4
MJ
1556 tree arg_base;
1557 bool check_ref, by_ref;
1558 ao_ref r;
3e293154 1559
29799e9d
MJ
1560 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1561 return;
1562
8b7773a4
MJ
1563 /* The function operates in three stages. First, we prepare check_ref, r,
1564 arg_base and arg_offset based on what is actually passed as an actual
1565 argument. */
3e293154 1566
85942f45 1567 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1568 {
1569 by_ref = true;
1570 if (TREE_CODE (arg) == SSA_NAME)
1571 {
1572 tree type_size;
85942f45 1573 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1574 return;
1575 check_ref = true;
1576 arg_base = arg;
1577 arg_offset = 0;
85942f45 1578 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1579 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1580 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1581 }
1582 else if (TREE_CODE (arg) == ADDR_EXPR)
1583 {
1584 HOST_WIDE_INT arg_max_size;
ee45a32d 1585 bool reverse;
8b7773a4
MJ
1586
1587 arg = TREE_OPERAND (arg, 0);
1588 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
ee45a32d 1589 &arg_max_size, &reverse);
8b7773a4
MJ
1590 if (arg_max_size == -1
1591 || arg_max_size != arg_size
1592 || arg_offset < 0)
1593 return;
1594 if (DECL_P (arg_base))
1595 {
8b7773a4 1596 check_ref = false;
0d48ee34 1597 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1598 }
1599 else
1600 return;
1601 }
1602 else
1603 return;
1604 }
1605 else
1606 {
1607 HOST_WIDE_INT arg_max_size;
ee45a32d 1608 bool reverse;
8b7773a4
MJ
1609
1610 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1611
1612 by_ref = false;
1613 check_ref = false;
1614 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
ee45a32d 1615 &arg_max_size, &reverse);
8b7773a4
MJ
1616 if (arg_max_size == -1
1617 || arg_max_size != arg_size
1618 || arg_offset < 0)
1619 return;
1620
1621 ao_ref_init (&r, arg);
1622 }
1623
1624 /* Second stage walks back the BB, looks at individual statements and as long
1625 as it is confident of how the statements affect contents of the
1626 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1627 describing it. */
1628 gsi = gsi_for_stmt (call);
726a989a
RB
1629 gsi_prev (&gsi);
1630 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1631 {
8b7773a4 1632 struct ipa_known_agg_contents_list *n, **p;
355fe088 1633 gimple *stmt = gsi_stmt (gsi);
8b7773a4
MJ
1634 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1635 tree lhs, rhs, lhs_base;
ee45a32d 1636 bool reverse;
3e293154 1637
8b7773a4 1638 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1639 continue;
8b75fc9b 1640 if (!gimple_assign_single_p (stmt))
8b7773a4 1641 break;
3e293154 1642
726a989a
RB
1643 lhs = gimple_assign_lhs (stmt);
1644 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1645 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1646 || TREE_CODE (lhs) == BIT_FIELD_REF
1647 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1648 break;
3e293154 1649
8b7773a4 1650 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
ee45a32d 1651 &lhs_max_size, &reverse);
8b7773a4 1652 if (lhs_max_size == -1
0d48ee34 1653 || lhs_max_size != lhs_size)
8b7773a4 1654 break;
3e293154 1655
8b7773a4 1656 if (check_ref)
518dc859 1657 {
8b7773a4
MJ
1658 if (TREE_CODE (lhs_base) != MEM_REF
1659 || TREE_OPERAND (lhs_base, 0) != arg_base
1660 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1661 break;
3e293154 1662 }
8b7773a4 1663 else if (lhs_base != arg_base)
774b8a55
MJ
1664 {
1665 if (DECL_P (lhs_base))
1666 continue;
1667 else
1668 break;
1669 }
3e293154 1670
0d48ee34
MJ
1671 bool already_there = false;
1672 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1673 &already_there);
1674 if (!p)
8b7773a4 1675 break;
0d48ee34
MJ
1676 if (already_there)
1677 continue;
3e293154 1678
8b7773a4
MJ
1679 rhs = get_ssa_def_if_simple_copy (rhs);
1680 n = XALLOCA (struct ipa_known_agg_contents_list);
1681 n->size = lhs_size;
1682 n->offset = lhs_offset;
1683 if (is_gimple_ip_invariant (rhs))
1684 {
1685 n->constant = rhs;
1686 const_count++;
1687 }
1688 else
1689 n->constant = NULL_TREE;
1690 n->next = *p;
1691 *p = n;
3e293154 1692
8b7773a4 1693 item_count++;
dfea20f1
MJ
1694 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1695 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1696 break;
1697 }
be95e2b9 1698
8b7773a4
MJ
1699 /* Third stage just goes over the list and creates an appropriate vector of
1700 ipa_agg_jf_item structures out of it, of sourse only if there are
1701 any known constants to begin with. */
3e293154 1702
8b7773a4 1703 if (const_count)
3e293154 1704 {
8b7773a4 1705 jfunc->agg.by_ref = by_ref;
0d48ee34 1706 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1707 }
1708}
1709
5d5f1e95
KV
1710/* Return the Ith param type of callee associated with call graph
1711 edge E. */
1712
1713tree
06d65050
JH
1714ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1715{
1716 int n;
1717 tree type = (e->callee
67348ccc 1718 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1719 : gimple_call_fntype (e->call_stmt));
1720 tree t = TYPE_ARG_TYPES (type);
1721
1722 for (n = 0; n < i; n++)
1723 {
1724 if (!t)
1725 break;
1726 t = TREE_CHAIN (t);
1727 }
1728 if (t)
1729 return TREE_VALUE (t);
1730 if (!e->callee)
1731 return NULL;
67348ccc 1732 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1733 for (n = 0; n < i; n++)
1734 {
1735 if (!t)
1736 return NULL;
1737 t = TREE_CHAIN (t);
1738 }
1739 if (t)
1740 return TREE_TYPE (t);
1741 return NULL;
1742}
1743
86cd0334
MJ
1744/* Return ipa_bits with VALUE and MASK values, which can be either a newly
1745 allocated structure or a previously existing one shared with other jump
1746 functions and/or transformation summaries. */
1747
1748ipa_bits *
1749ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1750{
1751 ipa_bits tmp;
1752 tmp.value = value;
1753 tmp.mask = mask;
1754
1755 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1756 if (*slot)
1757 return *slot;
1758
1759 ipa_bits *res = ggc_alloc<ipa_bits> ();
1760 res->value = value;
1761 res->mask = mask;
1762 *slot = res;
1763
1764 return res;
1765}
1766
1767/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1768 table in order to avoid creating multiple same ipa_bits structures. */
1769
1770static void
1771ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1772 const widest_int &mask)
1773{
1774 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1775}
1776
1777/* Return a pointer to a value_range just like *TMP, but either find it in
1778 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1779
1780static value_range *
1781ipa_get_value_range (value_range *tmp)
1782{
1783 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1784 if (*slot)
1785 return *slot;
1786
1787 value_range *vr = ggc_alloc<value_range> ();
1788 *vr = *tmp;
1789 *slot = vr;
1790
1791 return vr;
1792}
1793
1794/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1795 equiv set. Use hash table in order to avoid creating multiple same copies of
1796 value_ranges. */
1797
1798static value_range *
1799ipa_get_value_range (enum value_range_type type, tree min, tree max)
1800{
1801 value_range tmp;
1802 tmp.type = type;
1803 tmp.min = min;
1804 tmp.max = max;
1805 tmp.equiv = NULL;
1806 return ipa_get_value_range (&tmp);
1807}
1808
1809/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1810 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1811 same value_range structures. */
1812
1813static void
1814ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1815 tree min, tree max)
1816{
1817 jf->m_vr = ipa_get_value_range (type, min, max);
1818}
1819
1820/* Assign to JF a pointer to a value_range just liek TMP but either fetch a
1821 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1822
1823static void
1824ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1825{
1826 jf->m_vr = ipa_get_value_range (tmp);
1827}
1828
3e293154
MJ
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if the jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* With devirtualization enabled, record the polymorphic call context
	 of every pointer argument.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Record a value range for the argument: pointers that are provably
	 non-zero get a ~[0, 0] anti-range, integral SSA names get their
	 known range converted to the formal parameter's type.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range tmpvr,resvr;

	      tmpvr.type = type;
	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      tmpvr.equiv = NULL;
	      memset (&resvr, 0, sizeof (resvr));
	      /* Convert the range to PARAM_TYPE; only keep it if the
		 conversion still yields a usable (anti-)range.  */
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      /* Record known bits: non-zero bits for integral values, alignment
	 information encoded as a mask/value pair for pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask
	    = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
	      .and_not (align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      /* Finally classify the jump function itself: constant, pass-through
	 of a formal parameter, or a more complex assignment/PHI-derived
	 function.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  /* Drop the context vector again if no argument had a useful context.  */
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
2008
749aa96d 2009/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2010 from BB. */
749aa96d 2011
062c604f 2012static void
56b40062 2013ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2014{
8aab5218
MJ
2015 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2016 int i;
749aa96d
MJ
2017 struct cgraph_edge *cs;
2018
8aab5218 2019 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2020 {
8aab5218 2021 struct cgraph_node *callee = cs->callee;
749aa96d 2022
8aab5218
MJ
2023 if (callee)
2024 {
d52f5295 2025 callee->ultimate_alias_target ();
8aab5218
MJ
2026 /* We do not need to bother analyzing calls to unknown functions
2027 unless they may become known during lto/whopr. */
2028 if (!callee->definition && !flag_lto)
2029 continue;
2030 }
2031 ipa_compute_jump_functions_for_edge (fbi, cs);
2032 }
749aa96d
MJ
2033}
2034
8b7773a4
MJ
2035/* If STMT looks like a statement loading a value from a member pointer formal
2036 parameter, return that parameter and store the offset of the field to
2037 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2038 might be clobbered). If USE_DELTA, then we look for a use of the delta
2039 field rather than the pfn. */
be95e2b9 2040
3e293154 2041static tree
355fe088 2042ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
8b7773a4 2043 HOST_WIDE_INT *offset_p)
3e293154 2044{
8b7773a4
MJ
2045 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2046
2047 if (!gimple_assign_single_p (stmt))
2048 return NULL_TREE;
3e293154 2049
8b7773a4 2050 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
2051 if (TREE_CODE (rhs) == COMPONENT_REF)
2052 {
2053 ref_field = TREE_OPERAND (rhs, 1);
2054 rhs = TREE_OPERAND (rhs, 0);
2055 }
2056 else
2057 ref_field = NULL_TREE;
d242d063 2058 if (TREE_CODE (rhs) != MEM_REF)
3e293154 2059 return NULL_TREE;
3e293154 2060 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
2061 if (TREE_CODE (rec) != ADDR_EXPR)
2062 return NULL_TREE;
2063 rec = TREE_OPERAND (rec, 0);
3e293154 2064 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 2065 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 2066 return NULL_TREE;
d242d063 2067 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 2068
8b7773a4
MJ
2069 if (use_delta)
2070 fld = delta_field;
2071 else
2072 fld = ptr_field;
2073 if (offset_p)
2074 *offset_p = int_bit_position (fld);
2075
ae788515
EB
2076 if (ref_field)
2077 {
2078 if (integer_nonzerop (ref_offset))
2079 return NULL_TREE;
ae788515
EB
2080 return ref_field == fld ? rec : NULL_TREE;
2081 }
3e293154 2082 else
8b7773a4
MJ
2083 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2084 : NULL_TREE;
3e293154
MJ
2085}
2086
2087/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2088
3e293154
MJ
2089static bool
2090ipa_is_ssa_with_stmt_def (tree t)
2091{
2092 if (TREE_CODE (t) == SSA_NAME
2093 && !SSA_NAME_IS_DEFAULT_DEF (t))
2094 return true;
2095 else
2096 return false;
2097}
2098
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  /* Clear the remaining flags; callers that discover aggregate loads or
     member-pointer calls set them after this returns.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
2116
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* TARGET is an SSA_NAME here (the caller only passes SSA_NAME targets).
     A default definition means the pointer is a parameter itself - the
     easy case.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Next easiest case: the called pointer is loaded from an aggregate
     parameter (passed by value or reference).  */
  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  /* Exactly one PHI argument may be a pfn load from the parameter.  */
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  /* The condition must be (pfn & 1) != 0, possibly behind a conversion.  */
  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an intervening conversion, if any.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the very same parameter; which
     field carries the virtual bit is a target property.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
2311
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the parameter itself; any detected type change
	 invalidates the note.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* Otherwise the object may be an ancestor (at ANC_OFFSET) of a
	 parameter; get_ancestor_addr_info recognizes that pattern.  */
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic indirect call on the edge.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2369
2370/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2371 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2372 containing intermediate information about each formal parameter. */
b258210c
MJ
2373
2374static void
56b40062 2375ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2376{
2377 tree target = gimple_call_fn (call);
b786d31f
JH
2378
2379 if (!target
2380 || (TREE_CODE (target) != SSA_NAME
2381 && !virtual_method_call_p (target)))
2382 return;
b258210c 2383
7d0aa05b 2384 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2385 /* If we previously turned the call into a direct call, there is
2386 no need to analyze. */
b786d31f 2387 if (cs && !cs->indirect_unknown_callee)
25583c4f 2388 return;
7d0aa05b 2389
a5b58b28 2390 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2391 {
7d0aa05b
JH
2392 tree instance;
2393 tree target = gimple_call_fn (call);
6f8091fc
JH
2394 ipa_polymorphic_call_context context (current_function_decl,
2395 target, call, &instance);
7d0aa05b 2396
ba392339
JH
2397 gcc_checking_assert (cs->indirect_info->otr_type
2398 == obj_type_ref_class (target));
2399 gcc_checking_assert (cs->indirect_info->otr_token
2400 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2401
29c43c83
JH
2402 cs->indirect_info->vptr_changed
2403 = !context.get_dynamic_type (instance,
2404 OBJ_TYPE_REF_OBJECT (target),
2405 obj_type_ref_class (target), call);
0127c169 2406 cs->indirect_info->context = context;
7d0aa05b
JH
2407 }
2408
b258210c 2409 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2410 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2411 else if (virtual_method_call_p (target))
8aab5218 2412 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2413}
2414
2415
e33c6cd6 2416/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2417 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2418 formal parameters are called. */
be95e2b9 2419
3e293154 2420static void
355fe088 2421ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2422{
726a989a 2423 if (is_gimple_call (stmt))
538dd0b7 2424 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2425}
2426
2427/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2428 If OP is a parameter declaration, mark it as used in the info structure
2429 passed in DATA. */
2430
2431static bool
355fe088 2432visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2433{
2434 struct ipa_node_params *info = (struct ipa_node_params *) data;
2435
2436 op = get_base_address (op);
2437 if (op
2438 && TREE_CODE (op) == PARM_DECL)
2439 {
2440 int index = ipa_get_param_decl_index (info, op);
2441 gcc_assert (index >= 0);
310bc633 2442 ipa_set_param_used (info, index, true);
062c604f
MJ
2443 }
2444
2445 return false;
3e293154
MJ
2446}
2447
8aab5218
MJ
2448/* Scan the statements in BB and inspect the uses of formal parameters. Store
2449 the findings in various structures of the associated ipa_node_params
2450 structure, such as parameter flags, notes etc. FBI holds various data about
2451 the function being analyzed. */
be95e2b9 2452
062c604f 2453static void
56b40062 2454ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2455{
726a989a 2456 gimple_stmt_iterator gsi;
8aab5218
MJ
2457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2458 {
355fe088 2459 gimple *stmt = gsi_stmt (gsi);
3e293154 2460
8aab5218
MJ
2461 if (is_gimple_debug (stmt))
2462 continue;
3e293154 2463
8aab5218
MJ
2464 ipa_analyze_stmt_uses (fbi, stmt);
2465 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2466 visit_ref_for_mod_analysis,
2467 visit_ref_for_mod_analysis,
2468 visit_ref_for_mod_analysis);
5fe8e757 2469 }
8aab5218
MJ
2470 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2471 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2472 visit_ref_for_mod_analysis,
2473 visit_ref_for_mod_analysis,
2474 visit_ref_for_mod_analysis);
2475}
2476
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      /* Count call uses; any non-call, non-debug use makes the
		 parameter's uses undescribable.  */
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    /* No uses at all - zero controlled uses.  */
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2521
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  /* Only the vectors are released; BI itself is an element of
     fbi->bb_infos and is freed together with that vector (see
     ipa_release_body_info).  */
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
2530
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body information shared by all invocations of the walk
     callback.  */
  struct ipa_func_body_info *m_fbi;
};
2544
edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  /* First record parameter uses in BB, then compute jump functions for all
     call edges going out of it.  */
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
2552
c3431191
ML
2553/* Release body info FBI. */
2554
2555void
2556ipa_release_body_info (struct ipa_func_body_info *fbi)
2557{
2558 int i;
2559 struct ipa_bb_info *bi;
2560
2561 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2562 free_ipa_bb_info (bi);
2563 fbi->bb_infos.release ();
2564}
2565
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* Analyze each node only once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  /* When function-specific options forbid the analysis, conservatively
     mark all parameters as used with undescribable uses.  */
  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Distribute all outgoing edges - direct and indirect - to the
     per-basic-block info of the block containing the call.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  /* Do the actual work in dominator order.  */
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2625
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      /* Compose DST, which refers to a formal parameter of the function
	 being inlined, with the corresponding jump function SRC on the
	 inlined edge CS.  */
      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Copy the aggregate part, shifting item offsets by the ancestor
	     offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      /* A simple pass-through composed with SRC is essentially a
		 copy of SRC.  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
2819
5ce97055
JH
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE)destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.

   The function also handles the cases when the target cannot be resolved to
   a FUNCTION_DECL: member-pointer calls and non-invariant targets give up,
   while provably bogus targets are redirected to __builtin_unreachable.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct ipa_call_summary *es = ipa_call_summaries->get (ie);
  bool unreachable = false;

  /* Strip the address-of operation so that TARGET is the function decl (or
     some other invariant) itself.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaulate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }


	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  /* The target is an invariant but not a function: the call can
	     never happen, so redirect it to __builtin_unreachable.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitely represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but can not refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      /* Only dump the outcome; agreement or disagreement with the existing
	 speculation does not change the edge here.  */
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s.  Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agree with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  /* Debug counter allows bisecting miscompiles caused by devirtualization.  */
  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  /* Direct calls are cheaper than indirect ones; update the summary
	     by the difference of the two cost models.  */
	  es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      /* Speculation needs a target that cannot disappear; prefer a
	 non-interposable alias when the callee itself may be discarded.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      /* The 8/10 scaling is the assumed probability that the speculation
	 succeeds.  */
      ie = ie->make_speculative
	 (callee, ie->count.apply_scale (8, 10), ie->frequency * 8 / 10);
    }

  return ie;
}
2990
91bb9f80
MJ
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  REQ_OFFSET is in bits, relative to the start of CONSTRUCTOR.
   Only ARRAY_TYPE and RECORD_TYPE constructors are searched; the function
   recurses into nested constructors.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  /* Compute the bit offset of this array element from its (possibly
	     implicit) index, normalizing against a non-zero lower bound.  */
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	    }
	  else
	    /* No explicit index: position follows from the element's ordinal
	       number in the constructor.  */
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  /* Skip elements whose bit offset does not fit a signed HWI.  */
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-fields are not individually addressable; ignore them.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      /* Elements are visited in increasing offset order, so once we are past
	 REQ_OFFSET the search has failed.  */
      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
3058
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  OFFSET is the bit offset into the initializer; BY_REF says whether
   SCALAR is a pointer to the aggregate (ADDR_EXPR) rather than the aggregate
   itself.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      /* For by-reference lookups only a known address is usable.  */
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  /* Only a read-only global variable with a CONSTRUCTOR initializer can
     provide an interprocedural invariant.  */
  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
3082
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  /* A lookup through a constant global initializer takes precedence over
     the aggregate jump function.  */
  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  /* The jump function is only usable if its by-ref-ness matches the
     requested one.  */
  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
3124
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  /* A cleared originating edge means the description is no longer usable.  */
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
  return true;
}
3149
3150/* If JFUNC has a reference description with refcount different from
3151 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3152 NULL. JFUNC must be a constant jump function. */
3153
3154static struct ipa_cst_ref_desc *
3155jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3156{
3157 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3158 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3159 return rdesc;
3160 else
3161 return NULL;
3162}
3163
568cda29
MJ
3164/* If the value of constant jump function JFUNC is an address of a function
3165 declaration, return the associated call graph node. Otherwise return
3166 NULL. */
3167
3168static cgraph_node *
3169cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3170{
3171 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3172 tree cst = ipa_get_jf_constant (jfunc);
3173 if (TREE_CODE (cst) != ADDR_EXPR
3174 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3175 return NULL;
3176
d52f5295 3177 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3178}
3179
3180
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  /* Note the side effect: the refcount is decremented as part of the
     condition whenever a usable rdesc exists.  */
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
3202
b258210c
MJ
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee address lives inside an aggregate; look it up either in
	 the aggregate jump function or a constant initializer.  */
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      /* A value from a (possibly modified) aggregate is only trustworthy
	 when the memory is known not to change before the call.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function's described use is consumed by making the edge
	 direct; release its reference count.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3250
bec81025
MJ
3251/* Return the target to be used in cases of impossible devirtualization. IE
3252 and target (the latter can be NULL) are dumped when dumping is enabled. */
3253
72972c22
MJ
3254tree
3255ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3256{
3257 if (dump_file)
3258 {
3259 if (target)
3260 fprintf (dump_file,
464d0118
ML
3261 "Type inconsistent devirtualization: %s->%s\n",
3262 ie->caller->dump_name (),
bec81025
MJ
3263 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3264 else
3265 fprintf (dump_file,
464d0118
ML
3266 "No devirtualization target in %s\n",
3267 ie->caller->dump_name ());
bec81025
MJ
3268 }
3269 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3270 cgraph_node::get_create (new_target);
bec81025
MJ
3271 return new_target;
3272}
3273
d250540a
MJ
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the vtable lookup is only
		     a speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second attempt: use the polymorphic context to enumerate possible
     targets; a unique final target overrides the vtable-based guess.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	/* No possible target at all: the call is unreachable.  */
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* As a last resort, speculatively pick the most likely target.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3381
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Lattices of JFUNCs are relative to the root of the inline tree.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* Remember the successor now; IE may be turned into a direct edge.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making the edge direct may have reallocated the summary;
	     refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The edge stays indirect (or speculative); re-home its parameter
	 index into the new root according to the jump function.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3511
3512/* Recursively traverse subtree of NODE (including node) made of inlined
3513 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3514 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3515 update_jump_functions_after_inlining on all non-inlined edges that lead out
3516 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3517 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3518 created. */
be95e2b9 3519
f8e2a1ed 3520static bool
3e293154
MJ
3521propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3522 struct cgraph_node *node,
d52f5295 3523 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3524{
3525 struct cgraph_edge *e;
f8e2a1ed 3526 bool res;
3e293154 3527
e33c6cd6 3528 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3529
3530 for (e = node->callees; e; e = e->next_callee)
3531 if (!e->inline_failed)
f8e2a1ed 3532 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3533 else
3534 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3535 for (e = node->indirect_calls; e; e = e->next_callee)
3536 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3537
3538 return res;
3e293154
MJ
3539}
3540
4502fe8d
MJ
3541/* Combine two controlled uses counts as done during inlining. */
3542
3543static int
3544combine_controlled_uses_counters (int c, int d)
3545{
3546 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3547 return IPA_UNDESCRIBED_USE;
3548 else
3549 return c + d - 1;
3550}
3551
/* Propagate number of controlled users from CS->caleee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only arguments with a corresponding formal parameter carry controlled
     use information.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  /* Fold the callee's use count of parameter I into the caller's
	     count of the parameter it passes through.  */
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* No uses remain; remove the reference that cloning created
		 for the known constant, if there is one.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline clones and drop the
		     cloning-created references as well.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments without a matching formal parameter (varargs) cannot be
     tracked; mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3666
3e293154
MJ
3667/* Update jump functions and call note functions on inlining the call site CS.
3668 CS is expected to lead to a node already cloned by
3669 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3670 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3671 created. */
be95e2b9 3672
f8e2a1ed 3673bool
3e293154 3674ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3675 vec<cgraph_edge *> *new_edges)
3e293154 3676{
5ee53a06 3677 bool changed;
f8e2a1ed
MJ
3678 /* Do nothing if the preparation phase has not been carried out yet
3679 (i.e. during early inlining). */
dd912cb8 3680 if (!ipa_node_params_sum)
f8e2a1ed 3681 return false;
6fe906a3 3682 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3683
4502fe8d 3684 propagate_controlled_uses (cs);
5ee53a06
JH
3685 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3686
5ee53a06 3687 return changed;
518dc859
RL
3688}
3689
86cd0334
MJ
3690/* Ensure that array of edge arguments infos is big enough to accommodate a
3691 structure for all edges and reallocates it if not. Also, allocate
3692 associated hash tables is they do not already exist. */
3693
3694void
3695ipa_check_create_edge_args (void)
3696{
6fe906a3
MJ
3697 if (!ipa_edge_args_sum)
3698 ipa_edge_args_sum
3699 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3700 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
3701 if (!ipa_bits_hash_table)
3702 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3703 if (!ipa_vr_hash_table)
3704 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3705}
3706
771578a0
MJ
3707/* Frees all dynamically allocated structures that the argument info points
3708 to. */
be95e2b9 3709
518dc859 3710void
771578a0 3711ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 3712{
9771b263 3713 vec_free (args->jump_functions);
771578a0 3714 memset (args, 0, sizeof (*args));
518dc859
RL
3715}
3716
771578a0 3717/* Free all ipa_edge structures. */
be95e2b9 3718
518dc859 3719void
771578a0 3720ipa_free_all_edge_args (void)
518dc859 3721{
6fe906a3 3722 if (!ipa_edge_args_sum)
9771b263
DN
3723 return;
3724
6fe906a3
MJ
3725 ipa_edge_args_sum->release ();
3726 ipa_edge_args_sum = NULL;
518dc859
RL
3727}
3728
771578a0 3729/* Free all ipa_node_params structures. */
be95e2b9 3730
518dc859 3731void
771578a0 3732ipa_free_all_node_params (void)
518dc859 3733{
a0a348b1 3734 ipa_node_params_sum->release ();
dd912cb8 3735 ipa_node_params_sum = NULL;
771578a0
MJ
3736}
3737
86cd0334
MJ
3738/* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3739 tables if they do not already exist. */
04be694e
MJ
3740
3741void
3742ipcp_grow_transformations_if_necessary (void)
3743{
3744 if (vec_safe_length (ipcp_transformations)
3745 <= (unsigned) symtab->cgraph_max_uid)
3746 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
86cd0334
MJ
3747 if (!ipa_bits_hash_table)
3748 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3749 if (!ipa_vr_hash_table)
3750 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
04be694e
MJ
3751}
3752
2c9561b5
MJ
3753/* Set the aggregate replacements of NODE to be AGGVALS. */
3754
3755void
3756ipa_set_node_agg_value_chain (struct cgraph_node *node,
3757 struct ipa_agg_replacement_value *aggvals)
3758{
04be694e
MJ
3759 ipcp_grow_transformations_if_necessary ();
3760 (*ipcp_transformations)[node->uid].agg_values = aggvals;
2c9561b5
MJ
3761}
3762
6fe906a3
MJ
3763/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3764 count data structures accordingly. */
be95e2b9 3765
6fe906a3
MJ
3766void
3767ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 3768{
568cda29
MJ
3769 if (args->jump_functions)
3770 {
3771 struct ipa_jump_func *jf;
3772 int i;
3773 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3774 {
3775 struct ipa_cst_ref_desc *rdesc;
3776 try_decrement_rdesc_refcount (jf);
3777 if (jf->type == IPA_JF_CONST
3778 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3779 && rdesc->cs == cs)
3780 rdesc->cs = NULL;
3781 }
568cda29 3782 }
518dc859
RL
3783}
3784
6fe906a3
MJ
3785/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3786 reference count data strucutres accordingly. */
be95e2b9 3787
6fe906a3
MJ
3788void
3789ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3790 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 3791{
8b7773a4 3792 unsigned int i;
771578a0 3793
9771b263 3794 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3795 if (old_args->polymorphic_call_contexts)
3796 new_args->polymorphic_call_contexts
3797 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3798
9771b263 3799 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3800 {
3801 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3802 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3803
3804 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3805
3806 if (src_jf->type == IPA_JF_CONST)
3807 {
3808 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3809
3810 if (!src_rdesc)
3811 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3812 else if (src->caller == dst->caller)
3813 {
3814 struct ipa_ref *ref;
5e20cdc9 3815 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3816 gcc_checking_assert (n);
d122681a
ML
3817 ref = src->caller->find_reference (n, src->call_stmt,
3818 src->lto_stmt_uid);
568cda29 3819 gcc_checking_assert (ref);
d122681a 3820 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3821
601f3293 3822 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3823 dst_rdesc->cs = dst;
3824 dst_rdesc->refcount = src_rdesc->refcount;
3825 dst_rdesc->next_duplicate = NULL;
3826 dst_jf->value.constant.rdesc = dst_rdesc;
3827 }
4502fe8d
MJ
3828 else if (src_rdesc->cs == src)
3829 {
601f3293 3830 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3831 dst_rdesc->cs = dst;
4502fe8d 3832 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3833 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3834 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3835 dst_jf->value.constant.rdesc = dst_rdesc;
3836 }
3837 else
3838 {
3839 struct ipa_cst_ref_desc *dst_rdesc;
3840 /* This can happen during inlining, when a JFUNC can refer to a
3841 reference taken in a function up in the tree of inline clones.
3842 We need to find the duplicate that refers to our tree of
3843 inline clones. */
3844
3845 gcc_assert (dst->caller->global.inlined_to);
3846 for (dst_rdesc = src_rdesc->next_duplicate;
3847 dst_rdesc;
3848 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3849 {
3850 struct cgraph_node *top;
3851 top = dst_rdesc->cs->caller->global.inlined_to
3852 ? dst_rdesc->cs->caller->global.inlined_to
3853 : dst_rdesc->cs->caller;
3854 if (dst->caller->global.inlined_to == top)
3855 break;
3856 }
44a60244 3857 gcc_assert (dst_rdesc);
4502fe8d
MJ
3858 dst_jf->value.constant.rdesc = dst_rdesc;
3859 }
3860 }
6fe45955
MJ
3861 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3862 && src->caller == dst->caller)
3863 {
3864 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3865 ? dst->caller->global.inlined_to : dst->caller;
3866 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3867 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3868
3869 int c = ipa_get_controlled_uses (root_info, idx);
3870 if (c != IPA_UNDESCRIBED_USE)
3871 {
3872 c++;
3873 ipa_set_controlled_uses (root_info, idx, c);
3874 }
3875 }
4502fe8d 3876 }
771578a0
MJ
3877}
3878
dd912cb8 3879/* Analyze newly added function into callgraph. */
be95e2b9 3880
771578a0 3881static void
dd912cb8 3882ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3883{
dd912cb8
ML
3884 if (node->has_gimple_body_p ())
3885 ipa_analyze_node (node);
3886}
771578a0 3887
dd912cb8
ML
3888/* Hook that is called by summary when a node is duplicated. */
3889
3890void
3891ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3892 ipa_node_params *old_info,
3893 ipa_node_params *new_info)
3894{
3895 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3896
f65f1ae3 3897 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 3898 new_info->lattices = NULL;
771578a0 3899 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
3900 new_info->known_csts = old_info->known_csts.copy ();
3901 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 3902
8aab5218 3903 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3904 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3905 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3906
3907 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3908 if (old_av)
2c9561b5 3909 {
04be694e
MJ
3910 new_av = NULL;
3911 while (old_av)
3912 {
3913 struct ipa_agg_replacement_value *v;
2c9561b5 3914
04be694e
MJ
3915 v = ggc_alloc<ipa_agg_replacement_value> ();
3916 memcpy (v, old_av, sizeof (*v));
3917 v->next = new_av;
3918 new_av = v;
3919 old_av = old_av->next;
3920 }
3921 ipa_set_node_agg_value_chain (dst, new_av);
3922 }
3923
86cd0334
MJ
3924 ipcp_transformation_summary *src_trans
3925 = ipcp_get_transformation_summary (src);
04be694e 3926
8bc5448f 3927 if (src_trans)
04be694e
MJ
3928 {
3929 ipcp_grow_transformations_if_necessary ();
3930 src_trans = ipcp_get_transformation_summary (src);
86cd0334
MJ
3931 ipcp_transformation_summary *dst_trans
3932 = ipcp_get_transformation_summary (dst);
3933
3934 dst_trans->bits = vec_safe_copy (src_trans->bits);
3935
8bc5448f 3936 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
8bc5448f
KV
3937 vec<ipa_vr, va_gc> *&dst_vr
3938 = ipcp_get_transformation_summary (dst)->m_vr;
8bc5448f
KV
3939 if (vec_safe_length (src_trans->m_vr) > 0)
3940 {
3941 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3942 for (unsigned i = 0; i < src_vr->length (); ++i)
3943 dst_vr->quick_push ((*src_vr)[i]);
3944 }
2c9561b5 3945 }
771578a0
MJ
3946}
3947
3948/* Register our cgraph hooks if they are not already there. */
be95e2b9 3949
518dc859 3950void
771578a0 3951ipa_register_cgraph_hooks (void)
518dc859 3952{
dd912cb8 3953 ipa_check_create_node_params ();
6fe906a3 3954 ipa_check_create_edge_args ();
dd912cb8 3955
dd912cb8 3956 function_insertion_hook_holder =
3dafb85c 3957 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3958}
518dc859 3959
771578a0 3960/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3961
771578a0
MJ
3962static void
3963ipa_unregister_cgraph_hooks (void)
3964{
3dafb85c 3965 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3966 function_insertion_hook_holder = NULL;
771578a0
MJ
3967}
3968
3969/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3970 longer needed after ipa-cp. */
be95e2b9 3971
771578a0 3972void
e33c6cd6 3973ipa_free_all_structures_after_ipa_cp (void)
3e293154 3974{
2bf86c84 3975 if (!optimize && !in_lto_p)
3e293154
MJ
3976 {
3977 ipa_free_all_edge_args ();
3978 ipa_free_all_node_params ();
2651e637
ML
3979 ipcp_sources_pool.release ();
3980 ipcp_cst_values_pool.release ();
3981 ipcp_poly_ctx_values_pool.release ();
3982 ipcp_agg_lattice_pool.release ();
3e293154 3983 ipa_unregister_cgraph_hooks ();
601f3293 3984 ipa_refdesc_pool.release ();
3e293154
MJ
3985 }
3986}
3987
3988/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3989 longer needed after indirect inlining. */
be95e2b9 3990
3e293154 3991void
e33c6cd6 3992ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3993{
3994 ipa_free_all_edge_args ();
3995 ipa_free_all_node_params ();
3996 ipa_unregister_cgraph_hooks ();
2651e637
ML
3997 ipcp_sources_pool.release ();
3998 ipcp_cst_values_pool.release ();
3999 ipcp_poly_ctx_values_pool.release ();
4000 ipcp_agg_lattice_pool.release ();
601f3293 4001 ipa_refdesc_pool.release ();
518dc859
RL
4002}
4003
dcd416e3 4004/* Print ipa_tree_map data structures of all functions in the
518dc859 4005 callgraph to F. */
be95e2b9 4006
518dc859 4007void
2c9561b5 4008ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4009{
4010 int i, count;
3e293154 4011 struct ipa_node_params *info;
518dc859 4012
67348ccc 4013 if (!node->definition)
3e293154
MJ
4014 return;
4015 info = IPA_NODE_REF (node);
464d0118 4016 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
3e293154
MJ
4017 count = ipa_get_param_count (info);
4018 for (i = 0; i < count; i++)
518dc859 4019 {
4502fe8d
MJ
4020 int c;
4021
a4e33812 4022 fprintf (f, " ");
e067bd43 4023 ipa_dump_param (f, info, i);
339f49ec
JH
4024 if (ipa_is_param_used (info, i))
4025 fprintf (f, " used");
4502fe8d
MJ
4026 c = ipa_get_controlled_uses (info, i);
4027 if (c == IPA_UNDESCRIBED_USE)
4028 fprintf (f, " undescribed_use");
4029 else
4030 fprintf (f, " controlled_uses=%i", c);
3e293154 4031 fprintf (f, "\n");
518dc859
RL
4032 }
4033}
dcd416e3 4034
ca30a539 4035/* Print ipa_tree_map data structures of all functions in the
3e293154 4036 callgraph to F. */
be95e2b9 4037
3e293154 4038void
ca30a539 4039ipa_print_all_params (FILE * f)
3e293154
MJ
4040{
4041 struct cgraph_node *node;
4042
ca30a539 4043 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4044 FOR_EACH_FUNCTION (node)
ca30a539 4045 ipa_print_node_params (f, node);
3e293154 4046}
3f84bf08
MJ
4047
4048/* Return a heap allocated vector containing formal parameters of FNDECL. */
4049
9771b263 4050vec<tree>
3f84bf08
MJ
4051ipa_get_vector_of_formal_parms (tree fndecl)
4052{
9771b263 4053 vec<tree> args;
3f84bf08
MJ
4054 int count;
4055 tree parm;
4056
0e8853ee 4057 gcc_assert (!flag_wpa);
310bc633 4058 count = count_formal_params (fndecl);
9771b263 4059 args.create (count);
910ad8de 4060 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 4061 args.quick_push (parm);
3f84bf08
MJ
4062
4063 return args;
4064}
4065
4066/* Return a heap allocated vector containing types of formal parameters of
4067 function type FNTYPE. */
4068
31519c38
AH
4069vec<tree>
4070ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 4071{
9771b263 4072 vec<tree> types;
3f84bf08
MJ
4073 int count = 0;
4074 tree t;
4075
4076 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4077 count++;
4078
9771b263 4079 types.create (count);
3f84bf08 4080 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 4081 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
4082
4083 return types;
4084}
4085
4086/* Modify the function declaration FNDECL and its type according to the plan in
4087 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4088 to reflect the actual parameters being modified which are determined by the
4089 base_index field. */
4090
4091void
31519c38 4092ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 4093{
31519c38
AH
4094 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4095 tree orig_type = TREE_TYPE (fndecl);
4096 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
4097
4098 /* The following test is an ugly hack, some functions simply don't have any
4099 arguments in their type. This is probably a bug but well... */
31519c38
AH
4100 bool care_for_types = (old_arg_types != NULL_TREE);
4101 bool last_parm_void;
4102 vec<tree> otypes;
3f84bf08
MJ
4103 if (care_for_types)
4104 {
4105 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4106 == void_type_node);
31519c38 4107 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 4108 if (last_parm_void)
9771b263 4109 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 4110 else
9771b263 4111 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
4112 }
4113 else
4114 {
4115 last_parm_void = false;
9771b263 4116 otypes.create (0);
3f84bf08
MJ
4117 }
4118
31519c38
AH
4119 int len = adjustments.length ();
4120 tree *link = &DECL_ARGUMENTS (fndecl);
4121 tree new_arg_types = NULL;
4122 for (int i = 0; i < len; i++)
3f84bf08
MJ
4123 {
4124 struct ipa_parm_adjustment *adj;
4125 gcc_assert (link);
4126
9771b263 4127 adj = &adjustments[i];
31519c38
AH
4128 tree parm;
4129 if (adj->op == IPA_PARM_OP_NEW)
4130 parm = NULL;
4131 else
4132 parm = oparms[adj->base_index];
3f84bf08
MJ
4133 adj->base = parm;
4134
31519c38 4135 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4136 {
4137 if (care_for_types)
9771b263 4138 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
4139 new_arg_types);
4140 *link = parm;
910ad8de 4141 link = &DECL_CHAIN (parm);
3f84bf08 4142 }
31519c38 4143 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4144 {
4145 tree new_parm;
4146 tree ptype;
4147
4148 if (adj->by_ref)
4149 ptype = build_pointer_type (adj->type);
4150 else
e69dbe37
MJ
4151 {
4152 ptype = adj->type;
26d7dc48
RB
4153 if (is_gimple_reg_type (ptype)
4154 && TYPE_MODE (ptype) != BLKmode)
e69dbe37
MJ
4155 {
4156 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
fe7afdf5 4157 if (TYPE_ALIGN (ptype) != malign)
e69dbe37
MJ
4158 ptype = build_aligned_type (ptype, malign);
4159 }
4160 }
3f84bf08
MJ
4161
4162 if (care_for_types)
4163 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4164
4165 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4166 ptype);
31519c38
AH
4167 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4168 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
4169 DECL_ARTIFICIAL (new_parm) = 1;
4170 DECL_ARG_TYPE (new_parm) = ptype;
4171 DECL_CONTEXT (new_parm) = fndecl;
4172 TREE_USED (new_parm) = 1;
4173 DECL_IGNORED_P (new_parm) = 1;
4174 layout_decl (new_parm, 0);
4175
31519c38
AH
4176 if (adj->op == IPA_PARM_OP_NEW)
4177 adj->base = NULL;
4178 else
4179 adj->base = parm;
4180 adj->new_decl = new_parm;
3f84bf08
MJ
4181
4182 *link = new_parm;
910ad8de 4183 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
4184 }
4185 }
4186
4187 *link = NULL_TREE;
4188
31519c38 4189 tree new_reversed = NULL;
3f84bf08
MJ
4190 if (care_for_types)
4191 {
4192 new_reversed = nreverse (new_arg_types);
4193 if (last_parm_void)
4194 {
4195 if (new_reversed)
4196 TREE_CHAIN (new_arg_types) = void_list_node;
4197 else
4198 new_reversed = void_list_node;
4199 }
4200 }
4201
4202 /* Use copy_node to preserve as much as possible from original type
4203 (debug info, attribute lists etc.)
4204 Exception is METHOD_TYPEs must have THIS argument.
4205 When we are asked to remove it, we need to build new FUNCTION_TYPE
4206 instead. */
31519c38 4207 tree new_type = NULL;
3f84bf08 4208 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 4209 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 4210 && adjustments[0].base_index == 0))
3f84bf08 4211 {
4eb3f32c 4212 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
4213 TYPE_ARG_TYPES (new_type) = new_reversed;
4214 }
4215 else
4216 {
4217 new_type
4218 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4219 new_reversed));
4220 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4221 DECL_VINDEX (fndecl) = NULL_TREE;
4222 }
4223
d402c33d
JH
4224 /* When signature changes, we need to clear builtin info. */
4225 if (DECL_BUILT_IN (fndecl))
4226 {
4227 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4228 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4229 }
4230
3f84bf08 4231 TREE_TYPE (fndecl) = new_type;
9b389a5e 4232 DECL_VIRTUAL_P (fndecl) = 0;
70d6d5c1 4233 DECL_LANG_SPECIFIC (fndecl) = NULL;
9771b263
DN
4234 otypes.release ();
4235 oparms.release ();
3f84bf08
MJ
4236}
4237
4238/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4239 If this is a directly recursive call, CS must be NULL. Otherwise it must
4240 contain the corresponding call graph edge. */
4241
4242void
538dd0b7 4243ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3f84bf08
MJ
4244 ipa_parm_adjustment_vec adjustments)
4245{
d52f5295 4246 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
9771b263
DN
4247 vec<tree> vargs;
4248 vec<tree, va_gc> **debug_args = NULL;
538dd0b7 4249 gcall *new_stmt;
82338059 4250 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
4251 tree callee_decl;
4252 int i, len;
4253
9771b263
DN
4254 len = adjustments.length ();
4255 vargs.create (len);
67348ccc 4256 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
d122681a 4257 current_node->remove_stmt_references (stmt);
3f84bf08
MJ
4258
4259 gsi = gsi_for_stmt (stmt);
82338059
MJ
4260 prev_gsi = gsi;
4261 gsi_prev (&prev_gsi);
3f84bf08
MJ
4262 for (i = 0; i < len; i++)
4263 {
4264 struct ipa_parm_adjustment *adj;
4265
9771b263 4266 adj = &adjustments[i];
3f84bf08 4267
31519c38 4268 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4269 {
4270 tree arg = gimple_call_arg (stmt, adj->base_index);
4271
9771b263 4272 vargs.quick_push (arg);
3f84bf08 4273 }
31519c38 4274 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 4275 {
fffe1e40
MJ
4276 tree expr, base, off;
4277 location_t loc;
f43245d1 4278 unsigned int deref_align = 0;
c1ed6a01 4279 bool deref_base = false;
fffe1e40
MJ
4280
4281 /* We create a new parameter out of the value of the old one, we can
4282 do the following kind of transformations:
4283
4284 - A scalar passed by reference is converted to a scalar passed by
4285 value. (adj->by_ref is false and the type of the original
4286 actual argument is a pointer to a scalar).
4287
4288 - A part of an aggregate is passed instead of the whole aggregate.
4289 The part can be passed either by value or by reference, this is
4290 determined by value of adj->by_ref. Moreover, the code below
4291 handles both situations when the original aggregate is passed by
4292 value (its type is not a pointer) and when it is passed by
4293 reference (it is a pointer to an aggregate).
4294
4295 When the new argument is passed by reference (adj->by_ref is true)
4296 it must be a part of an aggregate and therefore we form it by
4297 simply taking the address of a reference inside the original
4298 aggregate. */
4299
4300 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4301 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
4302 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4303 : EXPR_LOCATION (base);
fffe1e40 4304
82d49829
MJ
4305 if (TREE_CODE (base) != ADDR_EXPR
4306 && POINTER_TYPE_P (TREE_TYPE (base)))
4307 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 4308 adj->offset / BITS_PER_UNIT);
3f84bf08 4309 else
3f84bf08 4310 {
fffe1e40
MJ
4311 HOST_WIDE_INT base_offset;
4312 tree prev_base;
c1ed6a01 4313 bool addrof;
fffe1e40
MJ
4314
4315 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
4316 {
4317 base = TREE_OPERAND (base, 0);
4318 addrof = true;
4319 }
4320 else
4321 addrof = false;
fffe1e40
MJ
4322 prev_base = base;
4323 base = get_addr_base_and_unit_offset (base, &base_offset);
4324 /* Aggregate arguments can have non-invariant addresses. */
4325 if (!base)
4326 {
4327 base = build_fold_addr_expr (prev_base);
82d49829 4328 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4329 adj->offset / BITS_PER_UNIT);
4330 }
4331 else if (TREE_CODE (base) == MEM_REF)
4332 {
c1ed6a01
MJ
4333 if (!addrof)
4334 {
4335 deref_base = true;
4336 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4337 }
82d49829 4338 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4339 base_offset
4340 + adj->offset / BITS_PER_UNIT);
4341 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 4342 off);
fffe1e40
MJ
4343 base = TREE_OPERAND (base, 0);
4344 }
4345 else
4346 {
82d49829 4347 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4348 base_offset
4349 + adj->offset / BITS_PER_UNIT);
4350 base = build_fold_addr_expr (base);
4351 }
3f84bf08 4352 }
fffe1e40 4353
3a5a825a
RG
4354 if (!adj->by_ref)
4355 {
4356 tree type = adj->type;
4357 unsigned int align;
4358 unsigned HOST_WIDE_INT misalign;
644ffefd 4359
c1ed6a01
MJ
4360 if (deref_base)
4361 {
4362 align = deref_align;
4363 misalign = 0;
4364 }
4365 else
4366 {
4367 get_pointer_alignment_1 (base, &align, &misalign);
4368 if (TYPE_ALIGN (type) > align)
4369 align = TYPE_ALIGN (type);
4370 }
807e902e 4371 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3a5a825a
RG
4372 * BITS_PER_UNIT);
4373 misalign = misalign & (align - 1);
4374 if (misalign != 0)
146ec50f 4375 align = least_bit_hwi (misalign);
3a5a825a
RG
4376 if (align < TYPE_ALIGN (type))
4377 type = build_aligned_type (type, align);
4df65a85
RB
4378 base = force_gimple_operand_gsi (&gsi, base,
4379 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4380 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
ee45a32d 4381 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4df65a85
RB
4382 /* If expr is not a valid gimple call argument emit
4383 a load into a temporary. */
4384 if (is_gimple_reg_type (TREE_TYPE (expr)))
4385 {
355fe088 4386 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4df65a85
RB
4387 if (gimple_in_ssa_p (cfun))
4388 {
4389 gimple_set_vuse (tem, gimple_vuse (stmt));
4390 expr = make_ssa_name (TREE_TYPE (expr), tem);
4391 }
4392 else
b731b390 4393 expr = create_tmp_reg (TREE_TYPE (expr));
4df65a85
RB
4394 gimple_assign_set_lhs (tem, expr);
4395 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4396 }
3a5a825a
RG
4397 }
4398 else
4399 {
4400 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
ee45a32d 4401 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3a5a825a 4402 expr = build_fold_addr_expr (expr);
4df65a85
RB
4403 expr = force_gimple_operand_gsi (&gsi, expr,
4404 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4405 }
9771b263 4406 vargs.quick_push (expr);
3f84bf08 4407 }
31519c38 4408 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
4409 {
4410 unsigned int ix;
4411 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
355fe088 4412 gimple *def_temp;
ddb555ed
JJ
4413
4414 arg = gimple_call_arg (stmt, adj->base_index);
4415 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4416 {
4417 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4418 continue;
4419 arg = fold_convert_loc (gimple_location (stmt),
4420 TREE_TYPE (origin), arg);
4421 }
4422 if (debug_args == NULL)
4423 debug_args = decl_debug_args_insert (callee_decl);
9771b263 4424 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
4425 if (ddecl == origin)
4426 {
9771b263 4427 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
4428 break;
4429 }
4430 if (ddecl == NULL)
4431 {
4432 ddecl = make_node (DEBUG_EXPR_DECL);
4433 DECL_ARTIFICIAL (ddecl) = 1;
4434 TREE_TYPE (ddecl) = TREE_TYPE (origin);
899ca90e 4435 SET_DECL_MODE (ddecl, DECL_MODE (origin));
ddb555ed 4436
9771b263
DN
4437 vec_safe_push (*debug_args, origin);
4438 vec_safe_push (*debug_args, ddecl);
ddb555ed 4439 }
9771b263 4440 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
4441 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4442 }
3f84bf08
MJ
4443 }
4444
4445 if (dump_file && (dump_flags & TDF_DETAILS))
4446 {
4447 fprintf (dump_file, "replacing stmt:");
ef6cb4c7 4448 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
3f84bf08
MJ
4449 }
4450
3f84bf08 4451 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 4452 vargs.release ();
3f84bf08
MJ
4453 if (gimple_call_lhs (stmt))
4454 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4455
4456 gimple_set_block (new_stmt, gimple_block (stmt));
4457 if (gimple_has_location (stmt))
4458 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 4459 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 4460 gimple_call_copy_flags (new_stmt, stmt);
4df65a85
RB
4461 if (gimple_in_ssa_p (cfun))
4462 {
4463 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4464 if (gimple_vdef (stmt))
4465 {
4466 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4467 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4468 }
4469 }
3f84bf08
MJ
4470
4471 if (dump_file && (dump_flags & TDF_DETAILS))
4472 {
4473 fprintf (dump_file, "with stmt:");
ef6cb4c7 4474 print_gimple_stmt (dump_file, new_stmt, 0);
3f84bf08
MJ
4475 fprintf (dump_file, "\n");
4476 }
4477 gsi_replace (&gsi, new_stmt, true);
4478 if (cs)
3dafb85c 4479 cs->set_call_stmt (new_stmt);
82338059
MJ
4480 do
4481 {
d52f5295 4482 current_node->record_stmt_references (gsi_stmt (gsi));
82338059
MJ
4483 gsi_prev (&gsi);
4484 }
3d354792 4485 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3f84bf08
MJ
4486}
4487
31519c38
AH
4488/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4489 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4490 specifies whether the function should care about type incompatibility the
4491 current and new expressions. If it is false, the function will leave
4492 incompatibility issues to the caller. Return true iff the expression
4493 was modified. */
4494
4495bool
4496ipa_modify_expr (tree *expr, bool convert,
4497 ipa_parm_adjustment_vec adjustments)
4498{
4499 struct ipa_parm_adjustment *cand
4500 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4501 if (!cand)
4502 return false;
4503
4504 tree src;
4505 if (cand->by_ref)
ee45a32d
EB
4506 {
4507 src = build_simple_mem_ref (cand->new_decl);
4508 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4509 }
31519c38
AH
4510 else
4511 src = cand->new_decl;
4512
4513 if (dump_file && (dump_flags & TDF_DETAILS))
4514 {
4515 fprintf (dump_file, "About to replace expr ");
ef6cb4c7 4516 print_generic_expr (dump_file, *expr);
31519c38 4517 fprintf (dump_file, " with ");
ef6cb4c7 4518 print_generic_expr (dump_file, src);
31519c38
AH
4519 fprintf (dump_file, "\n");
4520 }
4521
4522 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4523 {
4524 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4525 *expr = vce;
4526 }
4527 else
4528 *expr = src;
4529 return true;
4530}
4531
4532/* If T is an SSA_NAME, return NULL if it is not a default def or
4533 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4534 the base variable is always returned, regardless if it is a default
4535 def. Return T if it is not an SSA_NAME. */
4536
4537static tree
4538get_ssa_base_param (tree t, bool ignore_default_def)
4539{
4540 if (TREE_CODE (t) == SSA_NAME)
4541 {
4542 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4543 return SSA_NAME_VAR (t);
4544 else
4545 return NULL_TREE;
4546 }
4547 return t;
4548}
4549
4550/* Given an expression, return an adjustment entry specifying the
4551 transformation to be done on EXPR. If no suitable adjustment entry
4552 was found, returns NULL.
4553
4554 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4555 default def, otherwise bail on them.
4556
4557 If CONVERT is non-NULL, this function will set *CONVERT if the
4558 expression provided is a component reference. ADJUSTMENTS is the
4559 adjustments vector. */
4560
4561ipa_parm_adjustment *
4562ipa_get_adjustment_candidate (tree **expr, bool *convert,
4563 ipa_parm_adjustment_vec adjustments,
4564 bool ignore_default_def)
4565{
4566 if (TREE_CODE (**expr) == BIT_FIELD_REF
4567 || TREE_CODE (**expr) == IMAGPART_EXPR
4568 || TREE_CODE (**expr) == REALPART_EXPR)
4569 {
4570 *expr = &TREE_OPERAND (**expr, 0);
4571 if (convert)
4572 *convert = true;
4573 }
4574
4575 HOST_WIDE_INT offset, size, max_size;
ee45a32d
EB
4576 bool reverse;
4577 tree base
4578 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
31519c38
AH
4579 if (!base || size == -1 || max_size == -1)
4580 return NULL;
4581
4582 if (TREE_CODE (base) == MEM_REF)
4583 {
807e902e 4584 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
31519c38
AH
4585 base = TREE_OPERAND (base, 0);
4586 }
4587
4588 base = get_ssa_base_param (base, ignore_default_def);
4589 if (!base || TREE_CODE (base) != PARM_DECL)
4590 return NULL;
4591
4592 struct ipa_parm_adjustment *cand = NULL;
4593 unsigned int len = adjustments.length ();
4594 for (unsigned i = 0; i < len; i++)
4595 {
4596 struct ipa_parm_adjustment *adj = &adjustments[i];
4597
4598 if (adj->base == base
4599 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4600 {
4601 cand = adj;
4602 break;
4603 }
4604 }
4605
4606 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4607 return NULL;
4608 return cand;
4609}
4610
3f84bf08
MJ
4611/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4612
4613static bool
4614index_in_adjustments_multiple_times_p (int base_index,
4615 ipa_parm_adjustment_vec adjustments)
4616{
9771b263 4617 int i, len = adjustments.length ();
3f84bf08
MJ
4618 bool one = false;
4619
4620 for (i = 0; i < len; i++)
4621 {
4622 struct ipa_parm_adjustment *adj;
9771b263 4623 adj = &adjustments[i];
3f84bf08
MJ
4624
4625 if (adj->base_index == base_index)
4626 {
4627 if (one)
4628 return true;
4629 else
4630 one = true;
4631 }
4632 }
4633 return false;
4634}
4635
4636
4637/* Return adjustments that should have the same effect on function parameters
4638 and call arguments as if they were first changed according to adjustments in
4639 INNER and then by adjustments in OUTER. */
4640
4641ipa_parm_adjustment_vec
4642ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4643 ipa_parm_adjustment_vec outer)
4644{
9771b263
DN
4645 int i, outlen = outer.length ();
4646 int inlen = inner.length ();
3f84bf08
MJ
4647 int removals = 0;
4648 ipa_parm_adjustment_vec adjustments, tmp;
4649
9771b263 4650 tmp.create (inlen);
3f84bf08
MJ
4651 for (i = 0; i < inlen; i++)
4652 {
4653 struct ipa_parm_adjustment *n;
9771b263 4654 n = &inner[i];
3f84bf08 4655
31519c38 4656 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4657 removals++;
4658 else
31519c38
AH
4659 {
4660 /* FIXME: Handling of new arguments are not implemented yet. */
4661 gcc_assert (n->op != IPA_PARM_OP_NEW);
4662 tmp.quick_push (*n);
4663 }
3f84bf08
MJ
4664 }
4665
9771b263 4666 adjustments.create (outlen + removals);
3f84bf08
MJ
4667 for (i = 0; i < outlen; i++)
4668 {
f32682ca 4669 struct ipa_parm_adjustment r;
9771b263
DN
4670 struct ipa_parm_adjustment *out = &outer[i];
4671 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 4672
f32682ca 4673 memset (&r, 0, sizeof (r));
31519c38
AH
4674 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4675 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4676 {
4677 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4678 {
31519c38 4679 r.op = IPA_PARM_OP_REMOVE;
9771b263 4680 adjustments.quick_push (r);
3f84bf08
MJ
4681 }
4682 continue;
4683 }
31519c38
AH
4684 else
4685 {
4686 /* FIXME: Handling of new arguments are not implemented yet. */
4687 gcc_assert (out->op != IPA_PARM_OP_NEW);
4688 }
3f84bf08 4689
f32682ca
DN
4690 r.base_index = in->base_index;
4691 r.type = out->type;
3f84bf08
MJ
4692
4693 /* FIXME: Create nonlocal value too. */
4694
31519c38
AH
4695 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4696 r.op = IPA_PARM_OP_COPY;
4697 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 4698 r.offset = out->offset;
31519c38 4699 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 4700 r.offset = in->offset;
3f84bf08 4701 else
f32682ca 4702 r.offset = in->offset + out->offset;
9771b263 4703 adjustments.quick_push (r);
3f84bf08
MJ
4704 }
4705
4706 for (i = 0; i < inlen; i++)
4707 {
9771b263 4708 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 4709
31519c38 4710 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 4711 adjustments.quick_push (*n);
3f84bf08
MJ
4712 }
4713
9771b263 4714 tmp.release ();
3f84bf08
MJ
4715 return adjustments;
4716}
4717
4718/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4719 friendly way, assuming they are meant to be applied to FNDECL. */
4720
4721void
4722ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4723 tree fndecl)
4724{
9771b263 4725 int i, len = adjustments.length ();
3f84bf08 4726 bool first = true;
9771b263 4727 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
4728
4729 fprintf (file, "IPA param adjustments: ");
4730 for (i = 0; i < len; i++)
4731 {
4732 struct ipa_parm_adjustment *adj;
9771b263 4733 adj = &adjustments[i];
3f84bf08
MJ
4734
4735 if (!first)
4736 fprintf (file, " ");
4737 else
4738 first = false;
4739
4740 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
ef6cb4c7 4741 print_generic_expr (file, parms[adj->base_index]);
3f84bf08
MJ
4742 if (adj->base)
4743 {
4744 fprintf (file, ", base: ");
ef6cb4c7 4745 print_generic_expr (file, adj->base);
3f84bf08 4746 }
31519c38 4747 if (adj->new_decl)
3f84bf08 4748 {
31519c38 4749 fprintf (file, ", new_decl: ");
ef6cb4c7 4750 print_generic_expr (file, adj->new_decl);
3f84bf08
MJ
4751 }
4752 if (adj->new_ssa_base)
4753 {
4754 fprintf (file, ", new_ssa_base: ");
ef6cb4c7 4755 print_generic_expr (file, adj->new_ssa_base);
3f84bf08
MJ
4756 }
4757
31519c38 4758 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08 4759 fprintf (file, ", copy_param");
31519c38 4760 else if (adj->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4761 fprintf (file, ", remove_param");
4762 else
4763 fprintf (file, ", offset %li", (long) adj->offset);
4764 if (adj->by_ref)
4765 fprintf (file, ", by_ref");
4766 print_node_brief (file, ", type: ", adj->type, 0);
4767 fprintf (file, "\n");
4768 }
9771b263 4769 parms.release ();
3f84bf08
MJ
4770}
4771
2c9561b5
MJ
4772/* Dump the AV linked list. */
4773
4774void
4775ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4776{
4777 bool comma = false;
4778 fprintf (f, " Aggregate replacements:");
4779 for (; av; av = av->next)
4780 {
4781 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4782 av->index, av->offset);
ef6cb4c7 4783 print_generic_expr (f, av->value);
2c9561b5
MJ
4784 comma = true;
4785 }
4786 fprintf (f, "\n");
4787}
4788
fb3f88cc
JH
4789/* Stream out jump function JUMP_FUNC to OB. */
4790
4791static void
4792ipa_write_jump_function (struct output_block *ob,
4793 struct ipa_jump_func *jump_func)
4794{
8b7773a4
MJ
4795 struct ipa_agg_jf_item *item;
4796 struct bitpack_d bp;
4797 int i, count;
fb3f88cc 4798
8b7773a4 4799 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
4800 switch (jump_func->type)
4801 {
4802 case IPA_JF_UNKNOWN:
4803 break;
4804 case IPA_JF_CONST:
5368224f 4805 gcc_assert (
4502fe8d
MJ
4806 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4807 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
4808 break;
4809 case IPA_JF_PASS_THROUGH:
412288f1 4810 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4811 if (jump_func->value.pass_through.operation == NOP_EXPR)
4812 {
4813 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4814 bp = bitpack_create (ob->main_stream);
4815 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4816 streamer_write_bitpack (&bp);
4817 }
a2b4c188
KV
4818 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4819 == tcc_unary)
4820 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4821 else
4822 {
4823 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4824 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4825 }
fb3f88cc
JH
4826 break;
4827 case IPA_JF_ANCESTOR:
412288f1 4828 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4829 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4830 bp = bitpack_create (ob->main_stream);
4831 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4832 streamer_write_bitpack (&bp);
fb3f88cc 4833 break;
8b7773a4
MJ
4834 }
4835
9771b263 4836 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4837 streamer_write_uhwi (ob, count);
4838 if (count)
4839 {
4840 bp = bitpack_create (ob->main_stream);
4841 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4842 streamer_write_bitpack (&bp);
4843 }
4844
9771b263 4845 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4846 {
4847 streamer_write_uhwi (ob, item->offset);
4848 stream_write_tree (ob, item->value, true);
fb3f88cc 4849 }
04be694e 4850
209ca542 4851 bp = bitpack_create (ob->main_stream);
86cd0334 4852 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4853 streamer_write_bitpack (&bp);
86cd0334 4854 if (jump_func->bits)
209ca542 4855 {
86cd0334
MJ
4856 streamer_write_widest_int (ob, jump_func->bits->value);
4857 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4858 }
86cd0334 4859 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4860 streamer_write_bitpack (&bp);
86cd0334 4861 if (jump_func->m_vr)
8bc5448f
KV
4862 {
4863 streamer_write_enum (ob->main_stream, value_rang_type,
86cd0334
MJ
4864 VR_LAST, jump_func->m_vr->type);
4865 stream_write_tree (ob, jump_func->m_vr->min, true);
4866 stream_write_tree (ob, jump_func->m_vr->max, true);
8bc5448f 4867 }
fb3f88cc
JH
4868}
4869
4870/* Read in jump function JUMP_FUNC from IB. */
4871
4872static void
4873ipa_read_jump_function (struct lto_input_block *ib,
4874 struct ipa_jump_func *jump_func,
4502fe8d 4875 struct cgraph_edge *cs,
fb3f88cc
JH
4876 struct data_in *data_in)
4877{
4a53743e
MJ
4878 enum jump_func_type jftype;
4879 enum tree_code operation;
8b7773a4 4880 int i, count;
fb3f88cc 4881
4a53743e
MJ
4882 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4883 switch (jftype)
fb3f88cc
JH
4884 {
4885 case IPA_JF_UNKNOWN:
04be694e 4886 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4887 break;
4888 case IPA_JF_CONST:
4502fe8d 4889 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4890 break;
4891 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4892 operation = (enum tree_code) streamer_read_uhwi (ib);
4893 if (operation == NOP_EXPR)
4894 {
4895 int formal_id = streamer_read_uhwi (ib);
4896 struct bitpack_d bp = streamer_read_bitpack (ib);
4897 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4898 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4899 }
a2b4c188
KV
4900 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4901 {
4902 int formal_id = streamer_read_uhwi (ib);
4903 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4904 }
4a53743e
MJ
4905 else
4906 {
4907 tree operand = stream_read_tree (ib, data_in);
4908 int formal_id = streamer_read_uhwi (ib);
4909 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4910 operation);
4911 }
fb3f88cc
JH
4912 break;
4913 case IPA_JF_ANCESTOR:
4a53743e
MJ
4914 {
4915 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4916 int formal_id = streamer_read_uhwi (ib);
4917 struct bitpack_d bp = streamer_read_bitpack (ib);
4918 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4919 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4920 break;
4921 }
8b7773a4
MJ
4922 }
4923
4924 count = streamer_read_uhwi (ib);
9771b263 4925 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4926 if (count)
4927 {
4a53743e 4928 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4929 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4930 }
4931 for (i = 0; i < count; i++)
4932 {
f32682ca
DN
4933 struct ipa_agg_jf_item item;
4934 item.offset = streamer_read_uhwi (ib);
4935 item.value = stream_read_tree (ib, data_in);
9771b263 4936 jump_func->agg.items->quick_push (item);
fb3f88cc 4937 }
04be694e
MJ
4938
4939 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4940 bool bits_known = bp_unpack_value (&bp, 1);
4941 if (bits_known)
4942 {
86cd0334
MJ
4943 widest_int value = streamer_read_widest_int (ib);
4944 widest_int mask = streamer_read_widest_int (ib);
4945 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4946 }
4947 else
86cd0334 4948 jump_func->bits = NULL;
8bc5448f
KV
4949
4950 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4951 bool vr_known = bp_unpack_value (&vr_bp, 1);
4952 if (vr_known)
4953 {
86cd0334
MJ
4954 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4955 VR_LAST);
4956 tree min = stream_read_tree (ib, data_in);
4957 tree max = stream_read_tree (ib, data_in);
4958 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4959 }
4960 else
86cd0334 4961 jump_func->m_vr = NULL;
fb3f88cc
JH
4962}
4963
e33c6cd6
MJ
4964/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4965 relevant to indirect inlining to OB. */
661e7330
MJ
4966
4967static void
e33c6cd6
MJ
4968ipa_write_indirect_edge_info (struct output_block *ob,
4969 struct cgraph_edge *cs)
661e7330 4970{
e33c6cd6 4971 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4972 struct bitpack_d bp;
e33c6cd6 4973
412288f1 4974 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4975 bp = bitpack_create (ob->main_stream);
4976 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4977 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4978 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4979 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4980 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4981 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 4982 streamer_write_bitpack (&bp);
ba392339
JH
4983 if (ii->agg_contents || ii->polymorphic)
4984 streamer_write_hwi (ob, ii->offset);
4985 else
4986 gcc_assert (ii->offset == 0);
b258210c
MJ
4987
4988 if (ii->polymorphic)
4989 {
412288f1 4990 streamer_write_hwi (ob, ii->otr_token);
b9393656 4991 stream_write_tree (ob, ii->otr_type, true);
ba392339 4992 ii->context.stream_out (ob);
b258210c 4993 }
661e7330
MJ
4994}
4995
e33c6cd6
MJ
4996/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4997 relevant to indirect inlining from IB. */
661e7330
MJ
4998
4999static void
e33c6cd6 5000ipa_read_indirect_edge_info (struct lto_input_block *ib,
ba392339 5001 struct data_in *data_in,
e33c6cd6 5002 struct cgraph_edge *cs)
661e7330 5003{
e33c6cd6 5004 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 5005 struct bitpack_d bp;
661e7330 5006
412288f1 5007 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 5008 bp = streamer_read_bitpack (ib);
2465dcc2 5009 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 5010 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 5011 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 5012 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 5013 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 5014 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
5015 if (ii->agg_contents || ii->polymorphic)
5016 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5017 else
5018 ii->offset = 0;
b258210c
MJ
5019 if (ii->polymorphic)
5020 {
412288f1 5021 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 5022 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 5023 ii->context.stream_in (ib, data_in);
b258210c 5024 }
661e7330
MJ
5025}
5026
fb3f88cc
JH
5027/* Stream out NODE info to OB. */
5028
5029static void
5030ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5031{
5032 int node_ref;
7380e6ef 5033 lto_symtab_encoder_t encoder;
fb3f88cc
JH
5034 struct ipa_node_params *info = IPA_NODE_REF (node);
5035 int j;
5036 struct cgraph_edge *e;
2465dcc2 5037 struct bitpack_d bp;
fb3f88cc 5038
7380e6ef 5039 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5040 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 5041 streamer_write_uhwi (ob, node_ref);
fb3f88cc 5042
0e8853ee
JH
5043 streamer_write_uhwi (ob, ipa_get_param_count (info));
5044 for (j = 0; j < ipa_get_param_count (info); j++)
5045 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 5046 bp = bitpack_create (ob->main_stream);
8aab5218 5047 gcc_assert (info->analysis_done
661e7330 5048 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
5049 gcc_assert (!info->node_enqueued);
5050 gcc_assert (!info->ipcp_orig_node);
5051 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 5052 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 5053 streamer_write_bitpack (&bp);
4502fe8d 5054 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
5055 {
5056 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5057 stream_write_tree (ob, ipa_get_type (info, j), true);
5058 }
fb3f88cc
JH
5059 for (e = node->callees; e; e = e->next_callee)
5060 {
5061 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5062
5ce97055
JH
5063 streamer_write_uhwi (ob,
5064 ipa_get_cs_argument_count (args) * 2
5065 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 5066 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
5067 {
5068 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5069 if (args->polymorphic_call_contexts != NULL)
5070 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5071 }
fb3f88cc 5072 }
e33c6cd6 5073 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
5074 {
5075 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5076
5ce97055
JH
5077 streamer_write_uhwi (ob,
5078 ipa_get_cs_argument_count (args) * 2
5079 + (args->polymorphic_call_contexts != NULL));
c8246dbe 5080 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
5081 {
5082 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5083 if (args->polymorphic_call_contexts != NULL)
5084 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5085 }
c8246dbe
JH
5086 ipa_write_indirect_edge_info (ob, e);
5087 }
fb3f88cc
JH
5088}
5089
61502ca8 5090/* Stream in NODE info from IB. */
fb3f88cc
JH
5091
5092static void
5093ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5094 struct data_in *data_in)
5095{
5096 struct ipa_node_params *info = IPA_NODE_REF (node);
5097 int k;
5098 struct cgraph_edge *e;
2465dcc2 5099 struct bitpack_d bp;
fb3f88cc 5100
0e8853ee 5101 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 5102
0e8853ee 5103 for (k = 0; k < ipa_get_param_count (info); k++)
f65f1ae3 5104 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
a5e14a42 5105
412288f1 5106 bp = streamer_read_bitpack (ib);
fb3f88cc 5107 if (ipa_get_param_count (info) != 0)
8aab5218 5108 info->analysis_done = true;
fb3f88cc
JH
5109 info->node_enqueued = false;
5110 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 5111 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a 5112 for (k = 0; k < ipa_get_param_count (info); k++)
a5e14a42
MJ
5113 {
5114 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
f65f1ae3 5115 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
a5e14a42 5116 }
fb3f88cc
JH
5117 for (e = node->callees; e; e = e->next_callee)
5118 {
5119 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 5120 int count = streamer_read_uhwi (ib);
5ce97055
JH
5121 bool contexts_computed = count & 1;
5122 count /= 2;
fb3f88cc 5123
fb3f88cc
JH
5124 if (!count)
5125 continue;
9771b263 5126 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
5127 if (contexts_computed)
5128 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
fb3f88cc 5129
fb3f88cc 5130 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
5131 {
5132 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5133 data_in);
5134 if (contexts_computed)
5135 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5136 }
fb3f88cc 5137 }
e33c6cd6 5138 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
5139 {
5140 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 5141 int count = streamer_read_uhwi (ib);
5ce97055
JH
5142 bool contexts_computed = count & 1;
5143 count /= 2;
c8246dbe 5144
c8246dbe
JH
5145 if (count)
5146 {
9771b263 5147 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
5148 if (contexts_computed)
5149 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
c8246dbe 5150 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
5151 {
5152 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5153 data_in);
5154 if (contexts_computed)
5155 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5156 }
c8246dbe
JH
5157 }
5158 ipa_read_indirect_edge_info (ib, data_in, e);
5159 }
fb3f88cc
JH
5160}
5161
5162/* Write jump functions for nodes in SET. */
5163
5164void
f27c1867 5165ipa_prop_write_jump_functions (void)
fb3f88cc
JH
5166{
5167 struct cgraph_node *node;
93536c97 5168 struct output_block *ob;
fb3f88cc 5169 unsigned int count = 0;
f27c1867
JH
5170 lto_symtab_encoder_iterator lsei;
5171 lto_symtab_encoder_t encoder;
5172
6fe906a3 5173 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 5174 return;
fb3f88cc 5175
93536c97 5176 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5177 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5178 ob->symbol = NULL;
f27c1867
JH
5179 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5180 lsei_next_function_in_partition (&lsei))
fb3f88cc 5181 {
f27c1867 5182 node = lsei_cgraph_node (lsei);
d52f5295 5183 if (node->has_gimple_body_p ()
c47d0034 5184 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5185 count++;
5186 }
5187
412288f1 5188 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5189
5190 /* Process all of the functions. */
f27c1867
JH
5191 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5192 lsei_next_function_in_partition (&lsei))
fb3f88cc 5193 {
f27c1867 5194 node = lsei_cgraph_node (lsei);
d52f5295 5195 if (node->has_gimple_body_p ()
c47d0034 5196 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5197 ipa_write_node_info (ob, node);
5198 }
412288f1 5199 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5200 produce_asm (ob, NULL);
5201 destroy_output_block (ob);
5202}
5203
5204/* Read section in file FILE_DATA of length LEN with data DATA. */
5205
5206static void
5207ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5208 size_t len)
5209{
5210 const struct lto_function_header *header =
5211 (const struct lto_function_header *) data;
4ad9a9de
EB
5212 const int cfg_offset = sizeof (struct lto_function_header);
5213 const int main_offset = cfg_offset + header->cfg_size;
5214 const int string_offset = main_offset + header->main_size;
fb3f88cc 5215 struct data_in *data_in;
fb3f88cc
JH
5216 unsigned int i;
5217 unsigned int count;
5218
207c68cd 5219 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5220 header->main_size, file_data->mode_table);
fb3f88cc
JH
5221
5222 data_in =
5223 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5224 header->string_size, vNULL);
412288f1 5225 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
5226
5227 for (i = 0; i < count; i++)
5228 {
5229 unsigned int index;
5230 struct cgraph_node *node;
7380e6ef 5231 lto_symtab_encoder_t encoder;
fb3f88cc 5232
412288f1 5233 index = streamer_read_uhwi (&ib_main);
7380e6ef 5234 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5235 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5236 index));
67348ccc 5237 gcc_assert (node->definition);
fb3f88cc
JH
5238 ipa_read_node_info (&ib_main, node, data_in);
5239 }
5240 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5241 len);
5242 lto_data_in_delete (data_in);
5243}
5244
5245/* Read ipcp jump functions. */
5246
5247void
5248ipa_prop_read_jump_functions (void)
5249{
5250 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5251 struct lto_file_decl_data *file_data;
5252 unsigned int j = 0;
5253
5254 ipa_check_create_node_params ();
5255 ipa_check_create_edge_args ();
5256 ipa_register_cgraph_hooks ();
5257
5258 while ((file_data = file_data_vec[j++]))
5259 {
5260 size_t len;
5261 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5262
5263 if (data)
5264 ipa_prop_read_section (file_data, data, len);
5265 }
5266}
5267
2c9561b5 5268void
04be694e 5269write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
5270{
5271 int node_ref;
5272 unsigned int count = 0;
5273 lto_symtab_encoder_t encoder;
5274 struct ipa_agg_replacement_value *aggvals, *av;
5275
5276 aggvals = ipa_get_agg_replacements_for_node (node);
5277 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5278 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
5279 streamer_write_uhwi (ob, node_ref);
5280
5281 for (av = aggvals; av; av = av->next)
5282 count++;
5283 streamer_write_uhwi (ob, count);
5284
5285 for (av = aggvals; av; av = av->next)
5286 {
7b920a9a
MJ
5287 struct bitpack_d bp;
5288
2c9561b5
MJ
5289 streamer_write_uhwi (ob, av->offset);
5290 streamer_write_uhwi (ob, av->index);
5291 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
5292
5293 bp = bitpack_create (ob->main_stream);
5294 bp_pack_value (&bp, av->by_ref, 1);
5295 streamer_write_bitpack (&bp);
2c9561b5 5296 }
04be694e
MJ
5297
5298 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5299 if (ts && vec_safe_length (ts->m_vr) > 0)
5300 {
5301 count = ts->m_vr->length ();
5302 streamer_write_uhwi (ob, count);
5303 for (unsigned i = 0; i < count; ++i)
5304 {
5305 struct bitpack_d bp;
5306 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5307 bp = bitpack_create (ob->main_stream);
5308 bp_pack_value (&bp, parm_vr->known, 1);
5309 streamer_write_bitpack (&bp);
5310 if (parm_vr->known)
5311 {
5312 streamer_write_enum (ob->main_stream, value_rang_type,
5313 VR_LAST, parm_vr->type);
5314 streamer_write_wide_int (ob, parm_vr->min);
5315 streamer_write_wide_int (ob, parm_vr->max);
5316 }
5317 }
5318 }
5319 else
5320 streamer_write_uhwi (ob, 0);
5321
209ca542
PK
5322 if (ts && vec_safe_length (ts->bits) > 0)
5323 {
5324 count = ts->bits->length ();
5325 streamer_write_uhwi (ob, count);
5326
5327 for (unsigned i = 0; i < count; ++i)
5328 {
86cd0334 5329 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 5330 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 5331 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 5332 streamer_write_bitpack (&bp);
86cd0334 5333 if (bits_jfunc)
209ca542 5334 {
86cd0334
MJ
5335 streamer_write_widest_int (ob, bits_jfunc->value);
5336 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
5337 }
5338 }
5339 }
5340 else
5341 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
5342}
5343
5344/* Stream in the aggregate value replacement chain for NODE from IB. */
5345
5346static void
04be694e
MJ
5347read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5348 data_in *data_in)
2c9561b5
MJ
5349{
5350 struct ipa_agg_replacement_value *aggvals = NULL;
5351 unsigned int count, i;
5352
5353 count = streamer_read_uhwi (ib);
5354 for (i = 0; i <count; i++)
5355 {
5356 struct ipa_agg_replacement_value *av;
7b920a9a 5357 struct bitpack_d bp;
2c9561b5 5358
766090c2 5359 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
5360 av->offset = streamer_read_uhwi (ib);
5361 av->index = streamer_read_uhwi (ib);
5362 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
5363 bp = streamer_read_bitpack (ib);
5364 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
5365 av->next = aggvals;
5366 aggvals = av;
5367 }
5368 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 5369
209ca542
PK
5370 count = streamer_read_uhwi (ib);
5371 if (count > 0)
5372 {
5373 ipcp_grow_transformations_if_necessary ();
8bc5448f
KV
5374
5375 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5376 vec_safe_grow_cleared (ts->m_vr, count);
5377 for (i = 0; i < count; i++)
5378 {
5379 ipa_vr *parm_vr;
5380 parm_vr = &(*ts->m_vr)[i];
5381 struct bitpack_d bp;
5382 bp = streamer_read_bitpack (ib);
5383 parm_vr->known = bp_unpack_value (&bp, 1);
5384 if (parm_vr->known)
5385 {
5386 parm_vr->type = streamer_read_enum (ib, value_range_type,
5387 VR_LAST);
5388 parm_vr->min = streamer_read_wide_int (ib);
5389 parm_vr->max = streamer_read_wide_int (ib);
5390 }
5391 }
5392 }
5393 count = streamer_read_uhwi (ib);
5394 if (count > 0)
5395 {
5396 ipcp_grow_transformations_if_necessary ();
5397
209ca542
PK
5398 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5399 vec_safe_grow_cleared (ts->bits, count);
5400
5401 for (i = 0; i < count; i++)
5402 {
209ca542 5403 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
5404 bool known = bp_unpack_value (&bp, 1);
5405 if (known)
209ca542 5406 {
86cd0334
MJ
5407 ipa_bits *bits
5408 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5409 streamer_read_widest_int (ib));
5410 (*ts->bits)[i] = bits;
209ca542
PK
5411 }
5412 }
5413 }
2c9561b5
MJ
5414}
5415
5416/* Write all aggregate replacement for nodes in set. */
5417
5418void
04be694e 5419ipcp_write_transformation_summaries (void)
2c9561b5
MJ
5420{
5421 struct cgraph_node *node;
5422 struct output_block *ob;
5423 unsigned int count = 0;
5424 lto_symtab_encoder_iterator lsei;
5425 lto_symtab_encoder_t encoder;
5426
2c9561b5
MJ
5427 ob = create_output_block (LTO_section_ipcp_transform);
5428 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5429 ob->symbol = NULL;
2c9561b5
MJ
5430 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5431 lsei_next_function_in_partition (&lsei))
5432 {
5433 node = lsei_cgraph_node (lsei);
04be694e 5434 if (node->has_gimple_body_p ())
2c9561b5
MJ
5435 count++;
5436 }
5437
5438 streamer_write_uhwi (ob, count);
5439
5440 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5441 lsei_next_function_in_partition (&lsei))
5442 {
5443 node = lsei_cgraph_node (lsei);
04be694e
MJ
5444 if (node->has_gimple_body_p ())
5445 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
5446 }
5447 streamer_write_char_stream (ob->main_stream, 0);
5448 produce_asm (ob, NULL);
5449 destroy_output_block (ob);
5450}
5451
5452/* Read replacements section in file FILE_DATA of length LEN with data
5453 DATA. */
5454
5455static void
5456read_replacements_section (struct lto_file_decl_data *file_data,
5457 const char *data,
5458 size_t len)
5459{
5460 const struct lto_function_header *header =
5461 (const struct lto_function_header *) data;
5462 const int cfg_offset = sizeof (struct lto_function_header);
5463 const int main_offset = cfg_offset + header->cfg_size;
5464 const int string_offset = main_offset + header->main_size;
5465 struct data_in *data_in;
2c9561b5
MJ
5466 unsigned int i;
5467 unsigned int count;
5468
207c68cd 5469 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5470 header->main_size, file_data->mode_table);
2c9561b5
MJ
5471
5472 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5473 header->string_size, vNULL);
2c9561b5
MJ
5474 count = streamer_read_uhwi (&ib_main);
5475
5476 for (i = 0; i < count; i++)
5477 {
5478 unsigned int index;
5479 struct cgraph_node *node;
5480 lto_symtab_encoder_t encoder;
5481
5482 index = streamer_read_uhwi (&ib_main);
5483 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5484 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5485 index));
67348ccc 5486 gcc_assert (node->definition);
04be694e 5487 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5488 }
5489 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5490 len);
5491 lto_data_in_delete (data_in);
5492}
5493
5494/* Read IPA-CP aggregate replacements. */
5495
5496void
04be694e 5497ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5498{
5499 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5500 struct lto_file_decl_data *file_data;
5501 unsigned int j = 0;
5502
5503 while ((file_data = file_data_vec[j++]))
5504 {
5505 size_t len;
5506 const char *data = lto_get_section_data (file_data,
5507 LTO_section_ipcp_transform,
5508 NULL, &len);
5509 if (data)
5510 read_replacements_section (file_data, data, len);
5511 }
5512}
5513
5514/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5515 NODE. */
5516
5517static void
5518adjust_agg_replacement_values (struct cgraph_node *node,
5519 struct ipa_agg_replacement_value *aggval)
5520{
5521 struct ipa_agg_replacement_value *v;
5522 int i, c = 0, d = 0, *adj;
5523
5524 if (!node->clone.combined_args_to_skip)
5525 return;
5526
5527 for (v = aggval; v; v = v->next)
5528 {
5529 gcc_assert (v->index >= 0);
5530 if (c < v->index)
5531 c = v->index;
5532 }
5533 c++;
5534
5535 adj = XALLOCAVEC (int, c);
5536 for (i = 0; i < c; i++)
5537 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5538 {
5539 adj[i] = -1;
5540 d++;
5541 }
5542 else
5543 adj[i] = i - d;
5544
5545 for (v = aggval; v; v = v->next)
5546 v->index = adj[v->index];
5547}
5548
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI holds function-body analysis state, DESCS the parameter
     descriptors of the function being walked, AV the list of aggregate
     constants discovered by IPA-CP.  SC and CC point to caller-owned flags
     that the walk sets when it modifies a statement resp. the CFG.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body analysis info of the walked function.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the walked function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Chain of known aggregate constants to substitute for loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags: a statement was changed / the CFG was changed.  */
  bool *m_something_changed, *m_cfg_changed;
};
5569
3daacdcd 5570edge
8aab5218
MJ
5571ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5572{
5573 gimple_stmt_iterator gsi;
5574 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5575 {
5576 struct ipa_agg_replacement_value *v;
355fe088 5577 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
5578 tree rhs, val, t;
5579 HOST_WIDE_INT offset, size;
5580 int index;
5581 bool by_ref, vce;
5582
5583 if (!gimple_assign_load_p (stmt))
5584 continue;
5585 rhs = gimple_assign_rhs1 (stmt);
5586 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5587 continue;
2c9561b5 5588
8aab5218
MJ
5589 vce = false;
5590 t = rhs;
5591 while (handled_component_p (t))
5592 {
5593 /* V_C_E can do things like convert an array of integers to one
5594 bigger integer and similar things we do not handle below. */
5595 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5596 {
5597 vce = true;
5598 break;
5599 }
5600 t = TREE_OPERAND (t, 0);
5601 }
5602 if (vce)
5603 continue;
5604
ff302741
PB
5605 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5606 &offset, &size, &by_ref))
8aab5218
MJ
5607 continue;
5608 for (v = m_aggval; v; v = v->next)
5609 if (v->index == index
5610 && v->offset == offset)
5611 break;
5612 if (!v
5613 || v->by_ref != by_ref
5614 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5615 continue;
5616
5617 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5618 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5619 {
5620 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5621 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5622 else if (TYPE_SIZE (TREE_TYPE (rhs))
5623 == TYPE_SIZE (TREE_TYPE (v->value)))
5624 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5625 else
5626 {
5627 if (dump_file)
5628 {
5629 fprintf (dump_file, " const ");
ef6cb4c7 5630 print_generic_expr (dump_file, v->value);
8aab5218 5631 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 5632 print_generic_expr (dump_file, rhs);
8aab5218
MJ
5633 fprintf (dump_file, "\n");
5634 }
5635 continue;
5636 }
5637 }
5638 else
5639 val = v->value;
5640
5641 if (dump_file && (dump_flags & TDF_DETAILS))
5642 {
5643 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 5644 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
5645 }
5646 gimple_assign_set_rhs_from_tree (&gsi, val);
5647 update_stmt (stmt);
5648
5649 if (dump_file && (dump_flags & TDF_DETAILS))
5650 {
5651 fprintf (dump_file, "into:\n ");
ef6cb4c7 5652 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
5653 fprintf (dump_file, "\n");
5654 }
5655
5656 *m_something_changed = true;
5657 if (maybe_clean_eh_stmt (stmt)
5658 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5659 *m_cfg_changed = true;
5660 }
3daacdcd 5661 return NULL;
8aab5218
MJ
5662}
5663
209ca542
PK
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  For integral parameters this records
   known-nonzero bits; for pointer parameters it derives and records
   alignment information.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  /* Iterate bits entries and parameter decls in lock step; the entries
     are indexed by the original parameter positions.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Skipped-in-clone parameters have no decl to advance over.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only integral or pointer SSA-register parameters with known bits
	 can carry this info.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      /* The info is attached to the parameter's default-definition SSA
	 name; without one the parameter is unused.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in the mask are unknown, bits set in value are known
	     one; their union is the set of possibly-nonzero bits.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* Pointer case: the lowest set bit of the unknown-bits mask
	     bounds the guaranteed alignment, and the known value bits
	     below it give the misalignment.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment that is already known stronger.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      /* Diagnose (but still overwrite) a misalignment that
		 contradicts previously recorded info.  */
	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5755
8bc5448f
KV
/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  Integral parameters get a range/anti-range
   attached to their default-def SSA name; pointer parameters whose
   anti-range is exactly ~[0, 0] are marked non-null.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  /* Walk VR entries and parameter decls in lock step; entries are indexed
     by original parameter positions.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Parameters removed in this clone have no decl to advance over.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      /* Only SSA-register parameters that are actually used (have a
	 default definition) can carry range info.  */
      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      /* Re-extend the streamed bounds to the parameter's own
		 precision and signedness before recording them.  */
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      /* ~[0, 0] on a pointer means it is never null.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5818
8aab5218 5819/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5820
5821unsigned int
5822ipcp_transform_function (struct cgraph_node *node)
5823{
f65f1ae3 5824 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
56b40062 5825 struct ipa_func_body_info fbi;
2c9561b5 5826 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5827 int param_count;
5828 bool cfg_changed = false, something_changed = false;
5829
5830 gcc_checking_assert (cfun);
5831 gcc_checking_assert (current_function_decl);
5832
5833 if (dump_file)
464d0118
ML
5834 fprintf (dump_file, "Modification phase of node %s\n",
5835 node->dump_name ());
2c9561b5 5836
209ca542 5837 ipcp_update_bits (node);
8bc5448f 5838 ipcp_update_vr (node);
2c9561b5
MJ
5839 aggval = ipa_get_agg_replacements_for_node (node);
5840 if (!aggval)
5841 return 0;
67348ccc 5842 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5843 if (param_count == 0)
5844 return 0;
5845 adjust_agg_replacement_values (node, aggval);
5846 if (dump_file)
5847 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5848
8aab5218
MJ
5849 fbi.node = node;
5850 fbi.info = NULL;
5851 fbi.bb_infos = vNULL;
5852 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5853 fbi.param_count = param_count;
5854 fbi.aa_walked = 0;
2c9561b5 5855
f65f1ae3
MJ
5856 vec_safe_grow_cleared (descriptors, param_count);
5857 ipa_populate_param_decls (node, *descriptors);
8aab5218
MJ
5858 calculate_dominance_info (CDI_DOMINATORS);
5859 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5860 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5861
8aab5218
MJ
5862 int i;
5863 struct ipa_bb_info *bi;
5864 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5865 free_ipa_bb_info (bi);
5866 fbi.bb_infos.release ();
5867 free_dominance_info (CDI_DOMINATORS);
04be694e 5868 (*ipcp_transformations)[node->uid].agg_values = NULL;
676b4899
PK
5869 (*ipcp_transformations)[node->uid].bits = NULL;
5870 (*ipcp_transformations)[node->uid].m_vr = NULL;
5871
f65f1ae3 5872 vec_free (descriptors);
2c9561b5
MJ
5873
5874 if (!something_changed)
5875 return 0;
5876 else if (cfg_changed)
5877 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5878 else
5879 return TODO_update_ssa_only_virtuals;
5880}
86cd0334
MJ
5881
5882#include "gt-ipa-prop.h"