]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Move symtab_node::dump_table to symbol_table::dump
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
cbe34bb5 2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
27d020cf 48#include "ipa-fnsummary.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
/* Function summary where the parameter infos are actually stored; one entry
   per analyzed cgraph node.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Vector of IPA-CP transformation data for each clone, indexed by node UID;
   consumed by ipcp_transform_function.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;

/* Edge summary for IPA-CP edge information (the jump functions of each call
   argument).  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
86cd0334
MJ
63/* Traits for a hash table for reusing already existing ipa_bits. */
64
65struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
66{
67 typedef ipa_bits *value_type;
68 typedef ipa_bits *compare_type;
69 static hashval_t
70 hash (const ipa_bits *p)
71 {
72 hashval_t t = (hashval_t) p->value.to_shwi ();
73 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
74 }
75 static bool
76 equal (const ipa_bits *a, const ipa_bits *b)
77 {
78 return a->value == b->value && a->mask == b->mask;
79 }
80 static void
81 mark_empty (ipa_bits *&p)
82 {
83 p = NULL;
84 }
85 static bool
86 is_empty (const ipa_bits *p)
87 {
88 return p == NULL;
89 }
90 static bool
91 is_deleted (const ipa_bits *p)
92 {
93 return p == reinterpret_cast<const ipa_bits *> (1);
94 }
95 static void
96 mark_deleted (ipa_bits *&p)
97 {
98 p = reinterpret_cast<ipa_bits *> (1);
99 }
100};
101
102/* Hash table for avoid repeated allocations of equal ipa_bits. */
103static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
104
105/* Traits for a hash table for reusing value_ranges used for IPA. Note that
106 the equiv bitmap is not hashed and is expected to be NULL. */
107
108struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
109{
110 typedef value_range *value_type;
111 typedef value_range *compare_type;
112 static hashval_t
113 hash (const value_range *p)
114 {
115 gcc_checking_assert (!p->equiv);
116 hashval_t t = (hashval_t) p->type;
117 t = iterative_hash_expr (p->min, t);
118 return iterative_hash_expr (p->max, t);
119 }
120 static bool
121 equal (const value_range *a, const value_range *b)
122 {
123 return a->type == b->type && a->min == b->min && a->max == b->max;
124 }
125 static void
126 mark_empty (value_range *&p)
127 {
128 p = NULL;
129 }
130 static bool
131 is_empty (const value_range *p)
132 {
133 return p == NULL;
134 }
135 static bool
136 is_deleted (const value_range *p)
137 {
138 return p == reinterpret_cast<const value_range *> (1);
139 }
140 static void
141 mark_deleted (value_range *&p)
142 {
143 p = reinterpret_cast<value_range *> (1);
144 }
145};
146
147/* Hash table for avoid repeated allocations of equal value_ranges. */
148static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: handle registered for the hook that fires
   when a new function is inserted into the call graph.  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 152
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control (i.e. we can no longer track all uses).  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
5fe8e757
MJ
170/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
171 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
172
173static bool
174ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
175{
67348ccc 176 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
177
178 if (!fs_opts)
179 return false;
2bf86c84 180 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
181}
182
be95e2b9
MJ
183/* Return index of the formal whose tree is PTREE in function which corresponds
184 to INFO. */
185
d044dd17 186static int
f65f1ae3
MJ
187ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
188 tree ptree)
518dc859
RL
189{
190 int i, count;
191
f65f1ae3 192 count = vec_safe_length (descriptors);
518dc859 193 for (i = 0; i < count; i++)
f65f1ae3 194 if ((*descriptors)[i].decl_or_type == ptree)
518dc859
RL
195 return i;
196
197 return -1;
198}
199
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if PTREE is not a formal parameter of that function.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
208
209/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
210 NODE. */
be95e2b9 211
f8e2a1ed
MJ
212static void
213ipa_populate_param_decls (struct cgraph_node *node,
f65f1ae3 214 vec<ipa_param_descriptor, va_gc> &descriptors)
518dc859
RL
215{
216 tree fndecl;
217 tree fnargs;
218 tree parm;
219 int param_num;
3e293154 220
67348ccc 221 fndecl = node->decl;
0e8853ee 222 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
223 fnargs = DECL_ARGUMENTS (fndecl);
224 param_num = 0;
910ad8de 225 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 226 {
209ca542 227 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
228 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
229 true);
518dc859
RL
230 param_num++;
231 }
232}
233
3f84bf08
MJ
234/* Return how many formal parameters FNDECL has. */
235
fd29c024 236int
310bc633 237count_formal_params (tree fndecl)
3f84bf08
MJ
238{
239 tree parm;
240 int count = 0;
0e8853ee 241 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 242
910ad8de 243 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
244 count++;
245
246 return count;
247}
248
0e8853ee
JH
249/* Return the declaration of Ith formal parameter of the function corresponding
250 to INFO. Note there is no setter function as this array is built just once
251 using ipa_initialize_node_params. */
252
253void
254ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
255{
256 fprintf (file, "param #%i", i);
f65f1ae3 257 if ((*info->descriptors)[i].decl_or_type)
0e8853ee
JH
258 {
259 fprintf (file, " ");
ef6cb4c7 260 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
0e8853ee
JH
261 }
262}
263
159f01f8
MJ
264/* If necessary, allocate vector of parameter descriptors in info of NODE.
265 Return true if they were allocated, false if not. */
0e8853ee 266
159f01f8 267static bool
0e8853ee
JH
268ipa_alloc_node_params (struct cgraph_node *node, int param_count)
269{
270 struct ipa_node_params *info = IPA_NODE_REF (node);
271
f65f1ae3 272 if (!info->descriptors && param_count)
159f01f8
MJ
273 {
274 vec_safe_grow_cleared (info->descriptors, param_count);
275 return true;
276 }
277 else
278 return false;
0e8853ee
JH
279}
280
f8e2a1ed
MJ
281/* Initialize the ipa_node_params structure associated with NODE by counting
282 the function parameters, creating the descriptors and populating their
283 param_decls. */
be95e2b9 284
f8e2a1ed
MJ
285void
286ipa_initialize_node_params (struct cgraph_node *node)
287{
288 struct ipa_node_params *info = IPA_NODE_REF (node);
289
159f01f8
MJ
290 if (!info->descriptors
291 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
292 ipa_populate_param_decls (node, *info->descriptors);
518dc859
RL
293}
294
749aa96d
MJ
295/* Print the jump functions associated with call graph edge CS to file F. */
296
297static void
298ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
299{
300 int i, count;
301
302 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
303 for (i = 0; i < count; i++)
304 {
305 struct ipa_jump_func *jump_func;
306 enum jump_func_type type;
307
308 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
309 type = jump_func->type;
310
311 fprintf (f, " param %d: ", i);
312 if (type == IPA_JF_UNKNOWN)
313 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
314 else if (type == IPA_JF_CONST)
315 {
4502fe8d 316 tree val = jump_func->value.constant.value;
749aa96d 317 fprintf (f, "CONST: ");
ef6cb4c7 318 print_generic_expr (f, val);
749aa96d
MJ
319 if (TREE_CODE (val) == ADDR_EXPR
320 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
321 {
322 fprintf (f, " -> ");
ef6cb4c7 323 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
749aa96d
MJ
324 }
325 fprintf (f, "\n");
326 }
749aa96d
MJ
327 else if (type == IPA_JF_PASS_THROUGH)
328 {
329 fprintf (f, "PASS THROUGH: ");
8b7773a4 330 fprintf (f, "%d, op %s",
749aa96d 331 jump_func->value.pass_through.formal_id,
5806f481 332 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 333 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
334 {
335 fprintf (f, " ");
ef6cb4c7 336 print_generic_expr (f, jump_func->value.pass_through.operand);
8b7773a4
MJ
337 }
338 if (jump_func->value.pass_through.agg_preserved)
339 fprintf (f, ", agg_preserved");
3ea6239f 340 fprintf (f, "\n");
749aa96d
MJ
341 }
342 else if (type == IPA_JF_ANCESTOR)
343 {
344 fprintf (f, "ANCESTOR: ");
16998094 345 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
346 jump_func->value.ancestor.formal_id,
347 jump_func->value.ancestor.offset);
8b7773a4
MJ
348 if (jump_func->value.ancestor.agg_preserved)
349 fprintf (f, ", agg_preserved");
3ea6239f 350 fprintf (f, "\n");
749aa96d 351 }
8b7773a4
MJ
352
353 if (jump_func->agg.items)
354 {
355 struct ipa_agg_jf_item *item;
356 int j;
357
358 fprintf (f, " Aggregate passed by %s:\n",
359 jump_func->agg.by_ref ? "reference" : "value");
9771b263 360 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
361 {
362 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
363 item->offset);
364 if (TYPE_P (item->value))
365 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 366 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
367 else
368 {
369 fprintf (f, "cst: ");
ef6cb4c7 370 print_generic_expr (f, item->value);
8b7773a4
MJ
371 }
372 fprintf (f, "\n");
373 }
374 }
44210a96
MJ
375
376 struct ipa_polymorphic_call_context *ctx
377 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
378 if (ctx && !ctx->useless_p ())
379 {
380 fprintf (f, " Context: ");
381 ctx->dump (dump_file);
382 }
04be694e 383
86cd0334 384 if (jump_func->bits)
209ca542 385 {
86cd0334
MJ
386 fprintf (f, " value: ");
387 print_hex (jump_func->bits->value, f);
388 fprintf (f, ", mask: ");
389 print_hex (jump_func->bits->mask, f);
209ca542
PK
390 fprintf (f, "\n");
391 }
392 else
393 fprintf (f, " Unknown bits\n");
8bc5448f 394
86cd0334 395 if (jump_func->m_vr)
8bc5448f
KV
396 {
397 fprintf (f, " VR ");
398 fprintf (f, "%s[",
86cd0334
MJ
399 (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
400 print_decs (jump_func->m_vr->min, f);
8bc5448f 401 fprintf (f, ", ");
86cd0334 402 print_decs (jump_func->m_vr->max, f);
8bc5448f
KV
403 fprintf (f, "]\n");
404 }
405 else
406 fprintf (f, " Unknown VR\n");
749aa96d
MJ
407 }
408}
409
410
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Direct callees are printed first, indirect call sites
   (including polymorphic ones) afterwards.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  /* Direct call sites.  */
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  /* Indirect call sites, with a short description of how the target is
     obtained (aggregate load, member pointer or polymorphic/simple).  */
  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
464
465/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 466
3e293154
MJ
467void
468ipa_print_all_jump_functions (FILE *f)
469{
470 struct cgraph_node *node;
471
ca30a539 472 fprintf (f, "\nJump functions:\n");
65c70e6b 473 FOR_EACH_FUNCTION (node)
3e293154
MJ
474 {
475 ipa_print_node_jump_functions (f, node);
476 }
477}
478
04be694e
MJ
479/* Set jfunc to be a know-really nothing jump function. */
480
481static void
482ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
483{
484 jfunc->type = IPA_JF_UNKNOWN;
86cd0334
MJ
485 jfunc->bits = NULL;
486 jfunc->m_vr = NULL;
04be694e
MJ
487}
488
b8f6e610
MJ
489/* Set JFUNC to be a copy of another jmp (to be used by jump function
490 combination code). The two functions will share their rdesc. */
491
492static void
493ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
494 struct ipa_jump_func *src)
495
496{
497 gcc_checking_assert (src->type == IPA_JF_CONST);
498 dst->type = IPA_JF_CONST;
499 dst->value.constant = src->value.constant;
500}
501
7b872d9e
MJ
502/* Set JFUNC to be a constant jmp function. */
503
504static void
4502fe8d
MJ
505ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
506 struct cgraph_edge *cs)
7b872d9e
MJ
507{
508 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
509 jfunc->value.constant.value = unshare_expr_without_location (constant);
510
511 if (TREE_CODE (constant) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
513 {
514 struct ipa_cst_ref_desc *rdesc;
4502fe8d 515
601f3293 516 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
517 rdesc->cs = cs;
518 rdesc->next_duplicate = NULL;
519 rdesc->refcount = 1;
520 jfunc->value.constant.rdesc = rdesc;
521 }
522 else
523 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
524}
525
526/* Set JFUNC to be a simple pass-through jump function. */
527static void
8b7773a4 528ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 529 bool agg_preserved)
7b872d9e
MJ
530{
531 jfunc->type = IPA_JF_PASS_THROUGH;
532 jfunc->value.pass_through.operand = NULL_TREE;
533 jfunc->value.pass_through.formal_id = formal_id;
534 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 535 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
536}
537
a2b4c188
KV
538/* Set JFUNC to be an unary pass through jump function. */
539
540static void
541ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
542 enum tree_code operation)
543{
544 jfunc->type = IPA_JF_PASS_THROUGH;
545 jfunc->value.pass_through.operand = NULL_TREE;
546 jfunc->value.pass_through.formal_id = formal_id;
547 jfunc->value.pass_through.operation = operation;
548 jfunc->value.pass_through.agg_preserved = false;
549}
7b872d9e
MJ
550/* Set JFUNC to be an arithmetic pass through jump function. */
551
552static void
553ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
554 tree operand, enum tree_code operation)
555{
556 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 557 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
558 jfunc->value.pass_through.formal_id = formal_id;
559 jfunc->value.pass_through.operation = operation;
8b7773a4 560 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
561}
562
563/* Set JFUNC to be an ancestor jump function. */
564
565static void
566ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 567 int formal_id, bool agg_preserved)
7b872d9e
MJ
568{
569 jfunc->type = IPA_JF_ANCESTOR;
570 jfunc->value.ancestor.formal_id = formal_id;
571 jfunc->value.ancestor.offset = offset;
8b7773a4 572 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
573}
574
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body; it must be non-NULL (checking-asserted).  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
584
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change (via the walk_aliased_vdefs DATA pointer).  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected (i.e. a statement
     that may store to the VMT pointer was encountered).  */
  bool type_maybe_changed;
};
599
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  /* Calls are ignored entirely, per the rationale above.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers mark end of scope and never store a real VMT pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores might contain a VMT pointer, so only scalar
	 assignments can be filtered out here.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* Under strict aliasing, a non-pointer scalar store cannot alias
	     the VMT pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A store to a non-virtual field cannot be a VMT store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store the VMT pointer.  */
  return true;
}
664
3b97a5c7
MJ
665/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointerIt stores its result into DATA, which points to a
11478306 668 prop_type_change_info structure. */
f65cf2b7
MJ
669
670static bool
671check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
672{
355fe088 673 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 674 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
675
676 if (stmt_may_be_vtbl_ptr_store (stmt))
677 {
678 tci->type_maybe_changed = true;
679 return true;
680 }
681 else
682 return false;
683}
684
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 738
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  Returning true conservatively reports "may have changed".  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Set up a reference describing just the VMT-pointer-sized slot at
     BASE + OFFSET for the alias walk.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL looking for a statement
     that may store to the VMT pointer.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
792
058d0a90
JH
793/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
794 If it is, return true and fill in the jump function JFUNC with relevant type
795 information or set it to unknown. ARG is the object itself (not a pointer
796 to it, unless dereferenced). BASE is the base of the memory access as
797 returned by get_ref_base_and_extent, as is the offset. */
798
799static bool
538dd0b7 800detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
801 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
802{
803 if (!flag_devirtualize)
804 return false;
805
806 if (TREE_CODE (base) == MEM_REF
807 && !param_type_may_change_p (current_function_decl,
808 TREE_OPERAND (base, 0),
809 call))
810 return false;
811 return detect_type_change_from_memory_writes (arg, base, comp_type,
812 call, jfunc, offset);
813}
814
f65cf2b7
MJ
815/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
816 SSA name (its dereference will become the base and the offset is assumed to
817 be zero). */
818
819static bool
06d65050 820detect_type_change_ssa (tree arg, tree comp_type,
538dd0b7 821 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
822{
823 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 824 if (!flag_devirtualize
06d65050 825 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
826 return false;
827
058d0a90
JH
828 if (!param_type_may_change_p (current_function_decl, arg, call))
829 return false;
830
f65cf2b7 831 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 832 build_int_cst (ptr_type_node, 0));
f65cf2b7 833
058d0a90
JH
834 return detect_type_change_from_memory_writes (arg, arg, comp_type,
835 call, jfunc, 0);
f65cf2b7
MJ
836}
837
fdb0e1b4
MJ
838/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
839 boolean variable pointed to by DATA. */
840
841static bool
842mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
843 void *data)
844{
845 bool *b = (bool *) data;
846 *b = true;
847 return true;
848}
849
8aab5218
MJ
850/* Return true if we have already walked so many statements in AA that we
851 should really just start giving up. */
852
853static bool
56b40062 854aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
855{
856 gcc_checking_assert (fbi);
857 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
858}
859
860/* Find the nearest valid aa status for parameter specified by INDEX that
861 dominates BB. */
862
56b40062
MJ
863static struct ipa_param_aa_status *
864find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
865 int index)
866{
867 while (true)
868 {
869 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
870 if (!bb)
871 return NULL;
872 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
873 if (!bi->param_aa_statuses.is_empty ()
874 && bi->param_aa_statuses[index].valid)
875 return &bi->param_aa_statuses[index];
876 }
877}
878
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate per-parameter status slots for this BB.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared slot must not carry any modification flags.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      /* Seed from the nearest dominating block that has a valid status,
	 otherwise start from an all-clear valid state.  */
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
907
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  INDEX is the
   parameter's position in the descriptor vector.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* A read-only PARM_DECL can never be modified.  */
  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Give up once the alias-walk budget is exhausted, and reuse a cached
	 "modified" verdict for this BB when available.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  /* Charge the walk against the budget and cache a positive verdict.  */
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
952
a2b4c188
KV
953/* If STMT is an assignment that loads a value from an parameter declaration,
954 return the index of the parameter in ipa_node_params which has not been
955 modified. Otherwise return -1. */
956
957static int
958load_from_unmodified_param (struct ipa_func_body_info *fbi,
f65f1ae3 959 vec<ipa_param_descriptor, va_gc> *descriptors,
a2b4c188
KV
960 gimple *stmt)
961{
bda2bc48
MJ
962 int index;
963 tree op1;
964
a2b4c188
KV
965 if (!gimple_assign_single_p (stmt))
966 return -1;
967
bda2bc48
MJ
968 op1 = gimple_assign_rhs1 (stmt);
969 if (TREE_CODE (op1) != PARM_DECL)
a2b4c188
KV
970 return -1;
971
bda2bc48
MJ
972 index = ipa_get_param_decl_index_1 (descriptors, op1);
973 if (index < 0
974 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
a2b4c188
KV
975 return -1;
976
bda2bc48 977 return index;
a2b4c188
KV
978}
979
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* Consult the cached per-BB answer first.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  /* Walk virtual defs backwards from STMT looking for a clobber of REF,
     charging the walk against the AA budget.  */
  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result for subsequent queries in this BB.  */
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
1018
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  /* Consult the cached per-BB answer first.  */
  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  /* Walk virtual defs backwards from CALL looking for any store through the
     pointer (size unknown, hence NULL_TREE).  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  /* Cache a negative result for subsequent queries in this BB.  */
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1052
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  /* Punt on variable-sized or variably-located accesses.  */
  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      /* Load from a by-value aggregate parameter.  NOTE: this INDEX
	 intentionally shadows the outer one.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Otherwise the base must be a dereference of a pointer at offset zero.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      /* The pointer is the unmodified incoming value of a parameter.  */
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* In strict mode (no GUARANTEED_UNMODIFIED out-parameter) an unproven
	 load is a failure; otherwise report the uncertainty to the caller.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1148
b258210c 1149/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1150 of an assignment statement STMT, try to determine whether we are actually
1151 handling any of the following cases and construct an appropriate jump
1152 function into JFUNC if so:
1153
1154 1) The passed value is loaded from a formal parameter which is not a gimple
1155 register (most probably because it is addressable, the value has to be
1156 scalar) and we can guarantee the value has not changed. This case can
1157 therefore be described by a simple pass-through jump function. For example:
1158
1159 foo (int a)
1160 {
1161 int a.0;
1162
1163 a.0_2 = a;
1164 bar (a.0_2);
1165
1166 2) The passed value can be described by a simple arithmetic pass-through
1167 jump function. E.g.
1168
1169 foo (int a)
1170 {
1171 int D.2064;
1172
1173 D.2064_4 = a.1(D) + 4;
1174 bar (D.2064_4);
1175
1176 This case can also occur in combination of the previous one, e.g.:
1177
1178 foo (int a, int z)
1179 {
1180 int a.0;
1181 int D.2064;
1182
1183 a.0_3 = a;
1184 D.2064_4 = a.0_3 + 4;
1185 foo (D.2064_4);
1186
1187 3) The passed value is an address of an object within another one (which
1188 also passed by reference). Such situations are described by an ancestor
1189 jump function and describe situations such as:
1190
1191 B::foo() (struct B * const this)
1192 {
1193 struct A * D.1845;
1194
1195 D.1845_2 = &this_1(D)->D.1748;
1196 A::bar (D.1845_2);
1197
1198 INFO is the structure describing individual parameters access different
1199 stages of IPA optimizations. PARMS_AINFO contains the information that is
1200 only needed for intraprocedural analysis. */
685b0d13
MJ
1201
1202static void
56b40062 1203compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1204 struct ipa_node_params *info,
b258210c 1205 struct ipa_jump_func *jfunc,
355fe088 1206 gcall *call, gimple *stmt, tree name,
06d65050 1207 tree param_type)
685b0d13
MJ
1208{
1209 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1210 tree op1, tc_ssa, base, ssa;
ee45a32d 1211 bool reverse;
685b0d13 1212 int index;
685b0d13 1213
685b0d13 1214 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1215
fdb0e1b4 1216 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1217 {
fdb0e1b4
MJ
1218 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1219 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1220 else
bda2bc48
MJ
1221 index = load_from_unmodified_param (fbi, info->descriptors,
1222 SSA_NAME_DEF_STMT (op1));
fdb0e1b4
MJ
1223 tc_ssa = op1;
1224 }
1225 else
1226 {
bda2bc48 1227 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1228 tc_ssa = gimple_assign_lhs (stmt);
1229 }
1230
1231 if (index >= 0)
1232 {
a77af182 1233 switch (gimple_assign_rhs_class (stmt))
8b7773a4 1234 {
a77af182
RB
1235 case GIMPLE_BINARY_RHS:
1236 {
1237 tree op2 = gimple_assign_rhs2 (stmt);
1238 if (!is_gimple_ip_invariant (op2)
1239 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1240 != tcc_comparison)
1241 && !useless_type_conversion_p (TREE_TYPE (name),
1242 TREE_TYPE (op1))))
1243 return;
1244
1245 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1246 gimple_assign_rhs_code (stmt));
1247 break;
1248 }
1249 case GIMPLE_SINGLE_RHS:
1250 {
1251 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1252 tc_ssa);
1253 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1254 break;
1255 }
1256 case GIMPLE_UNARY_RHS:
bda2bc48
MJ
1257 if (is_gimple_assign (stmt)
1258 && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1259 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
a77af182 1260 ipa_set_jf_unary_pass_through (jfunc, index,
bda2bc48 1261 gimple_assign_rhs_code (stmt));
a77af182 1262 default:;
8b7773a4 1263 }
685b0d13
MJ
1264 return;
1265 }
1266
1267 if (TREE_CODE (op1) != ADDR_EXPR)
1268 return;
1269 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1270 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1271 return;
ee45a32d 1272 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
32aa622c 1273 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1274 /* If this is a varying address, punt. */
1275 || max_size == -1
1276 || max_size != size)
685b0d13 1277 return;
807e902e 1278 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1279 ssa = TREE_OPERAND (base, 0);
1280 if (TREE_CODE (ssa) != SSA_NAME
1281 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1282 || offset < 0)
685b0d13
MJ
1283 return;
1284
b8f6e610 1285 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1286 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1287 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1288 ipa_set_ancestor_jf (jfunc, offset, index,
1289 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1290}
1291
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the offset of the MEM_REF itself to what the handled components
     contributed.  */
  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1336
685b0d13 1337
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the NULL pointer; TMP is the other one, the
     ancestor address.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must be computed by an &obj->field assignment in a block with a
     single predecessor (the block with the NULL test).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must belong to the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1418
be95e2b9
MJ
1419/* Inspect the given TYPE and return true iff it has the same structure (the
1420 same number of fields of the same types) as a C++ member pointer. If
1421 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1422 corresponding fields there. */
1423
3e293154
MJ
1424static bool
1425type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1426{
1427 tree fld;
1428
1429 if (TREE_CODE (type) != RECORD_TYPE)
1430 return false;
1431
1432 fld = TYPE_FIELDS (type);
1433 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1434 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1435 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1436 return false;
1437
1438 if (method_ptr)
1439 *method_ptr = fld;
1440
910ad8de 1441 fld = DECL_CHAIN (fld);
8b7773a4 1442 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1443 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1444 return false;
1445 if (delta)
1446 *delta = fld;
1447
910ad8de 1448 if (DECL_CHAIN (fld))
3e293154
MJ
1449 return false;
1450
1451 return true;
1452}
1453
61502ca8 1454/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1455 return the rhs of its defining statement. Otherwise return RHS as it
1456 is. */
7ec49257
MJ
1457
1458static inline tree
1459get_ssa_def_if_simple_copy (tree rhs)
1460{
1461 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1462 {
355fe088 1463 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1464
1465 if (gimple_assign_single_p (def_stmt))
1466 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1467 else
1468 break;
7ec49257
MJ
1469 }
1470 return rhs;
1471}
1472
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1485
0d48ee34
MJ
1486/* Find the proper place in linked list of ipa_known_agg_contents_list
1487 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1488 unless there is a partial overlap, in which case return NULL, or such
1489 element is already there, in which case set *ALREADY_THERE to true. */
1490
1491static struct ipa_known_agg_contents_list **
1492get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1493 HOST_WIDE_INT lhs_offset,
1494 HOST_WIDE_INT lhs_size,
1495 bool *already_there)
1496{
1497 struct ipa_known_agg_contents_list **p = list;
1498 while (*p && (*p)->offset < lhs_offset)
1499 {
1500 if ((*p)->offset + (*p)->size > lhs_offset)
1501 return NULL;
1502 p = &(*p)->next;
1503 }
1504
1505 if (*p && (*p)->offset < lhs_offset + lhs_size)
1506 {
1507 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1508 /* We already know this value is subsequently overwritten with
1509 something else. */
1510 *already_there = true;
1511 else
1512 /* Otherwise this is a partial overlap which we cannot
1513 represent. */
1514 return NULL;
1515 }
1516 return p;
1517}
1518
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      /* Only the entries with known constants make it into the vector.  */
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  /* Stored offsets are relative to the beginning of the argument.  */
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
1542
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  /* Pointer in an SSA name: track stores through the pointer; the
	     pointee type must have a known constant size.  */
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  /* Address of a local object: track stores into the object
	     itself.  */
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      /* Aggregate passed by value.  */
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      /* Statements that cannot touch the aggregate are irrelevant; any other
	 statement we cannot analyze ends the walk.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  /* Only direct stores through the tracked pointer at offset zero
	     are understood.  */
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  /* A store to a different declaration cannot affect ours; anything
	     else is beyond this analysis.  */
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      /* Stop when enough constants have been found or too many items have
	 been examined.  */
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1711
5d5f1e95
KV
1712/* Return the Ith param type of callee associated with call graph
1713 edge E. */
1714
1715tree
06d65050
JH
1716ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1717{
1718 int n;
1719 tree type = (e->callee
67348ccc 1720 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1721 : gimple_call_fntype (e->call_stmt));
1722 tree t = TYPE_ARG_TYPES (type);
1723
1724 for (n = 0; n < i; n++)
1725 {
1726 if (!t)
1727 break;
1728 t = TREE_CHAIN (t);
1729 }
1730 if (t)
1731 return TREE_VALUE (t);
1732 if (!e->callee)
1733 return NULL;
67348ccc 1734 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1735 for (n = 0; n < i; n++)
1736 {
1737 if (!t)
1738 return NULL;
1739 t = TREE_CHAIN (t);
1740 }
1741 if (t)
1742 return TREE_TYPE (t);
1743 return NULL;
1744}
1745
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  /* Look the pair up first so that equal ipa_bits are shared.  */
  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  /* Not seen before: allocate a GC-managed copy and cache it.  */
  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
1768
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
1778
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  /* Equal ranges are shared through the hash table.  */
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
1795
1796/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1797 equiv set. Use hash table in order to avoid creating multiple same copies of
1798 value_ranges. */
1799
1800static value_range *
1801ipa_get_value_range (enum value_range_type type, tree min, tree max)
1802{
1803 value_range tmp;
1804 tmp.type = type;
1805 tmp.min = min;
1806 tmp.max = max;
1807 tmp.equiv = NULL;
1808 return ipa_get_value_range (&tmp);
1809}
1810
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
1821
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
1830
3e293154
MJ
1831/* Compute jump function for all arguments of callsite CS and insert the
1832 information in the jump_functions array in the ipa_edge_args corresponding
1833 to this callsite. */
be95e2b9 1834
749aa96d 1835static void
56b40062 1836ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
062c604f 1837 struct cgraph_edge *cs)
3e293154
MJ
1838{
1839 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09 1840 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
538dd0b7 1841 gcall *call = cs->call_stmt;
8b7773a4 1842 int n, arg_num = gimple_call_num_args (call);
5ce97055 1843 bool useful_context = false;
3e293154 1844
606d9a09 1845 if (arg_num == 0 || args->jump_functions)
3e293154 1846 return;
9771b263 1847 vec_safe_grow_cleared (args->jump_functions, arg_num);
5ce97055
JH
1848 if (flag_devirtualize)
1849 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
3e293154 1850
96e24d49
JJ
1851 if (gimple_call_internal_p (call))
1852 return;
5fe8e757
MJ
1853 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1854 return;
1855
8b7773a4
MJ
1856 for (n = 0; n < arg_num; n++)
1857 {
1858 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1859 tree arg = gimple_call_arg (call, n);
06d65050 1860 tree param_type = ipa_get_callee_param_type (cs, n);
5ce97055
JH
1861 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1862 {
049e6d36 1863 tree instance;
5ce97055
JH
1864 struct ipa_polymorphic_call_context context (cs->caller->decl,
1865 arg, cs->call_stmt,
049e6d36
JH
1866 &instance);
1867 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
5ce97055
JH
1868 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1869 if (!context.useless_p ())
1870 useful_context = true;
1871 }
3e293154 1872
718625ad
KV
1873 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1874 {
f7503699
KV
1875 bool addr_nonzero = false;
1876 bool strict_overflow = false;
1877
718625ad
KV
1878 if (TREE_CODE (arg) == SSA_NAME
1879 && param_type
1880 && get_ptr_nonnull (arg))
f7503699
KV
1881 addr_nonzero = true;
1882 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1883 addr_nonzero = true;
1884
1885 if (addr_nonzero)
718625ad 1886 {
86cd0334
MJ
1887 tree z = build_int_cst (TREE_TYPE (arg), 0);
1888 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
718625ad
KV
1889 }
1890 else
86cd0334 1891 gcc_assert (!jfunc->m_vr);
718625ad
KV
1892 }
1893 else
8bc5448f
KV
1894 {
1895 wide_int min, max;
1896 value_range_type type;
1897 if (TREE_CODE (arg) == SSA_NAME
1898 && param_type
1899 && (type = get_range_info (arg, &min, &max))
3a4228ba 1900 && (type == VR_RANGE || type == VR_ANTI_RANGE))
8bc5448f 1901 {
86cd0334
MJ
1902 value_range tmpvr,resvr;
1903
1904 tmpvr.type = type;
1905 tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1906 tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1907 tmpvr.equiv = NULL;
1908 memset (&resvr, 0, sizeof (resvr));
1909 extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1910 &tmpvr, TREE_TYPE (arg));
1911 if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1912 ipa_set_jfunc_vr (jfunc, &resvr);
3a4228ba 1913 else
86cd0334 1914 gcc_assert (!jfunc->m_vr);
8bc5448f
KV
1915 }
1916 else
86cd0334 1917 gcc_assert (!jfunc->m_vr);
8bc5448f 1918 }
04be694e 1919
209ca542
PK
1920 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1921 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1922 {
209ca542 1923 if (TREE_CODE (arg) == SSA_NAME)
86cd0334
MJ
1924 ipa_set_jfunc_bits (jfunc, 0,
1925 widest_int::from (get_nonzero_bits (arg),
1926 TYPE_SIGN (TREE_TYPE (arg))));
209ca542 1927 else
86cd0334 1928 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
209ca542 1929 }
67b97478
PK
1930 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1931 {
1932 unsigned HOST_WIDE_INT bitpos;
1933 unsigned align;
1934
67b97478 1935 get_pointer_alignment_1 (arg, &align, &bitpos);
86cd0334
MJ
1936 widest_int mask
1937 = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
1938 .and_not (align / BITS_PER_UNIT - 1);
1939 widest_int value = bitpos / BITS_PER_UNIT;
1940 ipa_set_jfunc_bits (jfunc, value, mask);
67b97478 1941 }
209ca542 1942 else
86cd0334 1943 gcc_assert (!jfunc->bits);
209ca542 1944
04643334 1945 if (is_gimple_ip_invariant (arg)
8813a647 1946 || (VAR_P (arg)
04643334
MJ
1947 && is_global_var (arg)
1948 && TREE_READONLY (arg)))
4502fe8d 1949 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1950 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1951 && TREE_CODE (arg) == PARM_DECL)
1952 {
1953 int index = ipa_get_param_decl_index (info, arg);
1954
1955 gcc_assert (index >=0);
1956 /* Aggregate passed by value, check for pass-through, otherwise we
1957 will attempt to fill in aggregate contents later in this
1958 for cycle. */
8aab5218 1959 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
8b7773a4 1960 {
3b97a5c7 1961 ipa_set_jf_simple_pass_through (jfunc, index, false);
8b7773a4
MJ
1962 continue;
1963 }
1964 }
1965 else if (TREE_CODE (arg) == SSA_NAME)
1966 {
1967 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1968 {
1969 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1970 if (index >= 0)
8b7773a4 1971 {
3b97a5c7 1972 bool agg_p;
8aab5218 1973 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
3b97a5c7 1974 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
8b7773a4
MJ
1975 }
1976 }
1977 else
1978 {
355fe088 1979 gimple *stmt = SSA_NAME_DEF_STMT (arg);
8b7773a4 1980 if (is_gimple_assign (stmt))
8aab5218 1981 compute_complex_assign_jump_func (fbi, info, jfunc,
06d65050 1982 call, stmt, arg, param_type);
8b7773a4 1983 else if (gimple_code (stmt) == GIMPLE_PHI)
8aab5218 1984 compute_complex_ancestor_jump_func (fbi, info, jfunc,
538dd0b7
DM
1985 call,
1986 as_a <gphi *> (stmt));
8b7773a4
MJ
1987 }
1988 }
3e293154 1989
85942f45
JH
1990 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1991 passed (because type conversions are ignored in gimple). Usually we can
1992 safely get type from function declaration, but in case of K&R prototypes or
1993 variadic functions we can try our luck with type of the pointer passed.
1994 TODO: Since we look for actual initialization of the memory object, we may better
1995 work out the type based on the memory stores we find. */
1996 if (!param_type)
1997 param_type = TREE_TYPE (arg);
1998
8b7773a4
MJ
1999 if ((jfunc->type != IPA_JF_PASS_THROUGH
2000 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2001 && (jfunc->type != IPA_JF_ANCESTOR
2002 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2003 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
85942f45 2004 || POINTER_TYPE_P (param_type)))
0d48ee34 2005 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
8b7773a4 2006 }
5ce97055
JH
2007 if (!useful_context)
2008 vec_free (args->polymorphic_call_contexts);
3e293154
MJ
2009}
2010
749aa96d 2011/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 2012 from BB. */
749aa96d 2013
062c604f 2014static void
56b40062 2015ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 2016{
8aab5218
MJ
2017 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2018 int i;
749aa96d
MJ
2019 struct cgraph_edge *cs;
2020
8aab5218 2021 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 2022 {
8aab5218 2023 struct cgraph_node *callee = cs->callee;
749aa96d 2024
8aab5218
MJ
2025 if (callee)
2026 {
d52f5295 2027 callee->ultimate_alias_target ();
8aab5218
MJ
2028 /* We do not need to bother analyzing calls to unknown functions
2029 unless they may become known during lto/whopr. */
2030 if (!callee->definition && !flag_lto)
2031 continue;
2032 }
2033 ipa_compute_jump_functions_for_edge (fbi, cs);
2034 }
749aa96d
MJ
2035}
2036
8b7773a4
MJ
2037/* If STMT looks like a statement loading a value from a member pointer formal
2038 parameter, return that parameter and store the offset of the field to
2039 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2040 might be clobbered). If USE_DELTA, then we look for a use of the delta
2041 field rather than the pfn. */
be95e2b9 2042
3e293154 2043static tree
355fe088 2044ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
8b7773a4 2045 HOST_WIDE_INT *offset_p)
3e293154 2046{
8b7773a4
MJ
2047 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2048
2049 if (!gimple_assign_single_p (stmt))
2050 return NULL_TREE;
3e293154 2051
8b7773a4 2052 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
2053 if (TREE_CODE (rhs) == COMPONENT_REF)
2054 {
2055 ref_field = TREE_OPERAND (rhs, 1);
2056 rhs = TREE_OPERAND (rhs, 0);
2057 }
2058 else
2059 ref_field = NULL_TREE;
d242d063 2060 if (TREE_CODE (rhs) != MEM_REF)
3e293154 2061 return NULL_TREE;
3e293154 2062 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
2063 if (TREE_CODE (rec) != ADDR_EXPR)
2064 return NULL_TREE;
2065 rec = TREE_OPERAND (rec, 0);
3e293154 2066 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 2067 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 2068 return NULL_TREE;
d242d063 2069 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 2070
8b7773a4
MJ
2071 if (use_delta)
2072 fld = delta_field;
2073 else
2074 fld = ptr_field;
2075 if (offset_p)
2076 *offset_p = int_bit_position (fld);
2077
ae788515
EB
2078 if (ref_field)
2079 {
2080 if (integer_nonzerop (ref_offset))
2081 return NULL_TREE;
ae788515
EB
2082 return ref_field == fld ? rec : NULL_TREE;
2083 }
3e293154 2084 else
8b7773a4
MJ
2085 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2086 : NULL_TREE;
3e293154
MJ
2087}
2088
2089/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2090
3e293154
MJ
2091static bool
2092ipa_is_ssa_with_stmt_def (tree t)
2093{
2094 if (TREE_CODE (t) == SSA_NAME
2095 && !SSA_NAME_IS_DEFAULT_DEF (t))
2096 return true;
2097 else
2098 return false;
2099}
2100
40591473
MJ
2101/* Find the indirect call graph edge corresponding to STMT and mark it as a
2102 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2103 indirect call graph edge. */
be95e2b9 2104
40591473 2105static struct cgraph_edge *
538dd0b7
DM
2106ipa_note_param_call (struct cgraph_node *node, int param_index,
2107 gcall *stmt)
3e293154 2108{
e33c6cd6 2109 struct cgraph_edge *cs;
3e293154 2110
d52f5295 2111 cs = node->get_edge (stmt);
b258210c 2112 cs->indirect_info->param_index = param_index;
8b7773a4 2113 cs->indirect_info->agg_contents = 0;
c13bc3d9 2114 cs->indirect_info->member_ptr = 0;
91bb9f80 2115 cs->indirect_info->guaranteed_unmodified = 0;
40591473 2116 return cs;
3e293154
MJ
2117}
2118
e33c6cd6 2119/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 2120 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
2121 intermediate information about each formal parameter. Currently it checks
2122 whether the call calls a pointer that is a formal parameter and if so, the
2123 parameter is marked with the called flag and an indirect call graph edge
2124 describing the call is created. This is very simple for ordinary pointers
2125 represented in SSA but not-so-nice when it comes to member pointers. The
2126 ugly part of this function does nothing more than trying to match the
2127 pattern of such a call. An example of such a pattern is the gimple dump
2128 below, the call is on the last line:
3e293154 2129
ae788515
EB
2130 <bb 2>:
2131 f$__delta_5 = f.__delta;
2132 f$__pfn_24 = f.__pfn;
2133
2134 or
3e293154 2135 <bb 2>:
d242d063
MJ
2136 f$__delta_5 = MEM[(struct *)&f];
2137 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 2138
ae788515 2139 and a few lines below:
8aa29647
MJ
2140
2141 <bb 5>
3e293154
MJ
2142 D.2496_3 = (int) f$__pfn_24;
2143 D.2497_4 = D.2496_3 & 1;
2144 if (D.2497_4 != 0)
2145 goto <bb 3>;
2146 else
2147 goto <bb 4>;
2148
8aa29647 2149 <bb 6>:
3e293154
MJ
2150 D.2500_7 = (unsigned int) f$__delta_5;
2151 D.2501_8 = &S + D.2500_7;
2152 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2153 D.2503_10 = *D.2502_9;
2154 D.2504_12 = f$__pfn_24 + -1;
2155 D.2505_13 = (unsigned int) D.2504_12;
2156 D.2506_14 = D.2503_10 + D.2505_13;
2157 D.2507_15 = *D.2506_14;
2158 iftmp.11_16 = (String:: *) D.2507_15;
2159
8aa29647 2160 <bb 7>:
3e293154
MJ
2161 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2162 D.2500_19 = (unsigned int) f$__delta_5;
2163 D.2508_20 = &S + D.2500_19;
2164 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2165
2166 Such patterns are results of simple calls to a member pointer:
2167
2168 int doprinting (int (MyString::* f)(int) const)
2169 {
2170 MyString S ("somestring");
2171
2172 return (S.*f)(4);
2173 }
8b7773a4
MJ
2174
2175 Moreover, the function also looks for called pointers loaded from aggregates
2176 passed by value or reference. */
3e293154
MJ
2177
2178static void
56b40062 2179ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
8aab5218 2180 tree target)
3e293154 2181{
8aab5218 2182 struct ipa_node_params *info = fbi->info;
8b7773a4
MJ
2183 HOST_WIDE_INT offset;
2184 bool by_ref;
3e293154 2185
3e293154
MJ
2186 if (SSA_NAME_IS_DEFAULT_DEF (target))
2187 {
b258210c 2188 tree var = SSA_NAME_VAR (target);
8aab5218 2189 int index = ipa_get_param_decl_index (info, var);
3e293154 2190 if (index >= 0)
8aab5218 2191 ipa_note_param_call (fbi->node, index, call);
3e293154
MJ
2192 return;
2193 }
2194
8aab5218 2195 int index;
355fe088 2196 gimple *def = SSA_NAME_DEF_STMT (target);
91bb9f80 2197 bool guaranteed_unmodified;
8b7773a4 2198 if (gimple_assign_single_p (def)
ff302741
PB
2199 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2200 gimple_assign_rhs1 (def), &index, &offset,
91bb9f80 2201 NULL, &by_ref, &guaranteed_unmodified))
8b7773a4 2202 {
8aab5218 2203 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2204 cs->indirect_info->offset = offset;
2205 cs->indirect_info->agg_contents = 1;
2206 cs->indirect_info->by_ref = by_ref;
91bb9f80 2207 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
8b7773a4
MJ
2208 return;
2209 }
2210
3e293154
MJ
2211 /* Now we need to try to match the complex pattern of calling a member
2212 pointer. */
8b7773a4
MJ
2213 if (gimple_code (def) != GIMPLE_PHI
2214 || gimple_phi_num_args (def) != 2
2215 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
2216 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2217 return;
2218
3e293154
MJ
2219 /* First, we need to check whether one of these is a load from a member
2220 pointer that is a parameter to this function. */
8aab5218
MJ
2221 tree n1 = PHI_ARG_DEF (def, 0);
2222 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 2223 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 2224 return;
355fe088
TS
2225 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2226 gimple *d2 = SSA_NAME_DEF_STMT (n2);
3e293154 2227
8aab5218
MJ
2228 tree rec;
2229 basic_block bb, virt_bb;
2230 basic_block join = gimple_bb (def);
8b7773a4 2231 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 2232 {
8b7773a4 2233 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
2234 return;
2235
8aa29647 2236 bb = EDGE_PRED (join, 0)->src;
726a989a 2237 virt_bb = gimple_bb (d2);
3e293154 2238 }
8b7773a4 2239 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 2240 {
8aa29647 2241 bb = EDGE_PRED (join, 1)->src;
726a989a 2242 virt_bb = gimple_bb (d1);
3e293154
MJ
2243 }
2244 else
2245 return;
2246
2247 /* Second, we need to check that the basic blocks are laid out in the way
2248 corresponding to the pattern. */
2249
3e293154
MJ
2250 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2251 || single_pred (virt_bb) != bb
2252 || single_succ (virt_bb) != join)
2253 return;
2254
2255 /* Third, let's see that the branching is done depending on the least
2256 significant bit of the pfn. */
2257
355fe088 2258 gimple *branch = last_stmt (bb);
8aa29647 2259 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
2260 return;
2261
12430896
RG
2262 if ((gimple_cond_code (branch) != NE_EXPR
2263 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 2264 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 2265 return;
3e293154 2266
8aab5218 2267 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
2268 if (!ipa_is_ssa_with_stmt_def (cond))
2269 return;
2270
726a989a 2271 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 2272 if (!is_gimple_assign (def)
726a989a
RB
2273 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2274 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 2275 return;
726a989a
RB
2276
2277 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2278 if (!ipa_is_ssa_with_stmt_def (cond))
2279 return;
2280
726a989a 2281 def = SSA_NAME_DEF_STMT (cond);
3e293154 2282
8b75fc9b
MJ
2283 if (is_gimple_assign (def)
2284 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2285 {
726a989a 2286 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2287 if (!ipa_is_ssa_with_stmt_def (cond))
2288 return;
726a989a 2289 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2290 }
2291
8aab5218 2292 tree rec2;
6f7b8b70
RE
2293 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2294 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2295 == ptrmemfunc_vbit_in_delta),
2296 NULL);
3e293154
MJ
2297 if (rec != rec2)
2298 return;
2299
2300 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2301 if (index >= 0
8aab5218 2302 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2303 {
8aab5218 2304 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2305 cs->indirect_info->offset = offset;
2306 cs->indirect_info->agg_contents = 1;
c13bc3d9 2307 cs->indirect_info->member_ptr = 1;
91bb9f80 2308 cs->indirect_info->guaranteed_unmodified = 1;
8b7773a4 2309 }
3e293154
MJ
2310
2311 return;
2312}
2313
b258210c
MJ
2314/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2315 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2316 FBI->node (described by FBI->info), create a call note for the
2317 statement. */
b258210c
MJ
2318
2319static void
56b40062 2320ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
538dd0b7 2321 gcall *call, tree target)
b258210c
MJ
2322{
2323 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2324 int index;
40591473 2325 HOST_WIDE_INT anc_offset;
b258210c 2326
05842ff5
MJ
2327 if (!flag_devirtualize)
2328 return;
2329
40591473 2330 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2331 return;
2332
8aab5218 2333 struct ipa_node_params *info = fbi->info;
40591473
MJ
2334 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2335 {
8aab5218 2336 struct ipa_jump_func jfunc;
40591473
MJ
2337 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2338 return;
b258210c 2339
40591473
MJ
2340 anc_offset = 0;
2341 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2342 gcc_assert (index >= 0);
06d65050
JH
2343 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2344 call, &jfunc))
40591473
MJ
2345 return;
2346 }
2347 else
2348 {
8aab5218 2349 struct ipa_jump_func jfunc;
355fe088 2350 gimple *stmt = SSA_NAME_DEF_STMT (obj);
40591473
MJ
2351 tree expr;
2352
2353 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2354 if (!expr)
2355 return;
2356 index = ipa_get_param_decl_index (info,
2357 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2358 gcc_assert (index >= 0);
06d65050
JH
2359 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2360 call, &jfunc, anc_offset))
40591473
MJ
2361 return;
2362 }
2363
8aab5218
MJ
2364 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2365 struct cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2366 ii->offset = anc_offset;
ae7e9ddd 2367 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2368 ii->otr_type = obj_type_ref_class (target);
40591473 2369 ii->polymorphic = 1;
b258210c
MJ
2370}
2371
2372/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2373 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2374 containing intermediate information about each formal parameter. */
b258210c
MJ
2375
2376static void
56b40062 2377ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2378{
2379 tree target = gimple_call_fn (call);
b786d31f
JH
2380
2381 if (!target
2382 || (TREE_CODE (target) != SSA_NAME
2383 && !virtual_method_call_p (target)))
2384 return;
b258210c 2385
7d0aa05b 2386 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2387 /* If we previously turned the call into a direct call, there is
2388 no need to analyze. */
b786d31f 2389 if (cs && !cs->indirect_unknown_callee)
25583c4f 2390 return;
7d0aa05b 2391
a5b58b28 2392 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2393 {
7d0aa05b
JH
2394 tree instance;
2395 tree target = gimple_call_fn (call);
6f8091fc
JH
2396 ipa_polymorphic_call_context context (current_function_decl,
2397 target, call, &instance);
7d0aa05b 2398
ba392339
JH
2399 gcc_checking_assert (cs->indirect_info->otr_type
2400 == obj_type_ref_class (target));
2401 gcc_checking_assert (cs->indirect_info->otr_token
2402 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2403
29c43c83
JH
2404 cs->indirect_info->vptr_changed
2405 = !context.get_dynamic_type (instance,
2406 OBJ_TYPE_REF_OBJECT (target),
2407 obj_type_ref_class (target), call);
0127c169 2408 cs->indirect_info->context = context;
7d0aa05b
JH
2409 }
2410
b258210c 2411 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2412 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2413 else if (virtual_method_call_p (target))
8aab5218 2414 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2415}
2416
2417
e33c6cd6 2418/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2419 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2420 formal parameters are called. */
be95e2b9 2421
3e293154 2422static void
355fe088 2423ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2424{
726a989a 2425 if (is_gimple_call (stmt))
538dd0b7 2426 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2427}
2428
2429/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2430 If OP is a parameter declaration, mark it as used in the info structure
2431 passed in DATA. */
2432
2433static bool
355fe088 2434visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2435{
2436 struct ipa_node_params *info = (struct ipa_node_params *) data;
2437
2438 op = get_base_address (op);
2439 if (op
2440 && TREE_CODE (op) == PARM_DECL)
2441 {
2442 int index = ipa_get_param_decl_index (info, op);
2443 gcc_assert (index >= 0);
310bc633 2444 ipa_set_param_used (info, index, true);
062c604f
MJ
2445 }
2446
2447 return false;
3e293154
MJ
2448}
2449
8aab5218
MJ
2450/* Scan the statements in BB and inspect the uses of formal parameters. Store
2451 the findings in various structures of the associated ipa_node_params
2452 structure, such as parameter flags, notes etc. FBI holds various data about
2453 the function being analyzed. */
be95e2b9 2454
062c604f 2455static void
56b40062 2456ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2457{
726a989a 2458 gimple_stmt_iterator gsi;
8aab5218
MJ
2459 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2460 {
355fe088 2461 gimple *stmt = gsi_stmt (gsi);
3e293154 2462
8aab5218
MJ
2463 if (is_gimple_debug (stmt))
2464 continue;
3e293154 2465
8aab5218
MJ
2466 ipa_analyze_stmt_uses (fbi, stmt);
2467 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2468 visit_ref_for_mod_analysis,
2469 visit_ref_for_mod_analysis,
2470 visit_ref_for_mod_analysis);
5fe8e757 2471 }
8aab5218
MJ
2472 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2473 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2474 visit_ref_for_mod_analysis,
2475 visit_ref_for_mod_analysis,
2476 visit_ref_for_mod_analysis);
2477}
2478
2479/* Calculate controlled uses of parameters of NODE. */
2480
2481static void
2482ipa_analyze_controlled_uses (struct cgraph_node *node)
2483{
2484 struct ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2485
8aab5218 2486 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2487 {
2488 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2489 int controlled_uses = 0;
2490
062c604f
MJ
2491 /* For SSA regs see if parameter is used. For non-SSA we compute
2492 the flag during modification analysis. */
4502fe8d
MJ
2493 if (is_gimple_reg (parm))
2494 {
67348ccc 2495 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2496 parm);
2497 if (ddef && !has_zero_uses (ddef))
2498 {
2499 imm_use_iterator imm_iter;
2500 use_operand_p use_p;
2501
2502 ipa_set_param_used (info, i, true);
2503 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2504 if (!is_gimple_call (USE_STMT (use_p)))
2505 {
c6de6665
JJ
2506 if (!is_gimple_debug (USE_STMT (use_p)))
2507 {
2508 controlled_uses = IPA_UNDESCRIBED_USE;
2509 break;
2510 }
4502fe8d
MJ
2511 }
2512 else
2513 controlled_uses++;
2514 }
2515 else
2516 controlled_uses = 0;
2517 }
2518 else
2519 controlled_uses = IPA_UNDESCRIBED_USE;
2520 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2521 }
8aab5218 2522}
062c604f 2523
8aab5218 2524/* Free stuff in BI. */
062c604f 2525
8aab5218
MJ
2526static void
2527free_ipa_bb_info (struct ipa_bb_info *bi)
2528{
2529 bi->cg_edges.release ();
2530 bi->param_aa_statuses.release ();
3e293154
MJ
2531}
2532
8aab5218 2533/* Dominator walker driving the analysis. */
2c9561b5 2534
8aab5218 2535class analysis_dom_walker : public dom_walker
2c9561b5 2536{
8aab5218 2537public:
56b40062 2538 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2539 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2540
3daacdcd 2541 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2542
2543private:
56b40062 2544 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2545};
2546
3daacdcd 2547edge
8aab5218
MJ
2548analysis_dom_walker::before_dom_children (basic_block bb)
2549{
2550 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2551 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2552 return NULL;
2c9561b5
MJ
2553}
2554
c3431191
ML
2555/* Release body info FBI. */
2556
2557void
2558ipa_release_body_info (struct ipa_func_body_info *fbi)
2559{
2560 int i;
2561 struct ipa_bb_info *bi;
2562
2563 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2564 free_ipa_bb_info (bi);
2565 fbi->bb_infos.release ();
2566}
2567
026c3cfd 2568/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2569 of NODE, analyze their uses and compute jump functions associated
2570 with actual arguments of calls from within NODE. */
062c604f
MJ
2571
2572void
2573ipa_analyze_node (struct cgraph_node *node)
2574{
56b40062 2575 struct ipa_func_body_info fbi;
57dbdc5a 2576 struct ipa_node_params *info;
062c604f 2577
57dbdc5a
MJ
2578 ipa_check_create_node_params ();
2579 ipa_check_create_edge_args ();
2580 info = IPA_NODE_REF (node);
8aab5218
MJ
2581
2582 if (info->analysis_done)
2583 return;
2584 info->analysis_done = 1;
2585
2586 if (ipa_func_spec_opts_forbid_analysis_p (node))
2587 {
2588 for (int i = 0; i < ipa_get_param_count (info); i++)
2589 {
2590 ipa_set_param_used (info, i, true);
2591 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2592 }
2593 return;
2594 }
2595
2596 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2597 push_cfun (func);
2598 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2599 ipa_initialize_node_params (node);
8aab5218 2600 ipa_analyze_controlled_uses (node);
062c604f 2601
8aab5218
MJ
2602 fbi.node = node;
2603 fbi.info = IPA_NODE_REF (node);
2604 fbi.bb_infos = vNULL;
2605 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2606 fbi.param_count = ipa_get_param_count (info);
2607 fbi.aa_walked = 0;
062c604f 2608
8aab5218
MJ
2609 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2610 {
2611 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2612 bi->cg_edges.safe_push (cs);
2613 }
062c604f 2614
8aab5218
MJ
2615 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2616 {
2617 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2618 bi->cg_edges.safe_push (cs);
2619 }
2620
2621 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2622
c3431191 2623 ipa_release_body_info (&fbi);
8aab5218 2624 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2625 pop_cfun ();
062c604f 2626}
062c604f 2627
be95e2b9 2628/* Update the jump functions associated with call graph edge E when the call
3e293154 2629 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2630 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2631
3e293154
MJ
2632static void
2633update_jump_functions_after_inlining (struct cgraph_edge *cs,
2634 struct cgraph_edge *e)
2635{
2636 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2637 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2638 int count = ipa_get_cs_argument_count (args);
2639 int i;
2640
2641 for (i = 0; i < count; i++)
2642 {
b258210c 2643 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
5ce97055
JH
2644 struct ipa_polymorphic_call_context *dst_ctx
2645 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2646
685b0d13
MJ
2647 if (dst->type == IPA_JF_ANCESTOR)
2648 {
b258210c 2649 struct ipa_jump_func *src;
8b7773a4 2650 int dst_fid = dst->value.ancestor.formal_id;
5ce97055
JH
2651 struct ipa_polymorphic_call_context *src_ctx
2652 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2653
b258210c
MJ
2654 /* Variable number of arguments can cause havoc if we try to access
2655 one that does not exist in the inlined edge. So make sure we
2656 don't. */
8b7773a4 2657 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2658 {
04be694e 2659 ipa_set_jf_unknown (dst);
b258210c
MJ
2660 continue;
2661 }
2662
8b7773a4
MJ
2663 src = ipa_get_ith_jump_func (top, dst_fid);
2664
5ce97055
JH
2665 if (src_ctx && !src_ctx->useless_p ())
2666 {
2667 struct ipa_polymorphic_call_context ctx = *src_ctx;
2668
2669 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2670 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2671 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2672 ctx.offset_by (dst->value.ancestor.offset);
2673 if (!ctx.useless_p ())
2674 {
a7d1f3fe
ML
2675 if (!dst_ctx)
2676 {
2677 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2678 count);
2679 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2680 }
2681
2682 dst_ctx->combine_with (ctx);
5ce97055
JH
2683 }
2684 }
2685
8b7773a4
MJ
2686 if (src->agg.items
2687 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2688 {
2689 struct ipa_agg_jf_item *item;
2690 int j;
2691
2692 /* Currently we do not produce clobber aggregate jump functions,
2693 replace with merging when we do. */
2694 gcc_assert (!dst->agg.items);
2695
9771b263 2696 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2697 dst->agg.by_ref = src->agg.by_ref;
9771b263 2698 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2699 item->offset -= dst->value.ancestor.offset;
2700 }
2701
3b97a5c7
MJ
2702 if (src->type == IPA_JF_PASS_THROUGH
2703 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2704 {
2705 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2706 dst->value.ancestor.agg_preserved &=
2707 src->value.pass_through.agg_preserved;
2708 }
a2b4c188
KV
2709 else if (src->type == IPA_JF_PASS_THROUGH
2710 && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2711 {
2712 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2713 dst->value.ancestor.agg_preserved = false;
2714 }
b258210c
MJ
2715 else if (src->type == IPA_JF_ANCESTOR)
2716 {
2717 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2718 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2719 dst->value.ancestor.agg_preserved &=
2720 src->value.ancestor.agg_preserved;
b258210c
MJ
2721 }
2722 else
04be694e 2723 ipa_set_jf_unknown (dst);
b258210c
MJ
2724 }
2725 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2726 {
b258210c
MJ
2727 struct ipa_jump_func *src;
2728 /* We must check range due to calls with variable number of arguments
2729 and we cannot combine jump functions with operations. */
2730 if (dst->value.pass_through.operation == NOP_EXPR
2731 && (dst->value.pass_through.formal_id
2732 < ipa_get_cs_argument_count (top)))
2733 {
8b7773a4
MJ
2734 int dst_fid = dst->value.pass_through.formal_id;
2735 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2736 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
5ce97055
JH
2737 struct ipa_polymorphic_call_context *src_ctx
2738 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2739
5ce97055
JH
2740 if (src_ctx && !src_ctx->useless_p ())
2741 {
2742 struct ipa_polymorphic_call_context ctx = *src_ctx;
2743
2744 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2745 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2746 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2747 if (!ctx.useless_p ())
2748 {
2749 if (!dst_ctx)
2750 {
2751 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2752 count);
2753 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2754 }
2755 dst_ctx->combine_with (ctx);
2756 }
2757 }
b8f6e610
MJ
2758 switch (src->type)
2759 {
2760 case IPA_JF_UNKNOWN:
04be694e 2761 ipa_set_jf_unknown (dst);
b8f6e610 2762 break;
b8f6e610
MJ
2763 case IPA_JF_CONST:
2764 ipa_set_jf_cst_copy (dst, src);
2765 break;
2766
2767 case IPA_JF_PASS_THROUGH:
2768 {
2769 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2770 enum tree_code operation;
2771 operation = ipa_get_jf_pass_through_operation (src);
2772
2773 if (operation == NOP_EXPR)
2774 {
3b97a5c7 2775 bool agg_p;
b8f6e610
MJ
2776 agg_p = dst_agg_p
2777 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2778 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610 2779 }
a2b4c188
KV
2780 else if (TREE_CODE_CLASS (operation) == tcc_unary)
2781 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
b8f6e610
MJ
2782 else
2783 {
2784 tree operand = ipa_get_jf_pass_through_operand (src);
2785 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2786 operation);
2787 }
2788 break;
2789 }
2790 case IPA_JF_ANCESTOR:
2791 {
3b97a5c7 2792 bool agg_p;
b8f6e610
MJ
2793 agg_p = dst_agg_p
2794 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2795 ipa_set_ancestor_jf (dst,
2796 ipa_get_jf_ancestor_offset (src),
b8f6e610 2797 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2798 agg_p);
b8f6e610
MJ
2799 break;
2800 }
2801 default:
2802 gcc_unreachable ();
2803 }
8b7773a4
MJ
2804
2805 if (src->agg.items
b8f6e610 2806 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2807 {
2808 /* Currently we do not produce clobber aggregate jump
2809 functions, replace with merging when we do. */
2810 gcc_assert (!dst->agg.items);
2811
2812 dst->agg.by_ref = src->agg.by_ref;
9771b263 2813 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2814 }
b258210c
MJ
2815 }
2816 else
04be694e 2817 ipa_set_jf_unknown (dst);
3e293154 2818 }
b258210c
MJ
2819 }
2820}
2821
5ce97055
JH
2822/* If TARGET is an addr_expr of a function declaration, make it the
2823 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2824 Otherwise, return NULL. */
b258210c 2825
3949c4a7 2826struct cgraph_edge *
5ce97055
JH
2827ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2828 bool speculative)
b258210c
MJ
2829{
2830 struct cgraph_node *callee;
263e19c7 2831 struct ipa_call_summary *es = ipa_call_summaries->get (ie);
48b1474e 2832 bool unreachable = false;
b258210c 2833
ceeffab0
MJ
2834 if (TREE_CODE (target) == ADDR_EXPR)
2835 target = TREE_OPERAND (target, 0);
b258210c 2836 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2837 {
2838 target = canonicalize_constructor_val (target, NULL);
2839 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2840 {
db66bf68
JH
2841 /* Member pointer call that goes through a VMT lookup. */
2842 if (ie->indirect_info->member_ptr
2843 /* Or if target is not an invariant expression and we do not
2844 know if it will evaulate to function at runtime.
2845 This can happen when folding through &VAR, where &VAR
2846 is IP invariant, but VAR itself is not.
2847
2848 TODO: Revisit this when GCC 5 is branched. It seems that
2849 member_ptr check is not needed and that we may try to fold
2850 the expression and see if VAR is readonly. */
2851 || !is_gimple_ip_invariant (target))
2852 {
2853 if (dump_enabled_p ())
2854 {
2855 location_t loc = gimple_location_safe (ie->call_stmt);
2856 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2857 "discovered direct call non-invariant "
2858 "%s/%i\n",
2859 ie->caller->name (), ie->caller->order);
2860 }
2861 return NULL;
2862 }
2863
c13bc3d9 2864
2b5f0895
XDL
2865 if (dump_enabled_p ())
2866 {
807b7d62
ML
2867 location_t loc = gimple_location_safe (ie->call_stmt);
2868 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2869 "discovered direct call to non-function in %s/%i, "
2870 "making it __builtin_unreachable\n",
2871 ie->caller->name (), ie->caller->order);
2b5f0895 2872 }
3c9e6fca 2873
48b1474e 2874 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2875 callee = cgraph_node::get_create (target);
48b1474e 2876 unreachable = true;
a0a7b611 2877 }
48b1474e 2878 else
d52f5295 2879 callee = cgraph_node::get (target);
a0a7b611 2880 }
48b1474e 2881 else
d52f5295 2882 callee = cgraph_node::get (target);
a0a7b611
JH
2883
2884 /* Because may-edges are not explicitely represented and vtable may be external,
2885 we may create the first reference to the object in the unit. */
2886 if (!callee || callee->global.inlined_to)
2887 {
a0a7b611
JH
2888
2889 /* We are better to ensure we can refer to it.
2890 In the case of static functions we are out of luck, since we already
2891 removed its body. In the case of public functions we may or may
2892 not introduce the reference. */
2893 if (!canonicalize_constructor_val (target, NULL)
2894 || !TREE_PUBLIC (target))
2895 {
2896 if (dump_file)
2897 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2898 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2a72a953 2899 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2900 ie->caller->order,
2a72a953 2901 xstrdup_for_dump (ie->callee->name ()),
67348ccc 2902 ie->callee->order);
a0a7b611
JH
2903 return NULL;
2904 }
d52f5295 2905 callee = cgraph_node::get_create (target);
a0a7b611 2906 }
2b5f0895 2907
0127c169
JH
2908 /* If the edge is already speculated. */
2909 if (speculative && ie->speculative)
2910 {
2911 struct cgraph_edge *e2;
2912 struct ipa_ref *ref;
2913 ie->speculative_call_info (e2, ie, ref);
2914 if (e2->callee->ultimate_alias_target ()
2915 != callee->ultimate_alias_target ())
2916 {
2917 if (dump_file)
2918 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2919 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2a72a953 2920 xstrdup_for_dump (ie->caller->name ()),
0127c169 2921 ie->caller->order,
2a72a953 2922 xstrdup_for_dump (callee->name ()),
0127c169 2923 callee->order,
2a72a953 2924 xstrdup_for_dump (e2->callee->name ()),
0127c169
JH
2925 e2->callee->order);
2926 }
2927 else
2928 {
2929 if (dump_file)
2930 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2931 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2a72a953 2932 xstrdup_for_dump (ie->caller->name ()),
0127c169 2933 ie->caller->order,
2a72a953 2934 xstrdup_for_dump (callee->name ()),
0127c169
JH
2935 callee->order);
2936 }
2937 return NULL;
2938 }
2939
2b5f0895
XDL
2940 if (!dbg_cnt (devirt))
2941 return NULL;
2942
1dbee8c9 2943 ipa_check_create_node_params ();
ceeffab0 2944
81fa35bd
MJ
2945 /* We can not make edges to inline clones. It is bug that someone removed
2946 the cgraph node too early. */
17afc0fe
JH
2947 gcc_assert (!callee->global.inlined_to);
2948
48b1474e 2949 if (dump_file && !unreachable)
b258210c 2950 {
5ce97055 2951 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
ceeffab0 2952 "(%s/%i -> %s/%i), for stmt ",
b258210c 2953 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2954 speculative ? "speculative" : "known",
2a72a953 2955 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2956 ie->caller->order,
2a72a953 2957 xstrdup_for_dump (callee->name ()),
67348ccc 2958 callee->order);
b258210c
MJ
2959 if (ie->call_stmt)
2960 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2961 else
2962 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2963 }
2b5f0895
XDL
2964 if (dump_enabled_p ())
2965 {
807b7d62 2966 location_t loc = gimple_location_safe (ie->call_stmt);
3c9e6fca 2967
807b7d62
ML
2968 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2969 "converting indirect call in %s to direct call to %s\n",
2970 ie->caller->name (), callee->name ());
2b5f0895 2971 }
5ce97055 2972 if (!speculative)
d8d5aef1
JH
2973 {
2974 struct cgraph_edge *orig = ie;
2975 ie = ie->make_direct (callee);
2976 /* If we resolved speculative edge the cost is already up to date
2977 for direct call (adjusted by inline_edge_duplication_hook). */
2978 if (ie == orig)
2979 {
263e19c7 2980 es = ipa_call_summaries->get (ie);
d8d5aef1
JH
2981 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2982 - eni_size_weights.call_cost);
2983 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2984 - eni_time_weights.call_cost);
2985 }
2986 }
5ce97055
JH
2987 else
2988 {
2989 if (!callee->can_be_discarded_p ())
2990 {
2991 cgraph_node *alias;
2992 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2993 if (alias)
2994 callee = alias;
2995 }
d8d5aef1 2996 /* make_speculative will update ie's cost to direct call cost. */
5ce97055
JH
2997 ie = ie->make_speculative
2998 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2999 }
749aa96d 3000
b258210c 3001 return ie;
3e293154
MJ
3002}
3003
91bb9f80
MJ
3004/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3005 CONSTRUCTOR and return it. Return NULL if the search fails for some
3006 reason. */
3007
3008static tree
3009find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3010{
3011 tree type = TREE_TYPE (constructor);
3012 if (TREE_CODE (type) != ARRAY_TYPE
3013 && TREE_CODE (type) != RECORD_TYPE)
3014 return NULL;
3015
3016 unsigned ix;
3017 tree index, val;
3018 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3019 {
3020 HOST_WIDE_INT elt_offset;
3021 if (TREE_CODE (type) == ARRAY_TYPE)
3022 {
3023 offset_int off;
3024 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3025 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3026
3027 if (index)
3028 {
3029 off = wi::to_offset (index);
3030 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3031 {
3032 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3033 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3034 off = wi::sext (off - wi::to_offset (low_bound),
3035 TYPE_PRECISION (TREE_TYPE (index)));
3036 }
3037 off *= wi::to_offset (unit_size);
3038 }
3039 else
3040 off = wi::to_offset (unit_size) * ix;
3041
3042 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3043 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3044 continue;
3045 elt_offset = off.to_shwi ();
3046 }
3047 else if (TREE_CODE (type) == RECORD_TYPE)
3048 {
3049 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3050 if (DECL_BIT_FIELD (index))
3051 continue;
3052 elt_offset = int_bit_position (index);
3053 }
3054 else
3055 gcc_unreachable ();
3056
3057 if (elt_offset > req_offset)
3058 return NULL;
3059
3060 if (TREE_CODE (val) == CONSTRUCTOR)
3061 return find_constructor_constant_at_offset (val,
3062 req_offset - elt_offset);
3063
3064 if (elt_offset == req_offset
3065 && is_gimple_reg_type (TREE_TYPE (val))
3066 && is_gimple_ip_invariant (val))
3067 return val;
3068 }
3069 return NULL;
3070}
3071
3072/* Check whether SCALAR could be used to look up an aggregate interprocedural
3073 invariant from a static constructor and if so, return it. Otherwise return
3074 NULL. */
3075
3076static tree
3077ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3078{
3079 if (by_ref)
3080 {
3081 if (TREE_CODE (scalar) != ADDR_EXPR)
3082 return NULL;
3083 scalar = TREE_OPERAND (scalar, 0);
3084 }
3085
8813a647 3086 if (!VAR_P (scalar)
91bb9f80
MJ
3087 || !is_global_var (scalar)
3088 || !TREE_READONLY (scalar)
3089 || !DECL_INITIAL (scalar)
3090 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3091 return NULL;
3092
3093 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3094}
3095
3096/* Retrieve value from aggregate jump function AGG or static initializer of
3097 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3098 none. BY_REF specifies whether the value has to be passed by reference or
3099 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3100 to is set to true if the value comes from an initializer of a constant. */
8b7773a4
MJ
3101
3102tree
91bb9f80
MJ
3103ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3104 HOST_WIDE_INT offset, bool by_ref,
3105 bool *from_global_constant)
8b7773a4
MJ
3106{
3107 struct ipa_agg_jf_item *item;
3108 int i;
3109
91bb9f80
MJ
3110 if (scalar)
3111 {
3112 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3113 if (res)
3114 {
3115 if (from_global_constant)
3116 *from_global_constant = true;
3117 return res;
3118 }
3119 }
3120
3121 if (!agg
3122 || by_ref != agg->by_ref)
8b7773a4
MJ
3123 return NULL;
3124
9771b263 3125 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
3126 if (item->offset == offset)
3127 {
3128 /* Currently we do not have clobber values, return NULL for them once
3129 we do. */
3130 gcc_checking_assert (is_gimple_ip_invariant (item->value));
91bb9f80
MJ
3131 if (from_global_constant)
3132 *from_global_constant = false;
2c9561b5
MJ
3133 return item->value;
3134 }
8b7773a4
MJ
3135 return NULL;
3136}
3137
4502fe8d 3138/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
3139 reference description RDESC. Return true if the reference has been
3140 successfully found and removed. */
4502fe8d 3141
568cda29 3142static bool
5e20cdc9 3143remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3144{
3145 struct ipa_ref *to_del;
3146 struct cgraph_edge *origin;
3147
3148 origin = rdesc->cs;
a854f856
MJ
3149 if (!origin)
3150 return false;
d122681a
ML
3151 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3152 origin->lto_stmt_uid);
568cda29
MJ
3153 if (!to_del)
3154 return false;
3155
d122681a 3156 to_del->remove_reference ();
4502fe8d
MJ
3157 if (dump_file)
3158 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2a72a953
DM
3159 xstrdup_for_dump (origin->caller->name ()),
3160 origin->caller->order, xstrdup_for_dump (symbol->name ()));
568cda29 3161 return true;
4502fe8d
MJ
3162}
3163
3164/* If JFUNC has a reference description with refcount different from
3165 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3166 NULL. JFUNC must be a constant jump function. */
3167
3168static struct ipa_cst_ref_desc *
3169jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3170{
3171 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3172 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3173 return rdesc;
3174 else
3175 return NULL;
3176}
3177
568cda29
MJ
3178/* If the value of constant jump function JFUNC is an address of a function
3179 declaration, return the associated call graph node. Otherwise return
3180 NULL. */
3181
3182static cgraph_node *
3183cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3184{
3185 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3186 tree cst = ipa_get_jf_constant (jfunc);
3187 if (TREE_CODE (cst) != ADDR_EXPR
3188 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3189 return NULL;
3190
d52f5295 3191 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3192}
3193
3194
3195/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3196 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3197 the edge specified in the rdesc. Return false if either the symbol or the
3198 reference could not be found, otherwise return true. */
3199
3200static bool
3201try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3202{
3203 struct ipa_cst_ref_desc *rdesc;
3204 if (jfunc->type == IPA_JF_CONST
3205 && (rdesc = jfunc_rdesc_usable (jfunc))
3206 && --rdesc->refcount == 0)
3207 {
5e20cdc9 3208 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3209 if (!symbol)
3210 return false;
3211
3212 return remove_described_reference (symbol, rdesc);
3213 }
3214 return true;
3215}
3216
b258210c
MJ
3217/* Try to find a destination for indirect edge IE that corresponds to a simple
3218 call or a call of a member function pointer and where the destination is a
3219 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
3220 determined, return the newly direct edge, otherwise return NULL.
3221 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 3222
b258210c
MJ
3223static struct cgraph_edge *
3224try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
3225 struct ipa_jump_func *jfunc,
3226 struct ipa_node_params *new_root_info)
b258210c 3227{
4502fe8d 3228 struct cgraph_edge *cs;
b258210c 3229 tree target;
042ae7d2 3230 bool agg_contents = ie->indirect_info->agg_contents;
91bb9f80
MJ
3231 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
3232 if (agg_contents)
3233 {
3234 bool from_global_constant;
3235 target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3236 ie->indirect_info->offset,
3237 ie->indirect_info->by_ref,
3238 &from_global_constant);
3239 if (target
3240 && !from_global_constant
3241 && !ie->indirect_info->guaranteed_unmodified)
3242 return NULL;
3243 }
b258210c 3244 else
91bb9f80 3245 target = scalar;
d250540a
MJ
3246 if (!target)
3247 return NULL;
4502fe8d
MJ
3248 cs = ipa_make_edge_direct_to_target (ie, target);
3249
a12cd2db 3250 if (cs && !agg_contents)
568cda29
MJ
3251 {
3252 bool ok;
3253 gcc_checking_assert (cs->callee
ae6d0907
MJ
3254 && (cs != ie
3255 || jfunc->type != IPA_JF_CONST
568cda29
MJ
3256 || !cgraph_node_for_jfunc (jfunc)
3257 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3258 ok = try_decrement_rdesc_refcount (jfunc);
3259 gcc_checking_assert (ok);
3260 }
4502fe8d
MJ
3261
3262 return cs;
b258210c
MJ
3263}
3264
bec81025
MJ
3265/* Return the target to be used in cases of impossible devirtualization. IE
3266 and target (the latter can be NULL) are dumped when dumping is enabled. */
3267
72972c22
MJ
3268tree
3269ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3270{
3271 if (dump_file)
3272 {
3273 if (target)
3274 fprintf (dump_file,
72972c22 3275 "Type inconsistent devirtualization: %s/%i->%s\n",
bec81025
MJ
3276 ie->caller->name (), ie->caller->order,
3277 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3278 else
3279 fprintf (dump_file,
3280 "No devirtualization target in %s/%i\n",
3281 ie->caller->name (), ie->caller->order);
3282 }
3283 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3284 cgraph_node::get_create (new_target);
bec81025
MJ
3285 return new_target;
3286}
3287
d250540a
MJ
3288/* Try to find a destination for indirect edge IE that corresponds to a virtual
3289 call based on a formal parameter which is described by jump function JFUNC
3290 and if it can be determined, make it direct and return the direct edge.
44210a96
MJ
3291 Otherwise, return NULL. CTX describes the polymorphic context that the
3292 parameter the call is based on brings along with it. */
b258210c
MJ
3293
3294static struct cgraph_edge *
3295try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a 3296 struct ipa_jump_func *jfunc,
44210a96 3297 struct ipa_polymorphic_call_context ctx)
3e293154 3298{
44210a96 3299 tree target = NULL;
5ce97055 3300 bool speculative = false;
85942f45 3301
2bf86c84 3302 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
85942f45 3303 return NULL;
b258210c 3304
44210a96 3305 gcc_assert (!ie->indirect_info->by_ref);
5ce97055
JH
3306
3307 /* Try to do lookup via known virtual table pointer value. */
2bf86c84
JH
3308 if (!ie->indirect_info->vptr_changed
3309 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
85942f45 3310 {
9de2f554
JH
3311 tree vtable;
3312 unsigned HOST_WIDE_INT offset;
91bb9f80
MJ
3313 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3314 : NULL;
3315 tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
85942f45
JH
3316 ie->indirect_info->offset,
3317 true);
9de2f554
JH
3318 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3319 {
2994ab20 3320 bool can_refer;
0127c169 3321 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2994ab20
JH
3322 vtable, offset, &can_refer);
3323 if (can_refer)
9de2f554 3324 {
2994ab20
JH
3325 if (!t
3326 || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3327 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
9de2f554 3328 || !possible_polymorphic_call_target_p
0127c169
JH
3329 (ie, cgraph_node::get (t)))
3330 {
33c3b6be 3331 /* Do not speculate builtin_unreachable, it is stupid! */
0127c169
JH
3332 if (!ie->indirect_info->vptr_changed)
3333 target = ipa_impossible_devirt_target (ie, target);
2994ab20
JH
3334 else
3335 target = NULL;
0127c169
JH
3336 }
3337 else
3338 {
3339 target = t;
3340 speculative = ie->indirect_info->vptr_changed;
3341 }
9de2f554
JH
3342 }
3343 }
85942f45
JH
3344 }
3345
44210a96
MJ
3346 ipa_polymorphic_call_context ie_context (ie);
3347 vec <cgraph_node *>targets;
3348 bool final;
d250540a 3349
44210a96
MJ
3350 ctx.offset_by (ie->indirect_info->offset);
3351 if (ie->indirect_info->vptr_changed)
3352 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3353 ie->indirect_info->otr_type);
3354 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3355 targets = possible_polymorphic_call_targets
3356 (ie->indirect_info->otr_type,
3357 ie->indirect_info->otr_token,
3358 ctx, &final);
3359 if (final && targets.length () <= 1)
5ce97055 3360 {
33c3b6be 3361 speculative = false;
44210a96
MJ
3362 if (targets.length () == 1)
3363 target = targets[0]->decl;
3364 else
3365 target = ipa_impossible_devirt_target (ie, NULL_TREE);
5ce97055 3366 }
2bf86c84 3367 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
44210a96 3368 && !ie->speculative && ie->maybe_hot_p ())
5bccb77a 3369 {
44210a96
MJ
3370 cgraph_node *n;
3371 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3372 ie->indirect_info->otr_token,
3373 ie->indirect_info->context);
3374 if (n)
5ce97055 3375 {
44210a96
MJ
3376 target = n->decl;
3377 speculative = true;
5ce97055 3378 }
5bccb77a 3379 }
b258210c
MJ
3380
3381 if (target)
450ad0cd 3382 {
44210a96
MJ
3383 if (!possible_polymorphic_call_target_p
3384 (ie, cgraph_node::get_create (target)))
0127c169 3385 {
29c43c83 3386 if (speculative)
0127c169
JH
3387 return NULL;
3388 target = ipa_impossible_devirt_target (ie, target);
3389 }
5ce97055 3390 return ipa_make_edge_direct_to_target (ie, target, speculative);
450ad0cd 3391 }
b258210c
MJ
3392 else
3393 return NULL;
3e293154
MJ
3394}
3395
3396/* Update the param called notes associated with NODE when CS is being inlined,
3397 assuming NODE is (potentially indirectly) inlined into CS->callee.
3398 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 3399 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 3400 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 3401
f8e2a1ed 3402static bool
e33c6cd6
MJ
3403update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3404 struct cgraph_node *node,
d52f5295 3405 vec<cgraph_edge *> *new_edges)
3e293154 3406{
9e97ff61 3407 struct ipa_edge_args *top;
b258210c 3408 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 3409 struct ipa_node_params *new_root_info;
f8e2a1ed 3410 bool res = false;
3e293154 3411
e33c6cd6 3412 ipa_check_create_edge_args ();
9e97ff61 3413 top = IPA_EDGE_REF (cs);
d250540a
MJ
3414 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3415 ? cs->caller->global.inlined_to
3416 : cs->caller);
e33c6cd6
MJ
3417
3418 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 3419 {
e33c6cd6 3420 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 3421 struct ipa_jump_func *jfunc;
8b7773a4 3422 int param_index;
3ff29913 3423 cgraph_node *spec_target = NULL;
3e293154 3424
e33c6cd6 3425 next_ie = ie->next_callee;
3e293154 3426
5f902d76
JH
3427 if (ici->param_index == -1)
3428 continue;
e33c6cd6 3429
3e293154 3430 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 3431 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 3432 {
5ee53a06 3433 ici->param_index = -1;
3e293154
MJ
3434 continue;
3435 }
3436
8b7773a4
MJ
3437 param_index = ici->param_index;
3438 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06 3439
3ff29913
JH
3440 if (ie->speculative)
3441 {
3442 struct cgraph_edge *de;
3443 struct ipa_ref *ref;
3444 ie->speculative_call_info (de, ie, ref);
3445 spec_target = de->callee;
3446 }
3447
2bf86c84 3448 if (!opt_for_fn (node->decl, flag_indirect_inlining))
36b72910
JH
3449 new_direct_edge = NULL;
3450 else if (ici->polymorphic)
5ce97055 3451 {
44210a96
MJ
3452 ipa_polymorphic_call_context ctx;
3453 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3454 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
5ce97055 3455 }
b258210c 3456 else
d250540a
MJ
3457 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3458 new_root_info);
042ae7d2 3459 /* If speculation was removed, then we need to do nothing. */
3ff29913
JH
3460 if (new_direct_edge && new_direct_edge != ie
3461 && new_direct_edge->callee == spec_target)
042ae7d2
JH
3462 {
3463 new_direct_edge->indirect_inlining_edge = 1;
3464 top = IPA_EDGE_REF (cs);
3465 res = true;
73d098df
JH
3466 if (!new_direct_edge->speculative)
3467 continue;
042ae7d2
JH
3468 }
3469 else if (new_direct_edge)
685b0d13 3470 {
b258210c 3471 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
3472 if (new_direct_edge->call_stmt)
3473 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
3474 = !gimple_check_call_matching_types (
3475 new_direct_edge->call_stmt,
67348ccc 3476 new_direct_edge->callee->decl, false);
b258210c
MJ
3477 if (new_edges)
3478 {
9771b263 3479 new_edges->safe_push (new_direct_edge);
b258210c
MJ
3480 res = true;
3481 }
042ae7d2 3482 top = IPA_EDGE_REF (cs);
3ff29913
JH
3483 /* If speculative edge was introduced we still need to update
3484 call info of the indirect edge. */
3485 if (!new_direct_edge->speculative)
3486 continue;
685b0d13 3487 }
3ff29913
JH
3488 if (jfunc->type == IPA_JF_PASS_THROUGH
3489 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
36b72910 3490 {
d0502276
JH
3491 if (ici->agg_contents
3492 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3493 && !ici->polymorphic)
36b72910
JH
3494 ici->param_index = -1;
3495 else
d0502276
JH
3496 {
3497 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3498 if (ici->polymorphic
3499 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3500 ici->vptr_changed = true;
3501 }
36b72910
JH
3502 }
3503 else if (jfunc->type == IPA_JF_ANCESTOR)
3504 {
d0502276
JH
3505 if (ici->agg_contents
3506 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3507 && !ici->polymorphic)
36b72910
JH
3508 ici->param_index = -1;
3509 else
3510 {
3511 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3512 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
d0502276
JH
3513 if (ici->polymorphic
3514 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3515 ici->vptr_changed = true;
36b72910
JH
3516 }
3517 }
3518 else
3519 /* Either we can find a destination for this edge now or never. */
3520 ici->param_index = -1;
3e293154 3521 }
e33c6cd6 3522
f8e2a1ed 3523 return res;
3e293154
MJ
3524}
3525
3526/* Recursively traverse subtree of NODE (including node) made of inlined
3527 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3528 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3529 update_jump_functions_after_inlining on all non-inlined edges that lead out
3530 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3531 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3532 created. */
be95e2b9 3533
f8e2a1ed 3534static bool
3e293154
MJ
3535propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3536 struct cgraph_node *node,
d52f5295 3537 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3538{
3539 struct cgraph_edge *e;
f8e2a1ed 3540 bool res;
3e293154 3541
e33c6cd6 3542 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3543
3544 for (e = node->callees; e; e = e->next_callee)
3545 if (!e->inline_failed)
f8e2a1ed 3546 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3547 else
3548 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3549 for (e = node->indirect_calls; e; e = e->next_callee)
3550 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3551
3552 return res;
3e293154
MJ
3553}
3554
4502fe8d
MJ
3555/* Combine two controlled uses counts as done during inlining. */
3556
3557static int
3558combine_controlled_uses_counters (int c, int d)
3559{
3560 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3561 return IPA_UNDESCRIBED_USE;
3562 else
3563 return c + d - 1;
3564}
3565
3566/* Propagate number of controlled users from CS->caleee to the new root of the
3567 tree of inlined nodes. */
3568
3569static void
3570propagate_controlled_uses (struct cgraph_edge *cs)
3571{
3572 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3573 struct cgraph_node *new_root = cs->caller->global.inlined_to
3574 ? cs->caller->global.inlined_to : cs->caller;
3575 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3576 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3577 int count, i;
3578
3579 count = MIN (ipa_get_cs_argument_count (args),
3580 ipa_get_param_count (old_root_info));
3581 for (i = 0; i < count; i++)
3582 {
3583 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3584 struct ipa_cst_ref_desc *rdesc;
3585
3586 if (jf->type == IPA_JF_PASS_THROUGH)
3587 {
3588 int src_idx, c, d;
3589 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3590 c = ipa_get_controlled_uses (new_root_info, src_idx);
3591 d = ipa_get_controlled_uses (old_root_info, i);
3592
3593 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3594 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3595 c = combine_controlled_uses_counters (c, d);
3596 ipa_set_controlled_uses (new_root_info, src_idx, c);
3597 if (c == 0 && new_root_info->ipcp_orig_node)
3598 {
3599 struct cgraph_node *n;
3600 struct ipa_ref *ref;
44210a96 3601 tree t = new_root_info->known_csts[src_idx];
4502fe8d
MJ
3602
3603 if (t && TREE_CODE (t) == ADDR_EXPR
3604 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
d52f5295 3605 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
d122681a 3606 && (ref = new_root->find_reference (n, NULL, 0)))
4502fe8d
MJ
3607 {
3608 if (dump_file)
3609 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3610 "reference from %s/%i to %s/%i.\n",
2a72a953 3611 xstrdup_for_dump (new_root->name ()),
67348ccc 3612 new_root->order,
2a72a953 3613 xstrdup_for_dump (n->name ()), n->order);
d122681a 3614 ref->remove_reference ();
4502fe8d
MJ
3615 }
3616 }
3617 }
3618 else if (jf->type == IPA_JF_CONST
3619 && (rdesc = jfunc_rdesc_usable (jf)))
3620 {
3621 int d = ipa_get_controlled_uses (old_root_info, i);
3622 int c = rdesc->refcount;
3623 rdesc->refcount = combine_controlled_uses_counters (c, d);
3624 if (rdesc->refcount == 0)
3625 {
3626 tree cst = ipa_get_jf_constant (jf);
3627 struct cgraph_node *n;
3628 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3629 && TREE_CODE (TREE_OPERAND (cst, 0))
3630 == FUNCTION_DECL);
d52f5295 3631 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4502fe8d
MJ
3632 if (n)
3633 {
3634 struct cgraph_node *clone;
568cda29 3635 bool ok;
67348ccc 3636 ok = remove_described_reference (n, rdesc);
568cda29 3637 gcc_checking_assert (ok);
4502fe8d
MJ
3638
3639 clone = cs->caller;
3640 while (clone->global.inlined_to
3641 && clone != rdesc->cs->caller
3642 && IPA_NODE_REF (clone)->ipcp_orig_node)
3643 {
3644 struct ipa_ref *ref;
d122681a 3645 ref = clone->find_reference (n, NULL, 0);
4502fe8d
MJ
3646 if (ref)
3647 {
3648 if (dump_file)
3649 fprintf (dump_file, "ipa-prop: Removing "
3650 "cloning-created reference "
3651 "from %s/%i to %s/%i.\n",
2a72a953 3652 xstrdup_for_dump (clone->name ()),
67348ccc 3653 clone->order,
2a72a953 3654 xstrdup_for_dump (n->name ()),
67348ccc 3655 n->order);
d122681a 3656 ref->remove_reference ();
4502fe8d
MJ
3657 }
3658 clone = clone->callers->caller;
3659 }
3660 }
3661 }
3662 }
3663 }
3664
3665 for (i = ipa_get_param_count (old_root_info);
3666 i < ipa_get_cs_argument_count (args);
3667 i++)
3668 {
3669 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3670
3671 if (jf->type == IPA_JF_CONST)
3672 {
3673 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3674 if (rdesc)
3675 rdesc->refcount = IPA_UNDESCRIBED_USE;
3676 }
3677 else if (jf->type == IPA_JF_PASS_THROUGH)
3678 ipa_set_controlled_uses (new_root_info,
3679 jf->value.pass_through.formal_id,
3680 IPA_UNDESCRIBED_USE);
3681 }
3682}
3683
3e293154
MJ
3684/* Update jump functions and call note functions on inlining the call site CS.
3685 CS is expected to lead to a node already cloned by
3686 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3687 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3688 created. */
be95e2b9 3689
f8e2a1ed 3690bool
3e293154 3691ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3692 vec<cgraph_edge *> *new_edges)
3e293154 3693{
5ee53a06 3694 bool changed;
f8e2a1ed
MJ
3695 /* Do nothing if the preparation phase has not been carried out yet
3696 (i.e. during early inlining). */
dd912cb8 3697 if (!ipa_node_params_sum)
f8e2a1ed 3698 return false;
6fe906a3 3699 gcc_assert (ipa_edge_args_sum);
f8e2a1ed 3700
4502fe8d 3701 propagate_controlled_uses (cs);
5ee53a06
JH
3702 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3703
5ee53a06 3704 return changed;
518dc859
RL
3705}
3706
86cd0334
MJ
3707/* Ensure that array of edge arguments infos is big enough to accommodate a
3708 structure for all edges and reallocates it if not. Also, allocate
3709 associated hash tables is they do not already exist. */
3710
3711void
3712ipa_check_create_edge_args (void)
3713{
6fe906a3
MJ
3714 if (!ipa_edge_args_sum)
3715 ipa_edge_args_sum
3716 = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3717 ipa_edge_args_sum_t (symtab, true));
86cd0334
MJ
3718 if (!ipa_bits_hash_table)
3719 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3720 if (!ipa_vr_hash_table)
3721 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3722}
3723
771578a0
MJ
3724/* Frees all dynamically allocated structures that the argument info points
3725 to. */
be95e2b9 3726
518dc859 3727void
771578a0 3728ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 3729{
9771b263 3730 vec_free (args->jump_functions);
771578a0 3731 memset (args, 0, sizeof (*args));
518dc859
RL
3732}
3733
771578a0 3734/* Free all ipa_edge structures. */
be95e2b9 3735
518dc859 3736void
771578a0 3737ipa_free_all_edge_args (void)
518dc859 3738{
6fe906a3 3739 if (!ipa_edge_args_sum)
9771b263
DN
3740 return;
3741
6fe906a3
MJ
3742 ipa_edge_args_sum->release ();
3743 ipa_edge_args_sum = NULL;
518dc859
RL
3744}
3745
771578a0 3746/* Free all ipa_node_params structures. */
be95e2b9 3747
518dc859 3748void
771578a0 3749ipa_free_all_node_params (void)
518dc859 3750{
a0a348b1 3751 ipa_node_params_sum->release ();
dd912cb8 3752 ipa_node_params_sum = NULL;
771578a0
MJ
3753}
3754
86cd0334
MJ
3755/* Grow ipcp_transformations if necessary. Also allocate any necessary hash
3756 tables if they do not already exist. */
04be694e
MJ
3757
3758void
3759ipcp_grow_transformations_if_necessary (void)
3760{
3761 if (vec_safe_length (ipcp_transformations)
3762 <= (unsigned) symtab->cgraph_max_uid)
3763 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
86cd0334
MJ
3764 if (!ipa_bits_hash_table)
3765 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3766 if (!ipa_vr_hash_table)
3767 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
04be694e
MJ
3768}
3769
2c9561b5
MJ
3770/* Set the aggregate replacements of NODE to be AGGVALS. */
3771
3772void
3773ipa_set_node_agg_value_chain (struct cgraph_node *node,
3774 struct ipa_agg_replacement_value *aggvals)
3775{
04be694e
MJ
3776 ipcp_grow_transformations_if_necessary ();
3777 (*ipcp_transformations)[node->uid].agg_values = aggvals;
2c9561b5
MJ
3778}
3779
6fe906a3
MJ
3780/* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3781 count data structures accordingly. */
be95e2b9 3782
6fe906a3
MJ
3783void
3784ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
771578a0 3785{
568cda29
MJ
3786 if (args->jump_functions)
3787 {
3788 struct ipa_jump_func *jf;
3789 int i;
3790 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3791 {
3792 struct ipa_cst_ref_desc *rdesc;
3793 try_decrement_rdesc_refcount (jf);
3794 if (jf->type == IPA_JF_CONST
3795 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3796 && rdesc->cs == cs)
3797 rdesc->cs = NULL;
3798 }
568cda29 3799 }
518dc859
RL
3800}
3801
6fe906a3
MJ
3802/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3803 reference count data strucutres accordingly. */
be95e2b9 3804
6fe906a3
MJ
3805void
3806ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3807 ipa_edge_args *old_args, ipa_edge_args *new_args)
771578a0 3808{
8b7773a4 3809 unsigned int i;
771578a0 3810
9771b263 3811 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3812 if (old_args->polymorphic_call_contexts)
3813 new_args->polymorphic_call_contexts
3814 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3815
9771b263 3816 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3817 {
3818 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3819 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3820
3821 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3822
3823 if (src_jf->type == IPA_JF_CONST)
3824 {
3825 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3826
3827 if (!src_rdesc)
3828 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3829 else if (src->caller == dst->caller)
3830 {
3831 struct ipa_ref *ref;
5e20cdc9 3832 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3833 gcc_checking_assert (n);
d122681a
ML
3834 ref = src->caller->find_reference (n, src->call_stmt,
3835 src->lto_stmt_uid);
568cda29 3836 gcc_checking_assert (ref);
d122681a 3837 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3838
601f3293 3839 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3840 dst_rdesc->cs = dst;
3841 dst_rdesc->refcount = src_rdesc->refcount;
3842 dst_rdesc->next_duplicate = NULL;
3843 dst_jf->value.constant.rdesc = dst_rdesc;
3844 }
4502fe8d
MJ
3845 else if (src_rdesc->cs == src)
3846 {
601f3293 3847 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3848 dst_rdesc->cs = dst;
4502fe8d 3849 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3850 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3851 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3852 dst_jf->value.constant.rdesc = dst_rdesc;
3853 }
3854 else
3855 {
3856 struct ipa_cst_ref_desc *dst_rdesc;
3857 /* This can happen during inlining, when a JFUNC can refer to a
3858 reference taken in a function up in the tree of inline clones.
3859 We need to find the duplicate that refers to our tree of
3860 inline clones. */
3861
3862 gcc_assert (dst->caller->global.inlined_to);
3863 for (dst_rdesc = src_rdesc->next_duplicate;
3864 dst_rdesc;
3865 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3866 {
3867 struct cgraph_node *top;
3868 top = dst_rdesc->cs->caller->global.inlined_to
3869 ? dst_rdesc->cs->caller->global.inlined_to
3870 : dst_rdesc->cs->caller;
3871 if (dst->caller->global.inlined_to == top)
3872 break;
3873 }
44a60244 3874 gcc_assert (dst_rdesc);
4502fe8d
MJ
3875 dst_jf->value.constant.rdesc = dst_rdesc;
3876 }
3877 }
6fe45955
MJ
3878 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3879 && src->caller == dst->caller)
3880 {
3881 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3882 ? dst->caller->global.inlined_to : dst->caller;
3883 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3884 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3885
3886 int c = ipa_get_controlled_uses (root_info, idx);
3887 if (c != IPA_UNDESCRIBED_USE)
3888 {
3889 c++;
3890 ipa_set_controlled_uses (root_info, idx, c);
3891 }
3892 }
4502fe8d 3893 }
771578a0
MJ
3894}
3895
dd912cb8 3896/* Analyze newly added function into callgraph. */
be95e2b9 3897
771578a0 3898static void
dd912cb8 3899ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3900{
dd912cb8
ML
3901 if (node->has_gimple_body_p ())
3902 ipa_analyze_node (node);
3903}
771578a0 3904
dd912cb8
ML
3905/* Hook that is called by summary when a node is duplicated. */
3906
3907void
3908ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3909 ipa_node_params *old_info,
3910 ipa_node_params *new_info)
3911{
3912 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3913
f65f1ae3 3914 new_info->descriptors = vec_safe_copy (old_info->descriptors);
310bc633 3915 new_info->lattices = NULL;
771578a0 3916 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
f65f1ae3
MJ
3917 new_info->known_csts = old_info->known_csts.copy ();
3918 new_info->known_contexts = old_info->known_contexts.copy ();
3949c4a7 3919
8aab5218 3920 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3921 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3922 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3923
3924 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3925 if (old_av)
2c9561b5 3926 {
04be694e
MJ
3927 new_av = NULL;
3928 while (old_av)
3929 {
3930 struct ipa_agg_replacement_value *v;
2c9561b5 3931
04be694e
MJ
3932 v = ggc_alloc<ipa_agg_replacement_value> ();
3933 memcpy (v, old_av, sizeof (*v));
3934 v->next = new_av;
3935 new_av = v;
3936 old_av = old_av->next;
3937 }
3938 ipa_set_node_agg_value_chain (dst, new_av);
3939 }
3940
86cd0334
MJ
3941 ipcp_transformation_summary *src_trans
3942 = ipcp_get_transformation_summary (src);
04be694e 3943
8bc5448f 3944 if (src_trans)
04be694e
MJ
3945 {
3946 ipcp_grow_transformations_if_necessary ();
3947 src_trans = ipcp_get_transformation_summary (src);
86cd0334
MJ
3948 ipcp_transformation_summary *dst_trans
3949 = ipcp_get_transformation_summary (dst);
3950
3951 dst_trans->bits = vec_safe_copy (src_trans->bits);
3952
8bc5448f 3953 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
8bc5448f
KV
3954 vec<ipa_vr, va_gc> *&dst_vr
3955 = ipcp_get_transformation_summary (dst)->m_vr;
8bc5448f
KV
3956 if (vec_safe_length (src_trans->m_vr) > 0)
3957 {
3958 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3959 for (unsigned i = 0; i < src_vr->length (); ++i)
3960 dst_vr->quick_push ((*src_vr)[i]);
3961 }
2c9561b5 3962 }
771578a0
MJ
3963}
3964
3965/* Register our cgraph hooks if they are not already there. */
be95e2b9 3966
518dc859 3967void
771578a0 3968ipa_register_cgraph_hooks (void)
518dc859 3969{
dd912cb8 3970 ipa_check_create_node_params ();
6fe906a3 3971 ipa_check_create_edge_args ();
dd912cb8 3972
dd912cb8 3973 function_insertion_hook_holder =
3dafb85c 3974 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3975}
518dc859 3976
771578a0 3977/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3978
771578a0
MJ
3979static void
3980ipa_unregister_cgraph_hooks (void)
3981{
3dafb85c 3982 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3983 function_insertion_hook_holder = NULL;
771578a0
MJ
3984}
3985
3986/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3987 longer needed after ipa-cp. */
be95e2b9 3988
771578a0 3989void
e33c6cd6 3990ipa_free_all_structures_after_ipa_cp (void)
3e293154 3991{
2bf86c84 3992 if (!optimize && !in_lto_p)
3e293154
MJ
3993 {
3994 ipa_free_all_edge_args ();
3995 ipa_free_all_node_params ();
2651e637
ML
3996 ipcp_sources_pool.release ();
3997 ipcp_cst_values_pool.release ();
3998 ipcp_poly_ctx_values_pool.release ();
3999 ipcp_agg_lattice_pool.release ();
3e293154 4000 ipa_unregister_cgraph_hooks ();
601f3293 4001 ipa_refdesc_pool.release ();
3e293154
MJ
4002 }
4003}
4004
4005/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4006 longer needed after indirect inlining. */
be95e2b9 4007
3e293154 4008void
e33c6cd6 4009ipa_free_all_structures_after_iinln (void)
771578a0
MJ
4010{
4011 ipa_free_all_edge_args ();
4012 ipa_free_all_node_params ();
4013 ipa_unregister_cgraph_hooks ();
2651e637
ML
4014 ipcp_sources_pool.release ();
4015 ipcp_cst_values_pool.release ();
4016 ipcp_poly_ctx_values_pool.release ();
4017 ipcp_agg_lattice_pool.release ();
601f3293 4018 ipa_refdesc_pool.release ();
518dc859
RL
4019}
4020
dcd416e3 4021/* Print ipa_tree_map data structures of all functions in the
518dc859 4022 callgraph to F. */
be95e2b9 4023
518dc859 4024void
2c9561b5 4025ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
4026{
4027 int i, count;
3e293154 4028 struct ipa_node_params *info;
518dc859 4029
67348ccc 4030 if (!node->definition)
3e293154
MJ
4031 return;
4032 info = IPA_NODE_REF (node);
9de04252 4033 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 4034 node->name (), node->order);
3e293154
MJ
4035 count = ipa_get_param_count (info);
4036 for (i = 0; i < count; i++)
518dc859 4037 {
4502fe8d
MJ
4038 int c;
4039
a4e33812 4040 fprintf (f, " ");
e067bd43 4041 ipa_dump_param (f, info, i);
339f49ec
JH
4042 if (ipa_is_param_used (info, i))
4043 fprintf (f, " used");
4502fe8d
MJ
4044 c = ipa_get_controlled_uses (info, i);
4045 if (c == IPA_UNDESCRIBED_USE)
4046 fprintf (f, " undescribed_use");
4047 else
4048 fprintf (f, " controlled_uses=%i", c);
3e293154 4049 fprintf (f, "\n");
518dc859
RL
4050 }
4051}
dcd416e3 4052
ca30a539 4053/* Print ipa_tree_map data structures of all functions in the
3e293154 4054 callgraph to F. */
be95e2b9 4055
3e293154 4056void
ca30a539 4057ipa_print_all_params (FILE * f)
3e293154
MJ
4058{
4059 struct cgraph_node *node;
4060
ca30a539 4061 fprintf (f, "\nFunction parameters:\n");
65c70e6b 4062 FOR_EACH_FUNCTION (node)
ca30a539 4063 ipa_print_node_params (f, node);
3e293154 4064}
3f84bf08
MJ
4065
4066/* Return a heap allocated vector containing formal parameters of FNDECL. */
4067
9771b263 4068vec<tree>
3f84bf08
MJ
4069ipa_get_vector_of_formal_parms (tree fndecl)
4070{
9771b263 4071 vec<tree> args;
3f84bf08
MJ
4072 int count;
4073 tree parm;
4074
0e8853ee 4075 gcc_assert (!flag_wpa);
310bc633 4076 count = count_formal_params (fndecl);
9771b263 4077 args.create (count);
910ad8de 4078 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 4079 args.quick_push (parm);
3f84bf08
MJ
4080
4081 return args;
4082}
4083
4084/* Return a heap allocated vector containing types of formal parameters of
4085 function type FNTYPE. */
4086
31519c38
AH
4087vec<tree>
4088ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 4089{
9771b263 4090 vec<tree> types;
3f84bf08
MJ
4091 int count = 0;
4092 tree t;
4093
4094 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4095 count++;
4096
9771b263 4097 types.create (count);
3f84bf08 4098 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 4099 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
4100
4101 return types;
4102}
4103
4104/* Modify the function declaration FNDECL and its type according to the plan in
4105 ADJUSTMENTS. It also sets base fields of individual adjustments structures
4106 to reflect the actual parameters being modified which are determined by the
4107 base_index field. */
4108
4109void
31519c38 4110ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 4111{
31519c38
AH
4112 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4113 tree orig_type = TREE_TYPE (fndecl);
4114 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
4115
4116 /* The following test is an ugly hack, some functions simply don't have any
4117 arguments in their type. This is probably a bug but well... */
31519c38
AH
4118 bool care_for_types = (old_arg_types != NULL_TREE);
4119 bool last_parm_void;
4120 vec<tree> otypes;
3f84bf08
MJ
4121 if (care_for_types)
4122 {
4123 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4124 == void_type_node);
31519c38 4125 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 4126 if (last_parm_void)
9771b263 4127 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 4128 else
9771b263 4129 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
4130 }
4131 else
4132 {
4133 last_parm_void = false;
9771b263 4134 otypes.create (0);
3f84bf08
MJ
4135 }
4136
31519c38
AH
4137 int len = adjustments.length ();
4138 tree *link = &DECL_ARGUMENTS (fndecl);
4139 tree new_arg_types = NULL;
4140 for (int i = 0; i < len; i++)
3f84bf08
MJ
4141 {
4142 struct ipa_parm_adjustment *adj;
4143 gcc_assert (link);
4144
9771b263 4145 adj = &adjustments[i];
31519c38
AH
4146 tree parm;
4147 if (adj->op == IPA_PARM_OP_NEW)
4148 parm = NULL;
4149 else
4150 parm = oparms[adj->base_index];
3f84bf08
MJ
4151 adj->base = parm;
4152
31519c38 4153 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4154 {
4155 if (care_for_types)
9771b263 4156 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
4157 new_arg_types);
4158 *link = parm;
910ad8de 4159 link = &DECL_CHAIN (parm);
3f84bf08 4160 }
31519c38 4161 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4162 {
4163 tree new_parm;
4164 tree ptype;
4165
4166 if (adj->by_ref)
4167 ptype = build_pointer_type (adj->type);
4168 else
e69dbe37
MJ
4169 {
4170 ptype = adj->type;
26d7dc48
RB
4171 if (is_gimple_reg_type (ptype)
4172 && TYPE_MODE (ptype) != BLKmode)
e69dbe37
MJ
4173 {
4174 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
fe7afdf5 4175 if (TYPE_ALIGN (ptype) != malign)
e69dbe37
MJ
4176 ptype = build_aligned_type (ptype, malign);
4177 }
4178 }
3f84bf08
MJ
4179
4180 if (care_for_types)
4181 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4182
4183 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4184 ptype);
31519c38
AH
4185 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4186 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
4187 DECL_ARTIFICIAL (new_parm) = 1;
4188 DECL_ARG_TYPE (new_parm) = ptype;
4189 DECL_CONTEXT (new_parm) = fndecl;
4190 TREE_USED (new_parm) = 1;
4191 DECL_IGNORED_P (new_parm) = 1;
4192 layout_decl (new_parm, 0);
4193
31519c38
AH
4194 if (adj->op == IPA_PARM_OP_NEW)
4195 adj->base = NULL;
4196 else
4197 adj->base = parm;
4198 adj->new_decl = new_parm;
3f84bf08
MJ
4199
4200 *link = new_parm;
910ad8de 4201 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
4202 }
4203 }
4204
4205 *link = NULL_TREE;
4206
31519c38 4207 tree new_reversed = NULL;
3f84bf08
MJ
4208 if (care_for_types)
4209 {
4210 new_reversed = nreverse (new_arg_types);
4211 if (last_parm_void)
4212 {
4213 if (new_reversed)
4214 TREE_CHAIN (new_arg_types) = void_list_node;
4215 else
4216 new_reversed = void_list_node;
4217 }
4218 }
4219
4220 /* Use copy_node to preserve as much as possible from original type
4221 (debug info, attribute lists etc.)
4222 Exception is METHOD_TYPEs must have THIS argument.
4223 When we are asked to remove it, we need to build new FUNCTION_TYPE
4224 instead. */
31519c38 4225 tree new_type = NULL;
3f84bf08 4226 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 4227 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 4228 && adjustments[0].base_index == 0))
3f84bf08 4229 {
4eb3f32c 4230 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
4231 TYPE_ARG_TYPES (new_type) = new_reversed;
4232 }
4233 else
4234 {
4235 new_type
4236 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4237 new_reversed));
4238 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4239 DECL_VINDEX (fndecl) = NULL_TREE;
4240 }
4241
d402c33d
JH
4242 /* When signature changes, we need to clear builtin info. */
4243 if (DECL_BUILT_IN (fndecl))
4244 {
4245 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4246 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4247 }
4248
3f84bf08 4249 TREE_TYPE (fndecl) = new_type;
9b389a5e 4250 DECL_VIRTUAL_P (fndecl) = 0;
70d6d5c1 4251 DECL_LANG_SPECIFIC (fndecl) = NULL;
9771b263
DN
4252 otypes.release ();
4253 oparms.release ();
3f84bf08
MJ
4254}
4255
4256/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4257 If this is a directly recursive call, CS must be NULL. Otherwise it must
4258 contain the corresponding call graph edge. */
4259
4260void
538dd0b7 4261ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3f84bf08
MJ
4262 ipa_parm_adjustment_vec adjustments)
4263{
d52f5295 4264 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
9771b263
DN
4265 vec<tree> vargs;
4266 vec<tree, va_gc> **debug_args = NULL;
538dd0b7 4267 gcall *new_stmt;
82338059 4268 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
4269 tree callee_decl;
4270 int i, len;
4271
9771b263
DN
4272 len = adjustments.length ();
4273 vargs.create (len);
67348ccc 4274 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
d122681a 4275 current_node->remove_stmt_references (stmt);
3f84bf08
MJ
4276
4277 gsi = gsi_for_stmt (stmt);
82338059
MJ
4278 prev_gsi = gsi;
4279 gsi_prev (&prev_gsi);
3f84bf08
MJ
4280 for (i = 0; i < len; i++)
4281 {
4282 struct ipa_parm_adjustment *adj;
4283
9771b263 4284 adj = &adjustments[i];
3f84bf08 4285
31519c38 4286 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4287 {
4288 tree arg = gimple_call_arg (stmt, adj->base_index);
4289
9771b263 4290 vargs.quick_push (arg);
3f84bf08 4291 }
31519c38 4292 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 4293 {
fffe1e40
MJ
4294 tree expr, base, off;
4295 location_t loc;
f43245d1 4296 unsigned int deref_align = 0;
c1ed6a01 4297 bool deref_base = false;
fffe1e40
MJ
4298
4299 /* We create a new parameter out of the value of the old one, we can
4300 do the following kind of transformations:
4301
4302 - A scalar passed by reference is converted to a scalar passed by
4303 value. (adj->by_ref is false and the type of the original
4304 actual argument is a pointer to a scalar).
4305
4306 - A part of an aggregate is passed instead of the whole aggregate.
4307 The part can be passed either by value or by reference, this is
4308 determined by value of adj->by_ref. Moreover, the code below
4309 handles both situations when the original aggregate is passed by
4310 value (its type is not a pointer) and when it is passed by
4311 reference (it is a pointer to an aggregate).
4312
4313 When the new argument is passed by reference (adj->by_ref is true)
4314 it must be a part of an aggregate and therefore we form it by
4315 simply taking the address of a reference inside the original
4316 aggregate. */
4317
4318 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4319 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
4320 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4321 : EXPR_LOCATION (base);
fffe1e40 4322
82d49829
MJ
4323 if (TREE_CODE (base) != ADDR_EXPR
4324 && POINTER_TYPE_P (TREE_TYPE (base)))
4325 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 4326 adj->offset / BITS_PER_UNIT);
3f84bf08 4327 else
3f84bf08 4328 {
fffe1e40
MJ
4329 HOST_WIDE_INT base_offset;
4330 tree prev_base;
c1ed6a01 4331 bool addrof;
fffe1e40
MJ
4332
4333 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
4334 {
4335 base = TREE_OPERAND (base, 0);
4336 addrof = true;
4337 }
4338 else
4339 addrof = false;
fffe1e40
MJ
4340 prev_base = base;
4341 base = get_addr_base_and_unit_offset (base, &base_offset);
4342 /* Aggregate arguments can have non-invariant addresses. */
4343 if (!base)
4344 {
4345 base = build_fold_addr_expr (prev_base);
82d49829 4346 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4347 adj->offset / BITS_PER_UNIT);
4348 }
4349 else if (TREE_CODE (base) == MEM_REF)
4350 {
c1ed6a01
MJ
4351 if (!addrof)
4352 {
4353 deref_base = true;
4354 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4355 }
82d49829 4356 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4357 base_offset
4358 + adj->offset / BITS_PER_UNIT);
4359 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 4360 off);
fffe1e40
MJ
4361 base = TREE_OPERAND (base, 0);
4362 }
4363 else
4364 {
82d49829 4365 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4366 base_offset
4367 + adj->offset / BITS_PER_UNIT);
4368 base = build_fold_addr_expr (base);
4369 }
3f84bf08 4370 }
fffe1e40 4371
3a5a825a
RG
4372 if (!adj->by_ref)
4373 {
4374 tree type = adj->type;
4375 unsigned int align;
4376 unsigned HOST_WIDE_INT misalign;
644ffefd 4377
c1ed6a01
MJ
4378 if (deref_base)
4379 {
4380 align = deref_align;
4381 misalign = 0;
4382 }
4383 else
4384 {
4385 get_pointer_alignment_1 (base, &align, &misalign);
4386 if (TYPE_ALIGN (type) > align)
4387 align = TYPE_ALIGN (type);
4388 }
807e902e 4389 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3a5a825a
RG
4390 * BITS_PER_UNIT);
4391 misalign = misalign & (align - 1);
4392 if (misalign != 0)
146ec50f 4393 align = least_bit_hwi (misalign);
3a5a825a
RG
4394 if (align < TYPE_ALIGN (type))
4395 type = build_aligned_type (type, align);
4df65a85
RB
4396 base = force_gimple_operand_gsi (&gsi, base,
4397 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4398 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
ee45a32d 4399 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4df65a85
RB
4400 /* If expr is not a valid gimple call argument emit
4401 a load into a temporary. */
4402 if (is_gimple_reg_type (TREE_TYPE (expr)))
4403 {
355fe088 4404 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4df65a85
RB
4405 if (gimple_in_ssa_p (cfun))
4406 {
4407 gimple_set_vuse (tem, gimple_vuse (stmt));
4408 expr = make_ssa_name (TREE_TYPE (expr), tem);
4409 }
4410 else
b731b390 4411 expr = create_tmp_reg (TREE_TYPE (expr));
4df65a85
RB
4412 gimple_assign_set_lhs (tem, expr);
4413 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4414 }
3a5a825a
RG
4415 }
4416 else
4417 {
4418 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
ee45a32d 4419 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3a5a825a 4420 expr = build_fold_addr_expr (expr);
4df65a85
RB
4421 expr = force_gimple_operand_gsi (&gsi, expr,
4422 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4423 }
9771b263 4424 vargs.quick_push (expr);
3f84bf08 4425 }
31519c38 4426 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
4427 {
4428 unsigned int ix;
4429 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
355fe088 4430 gimple *def_temp;
ddb555ed
JJ
4431
4432 arg = gimple_call_arg (stmt, adj->base_index);
4433 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4434 {
4435 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4436 continue;
4437 arg = fold_convert_loc (gimple_location (stmt),
4438 TREE_TYPE (origin), arg);
4439 }
4440 if (debug_args == NULL)
4441 debug_args = decl_debug_args_insert (callee_decl);
9771b263 4442 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
4443 if (ddecl == origin)
4444 {
9771b263 4445 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
4446 break;
4447 }
4448 if (ddecl == NULL)
4449 {
4450 ddecl = make_node (DEBUG_EXPR_DECL);
4451 DECL_ARTIFICIAL (ddecl) = 1;
4452 TREE_TYPE (ddecl) = TREE_TYPE (origin);
899ca90e 4453 SET_DECL_MODE (ddecl, DECL_MODE (origin));
ddb555ed 4454
9771b263
DN
4455 vec_safe_push (*debug_args, origin);
4456 vec_safe_push (*debug_args, ddecl);
ddb555ed 4457 }
9771b263 4458 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
4459 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4460 }
3f84bf08
MJ
4461 }
4462
4463 if (dump_file && (dump_flags & TDF_DETAILS))
4464 {
4465 fprintf (dump_file, "replacing stmt:");
ef6cb4c7 4466 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
3f84bf08
MJ
4467 }
4468
3f84bf08 4469 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 4470 vargs.release ();
3f84bf08
MJ
4471 if (gimple_call_lhs (stmt))
4472 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4473
4474 gimple_set_block (new_stmt, gimple_block (stmt));
4475 if (gimple_has_location (stmt))
4476 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 4477 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 4478 gimple_call_copy_flags (new_stmt, stmt);
4df65a85
RB
4479 if (gimple_in_ssa_p (cfun))
4480 {
4481 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4482 if (gimple_vdef (stmt))
4483 {
4484 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4485 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4486 }
4487 }
3f84bf08
MJ
4488
4489 if (dump_file && (dump_flags & TDF_DETAILS))
4490 {
4491 fprintf (dump_file, "with stmt:");
ef6cb4c7 4492 print_gimple_stmt (dump_file, new_stmt, 0);
3f84bf08
MJ
4493 fprintf (dump_file, "\n");
4494 }
4495 gsi_replace (&gsi, new_stmt, true);
4496 if (cs)
3dafb85c 4497 cs->set_call_stmt (new_stmt);
82338059
MJ
4498 do
4499 {
d52f5295 4500 current_node->record_stmt_references (gsi_stmt (gsi));
82338059
MJ
4501 gsi_prev (&gsi);
4502 }
3d354792 4503 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3f84bf08
MJ
4504}
4505
31519c38
AH
4506/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4507 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4508 specifies whether the function should care about type incompatibility the
4509 current and new expressions. If it is false, the function will leave
4510 incompatibility issues to the caller. Return true iff the expression
4511 was modified. */
4512
4513bool
4514ipa_modify_expr (tree *expr, bool convert,
4515 ipa_parm_adjustment_vec adjustments)
4516{
4517 struct ipa_parm_adjustment *cand
4518 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4519 if (!cand)
4520 return false;
4521
4522 tree src;
4523 if (cand->by_ref)
ee45a32d
EB
4524 {
4525 src = build_simple_mem_ref (cand->new_decl);
4526 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4527 }
31519c38
AH
4528 else
4529 src = cand->new_decl;
4530
4531 if (dump_file && (dump_flags & TDF_DETAILS))
4532 {
4533 fprintf (dump_file, "About to replace expr ");
ef6cb4c7 4534 print_generic_expr (dump_file, *expr);
31519c38 4535 fprintf (dump_file, " with ");
ef6cb4c7 4536 print_generic_expr (dump_file, src);
31519c38
AH
4537 fprintf (dump_file, "\n");
4538 }
4539
4540 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4541 {
4542 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4543 *expr = vce;
4544 }
4545 else
4546 *expr = src;
4547 return true;
4548}
4549
4550/* If T is an SSA_NAME, return NULL if it is not a default def or
4551 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4552 the base variable is always returned, regardless if it is a default
4553 def. Return T if it is not an SSA_NAME. */
4554
4555static tree
4556get_ssa_base_param (tree t, bool ignore_default_def)
4557{
4558 if (TREE_CODE (t) == SSA_NAME)
4559 {
4560 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4561 return SSA_NAME_VAR (t);
4562 else
4563 return NULL_TREE;
4564 }
4565 return t;
4566}
4567
4568/* Given an expression, return an adjustment entry specifying the
4569 transformation to be done on EXPR. If no suitable adjustment entry
4570 was found, returns NULL.
4571
4572 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4573 default def, otherwise bail on them.
4574
4575 If CONVERT is non-NULL, this function will set *CONVERT if the
4576 expression provided is a component reference. ADJUSTMENTS is the
4577 adjustments vector. */
4578
4579ipa_parm_adjustment *
4580ipa_get_adjustment_candidate (tree **expr, bool *convert,
4581 ipa_parm_adjustment_vec adjustments,
4582 bool ignore_default_def)
4583{
4584 if (TREE_CODE (**expr) == BIT_FIELD_REF
4585 || TREE_CODE (**expr) == IMAGPART_EXPR
4586 || TREE_CODE (**expr) == REALPART_EXPR)
4587 {
4588 *expr = &TREE_OPERAND (**expr, 0);
4589 if (convert)
4590 *convert = true;
4591 }
4592
4593 HOST_WIDE_INT offset, size, max_size;
ee45a32d
EB
4594 bool reverse;
4595 tree base
4596 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
31519c38
AH
4597 if (!base || size == -1 || max_size == -1)
4598 return NULL;
4599
4600 if (TREE_CODE (base) == MEM_REF)
4601 {
807e902e 4602 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
31519c38
AH
4603 base = TREE_OPERAND (base, 0);
4604 }
4605
4606 base = get_ssa_base_param (base, ignore_default_def);
4607 if (!base || TREE_CODE (base) != PARM_DECL)
4608 return NULL;
4609
4610 struct ipa_parm_adjustment *cand = NULL;
4611 unsigned int len = adjustments.length ();
4612 for (unsigned i = 0; i < len; i++)
4613 {
4614 struct ipa_parm_adjustment *adj = &adjustments[i];
4615
4616 if (adj->base == base
4617 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4618 {
4619 cand = adj;
4620 break;
4621 }
4622 }
4623
4624 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4625 return NULL;
4626 return cand;
4627}
4628
3f84bf08
MJ
4629/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4630
4631static bool
4632index_in_adjustments_multiple_times_p (int base_index,
4633 ipa_parm_adjustment_vec adjustments)
4634{
9771b263 4635 int i, len = adjustments.length ();
3f84bf08
MJ
4636 bool one = false;
4637
4638 for (i = 0; i < len; i++)
4639 {
4640 struct ipa_parm_adjustment *adj;
9771b263 4641 adj = &adjustments[i];
3f84bf08
MJ
4642
4643 if (adj->base_index == base_index)
4644 {
4645 if (one)
4646 return true;
4647 else
4648 one = true;
4649 }
4650 }
4651 return false;
4652}
4653
4654
4655/* Return adjustments that should have the same effect on function parameters
4656 and call arguments as if they were first changed according to adjustments in
4657 INNER and then by adjustments in OUTER. */
4658
4659ipa_parm_adjustment_vec
4660ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4661 ipa_parm_adjustment_vec outer)
4662{
9771b263
DN
4663 int i, outlen = outer.length ();
4664 int inlen = inner.length ();
3f84bf08
MJ
4665 int removals = 0;
4666 ipa_parm_adjustment_vec adjustments, tmp;
4667
9771b263 4668 tmp.create (inlen);
3f84bf08
MJ
4669 for (i = 0; i < inlen; i++)
4670 {
4671 struct ipa_parm_adjustment *n;
9771b263 4672 n = &inner[i];
3f84bf08 4673
31519c38 4674 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4675 removals++;
4676 else
31519c38
AH
4677 {
4678 /* FIXME: Handling of new arguments are not implemented yet. */
4679 gcc_assert (n->op != IPA_PARM_OP_NEW);
4680 tmp.quick_push (*n);
4681 }
3f84bf08
MJ
4682 }
4683
9771b263 4684 adjustments.create (outlen + removals);
3f84bf08
MJ
4685 for (i = 0; i < outlen; i++)
4686 {
f32682ca 4687 struct ipa_parm_adjustment r;
9771b263
DN
4688 struct ipa_parm_adjustment *out = &outer[i];
4689 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 4690
f32682ca 4691 memset (&r, 0, sizeof (r));
31519c38
AH
4692 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4693 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4694 {
4695 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4696 {
31519c38 4697 r.op = IPA_PARM_OP_REMOVE;
9771b263 4698 adjustments.quick_push (r);
3f84bf08
MJ
4699 }
4700 continue;
4701 }
31519c38
AH
4702 else
4703 {
4704 /* FIXME: Handling of new arguments are not implemented yet. */
4705 gcc_assert (out->op != IPA_PARM_OP_NEW);
4706 }
3f84bf08 4707
f32682ca
DN
4708 r.base_index = in->base_index;
4709 r.type = out->type;
3f84bf08
MJ
4710
4711 /* FIXME: Create nonlocal value too. */
4712
31519c38
AH
4713 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4714 r.op = IPA_PARM_OP_COPY;
4715 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 4716 r.offset = out->offset;
31519c38 4717 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 4718 r.offset = in->offset;
3f84bf08 4719 else
f32682ca 4720 r.offset = in->offset + out->offset;
9771b263 4721 adjustments.quick_push (r);
3f84bf08
MJ
4722 }
4723
4724 for (i = 0; i < inlen; i++)
4725 {
9771b263 4726 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 4727
31519c38 4728 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 4729 adjustments.quick_push (*n);
3f84bf08
MJ
4730 }
4731
9771b263 4732 tmp.release ();
3f84bf08
MJ
4733 return adjustments;
4734}
4735
4736/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4737 friendly way, assuming they are meant to be applied to FNDECL. */
4738
4739void
4740ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4741 tree fndecl)
4742{
9771b263 4743 int i, len = adjustments.length ();
3f84bf08 4744 bool first = true;
9771b263 4745 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
4746
4747 fprintf (file, "IPA param adjustments: ");
4748 for (i = 0; i < len; i++)
4749 {
4750 struct ipa_parm_adjustment *adj;
9771b263 4751 adj = &adjustments[i];
3f84bf08
MJ
4752
4753 if (!first)
4754 fprintf (file, " ");
4755 else
4756 first = false;
4757
4758 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
ef6cb4c7 4759 print_generic_expr (file, parms[adj->base_index]);
3f84bf08
MJ
4760 if (adj->base)
4761 {
4762 fprintf (file, ", base: ");
ef6cb4c7 4763 print_generic_expr (file, adj->base);
3f84bf08 4764 }
31519c38 4765 if (adj->new_decl)
3f84bf08 4766 {
31519c38 4767 fprintf (file, ", new_decl: ");
ef6cb4c7 4768 print_generic_expr (file, adj->new_decl);
3f84bf08
MJ
4769 }
4770 if (adj->new_ssa_base)
4771 {
4772 fprintf (file, ", new_ssa_base: ");
ef6cb4c7 4773 print_generic_expr (file, adj->new_ssa_base);
3f84bf08
MJ
4774 }
4775
31519c38 4776 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08 4777 fprintf (file, ", copy_param");
31519c38 4778 else if (adj->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4779 fprintf (file, ", remove_param");
4780 else
4781 fprintf (file, ", offset %li", (long) adj->offset);
4782 if (adj->by_ref)
4783 fprintf (file, ", by_ref");
4784 print_node_brief (file, ", type: ", adj->type, 0);
4785 fprintf (file, "\n");
4786 }
9771b263 4787 parms.release ();
3f84bf08
MJ
4788}
4789
2c9561b5
MJ
4790/* Dump the AV linked list. */
4791
4792void
4793ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4794{
4795 bool comma = false;
4796 fprintf (f, " Aggregate replacements:");
4797 for (; av; av = av->next)
4798 {
4799 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4800 av->index, av->offset);
ef6cb4c7 4801 print_generic_expr (f, av->value);
2c9561b5
MJ
4802 comma = true;
4803 }
4804 fprintf (f, "\n");
4805}
4806
fb3f88cc
JH
4807/* Stream out jump function JUMP_FUNC to OB. */
4808
4809static void
4810ipa_write_jump_function (struct output_block *ob,
4811 struct ipa_jump_func *jump_func)
4812{
8b7773a4
MJ
4813 struct ipa_agg_jf_item *item;
4814 struct bitpack_d bp;
4815 int i, count;
fb3f88cc 4816
8b7773a4 4817 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
4818 switch (jump_func->type)
4819 {
4820 case IPA_JF_UNKNOWN:
4821 break;
4822 case IPA_JF_CONST:
5368224f 4823 gcc_assert (
4502fe8d
MJ
4824 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4825 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
4826 break;
4827 case IPA_JF_PASS_THROUGH:
412288f1 4828 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4829 if (jump_func->value.pass_through.operation == NOP_EXPR)
4830 {
4831 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4832 bp = bitpack_create (ob->main_stream);
4833 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4834 streamer_write_bitpack (&bp);
4835 }
a2b4c188
KV
4836 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4837 == tcc_unary)
4838 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4a53743e
MJ
4839 else
4840 {
4841 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4842 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4843 }
fb3f88cc
JH
4844 break;
4845 case IPA_JF_ANCESTOR:
412288f1 4846 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
412288f1 4847 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4848 bp = bitpack_create (ob->main_stream);
4849 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4850 streamer_write_bitpack (&bp);
fb3f88cc 4851 break;
8b7773a4
MJ
4852 }
4853
9771b263 4854 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4855 streamer_write_uhwi (ob, count);
4856 if (count)
4857 {
4858 bp = bitpack_create (ob->main_stream);
4859 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4860 streamer_write_bitpack (&bp);
4861 }
4862
9771b263 4863 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4864 {
4865 streamer_write_uhwi (ob, item->offset);
4866 stream_write_tree (ob, item->value, true);
fb3f88cc 4867 }
04be694e 4868
209ca542 4869 bp = bitpack_create (ob->main_stream);
86cd0334 4870 bp_pack_value (&bp, !!jump_func->bits, 1);
209ca542 4871 streamer_write_bitpack (&bp);
86cd0334 4872 if (jump_func->bits)
209ca542 4873 {
86cd0334
MJ
4874 streamer_write_widest_int (ob, jump_func->bits->value);
4875 streamer_write_widest_int (ob, jump_func->bits->mask);
a5e14a42 4876 }
86cd0334 4877 bp_pack_value (&bp, !!jump_func->m_vr, 1);
8bc5448f 4878 streamer_write_bitpack (&bp);
86cd0334 4879 if (jump_func->m_vr)
8bc5448f
KV
4880 {
4881 streamer_write_enum (ob->main_stream, value_rang_type,
86cd0334
MJ
4882 VR_LAST, jump_func->m_vr->type);
4883 stream_write_tree (ob, jump_func->m_vr->min, true);
4884 stream_write_tree (ob, jump_func->m_vr->max, true);
8bc5448f 4885 }
fb3f88cc
JH
4886}
4887
4888/* Read in jump function JUMP_FUNC from IB. */
4889
4890static void
4891ipa_read_jump_function (struct lto_input_block *ib,
4892 struct ipa_jump_func *jump_func,
4502fe8d 4893 struct cgraph_edge *cs,
fb3f88cc
JH
4894 struct data_in *data_in)
4895{
4a53743e
MJ
4896 enum jump_func_type jftype;
4897 enum tree_code operation;
8b7773a4 4898 int i, count;
fb3f88cc 4899
4a53743e
MJ
4900 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4901 switch (jftype)
fb3f88cc
JH
4902 {
4903 case IPA_JF_UNKNOWN:
04be694e 4904 ipa_set_jf_unknown (jump_func);
fb3f88cc
JH
4905 break;
4906 case IPA_JF_CONST:
4502fe8d 4907 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4908 break;
4909 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4910 operation = (enum tree_code) streamer_read_uhwi (ib);
4911 if (operation == NOP_EXPR)
4912 {
4913 int formal_id = streamer_read_uhwi (ib);
4914 struct bitpack_d bp = streamer_read_bitpack (ib);
4915 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4916 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4a53743e 4917 }
a2b4c188
KV
4918 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4919 {
4920 int formal_id = streamer_read_uhwi (ib);
4921 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4922 }
4a53743e
MJ
4923 else
4924 {
4925 tree operand = stream_read_tree (ib, data_in);
4926 int formal_id = streamer_read_uhwi (ib);
4927 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4928 operation);
4929 }
fb3f88cc
JH
4930 break;
4931 case IPA_JF_ANCESTOR:
4a53743e
MJ
4932 {
4933 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4a53743e
MJ
4934 int formal_id = streamer_read_uhwi (ib);
4935 struct bitpack_d bp = streamer_read_bitpack (ib);
4936 bool agg_preserved = bp_unpack_value (&bp, 1);
3b97a5c7 4937 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4a53743e
MJ
4938 break;
4939 }
8b7773a4
MJ
4940 }
4941
4942 count = streamer_read_uhwi (ib);
9771b263 4943 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4944 if (count)
4945 {
4a53743e 4946 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4947 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4948 }
4949 for (i = 0; i < count; i++)
4950 {
f32682ca
DN
4951 struct ipa_agg_jf_item item;
4952 item.offset = streamer_read_uhwi (ib);
4953 item.value = stream_read_tree (ib, data_in);
9771b263 4954 jump_func->agg.items->quick_push (item);
fb3f88cc 4955 }
04be694e
MJ
4956
4957 struct bitpack_d bp = streamer_read_bitpack (ib);
209ca542
PK
4958 bool bits_known = bp_unpack_value (&bp, 1);
4959 if (bits_known)
4960 {
86cd0334
MJ
4961 widest_int value = streamer_read_widest_int (ib);
4962 widest_int mask = streamer_read_widest_int (ib);
4963 ipa_set_jfunc_bits (jump_func, value, mask);
209ca542
PK
4964 }
4965 else
86cd0334 4966 jump_func->bits = NULL;
8bc5448f
KV
4967
4968 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4969 bool vr_known = bp_unpack_value (&vr_bp, 1);
4970 if (vr_known)
4971 {
86cd0334
MJ
4972 enum value_range_type type = streamer_read_enum (ib, value_range_type,
4973 VR_LAST);
4974 tree min = stream_read_tree (ib, data_in);
4975 tree max = stream_read_tree (ib, data_in);
4976 ipa_set_jfunc_vr (jump_func, type, min, max);
8bc5448f
KV
4977 }
4978 else
86cd0334 4979 jump_func->m_vr = NULL;
fb3f88cc
JH
4980}
4981
e33c6cd6
MJ
4982/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4983 relevant to indirect inlining to OB. */
661e7330
MJ
4984
4985static void
e33c6cd6
MJ
4986ipa_write_indirect_edge_info (struct output_block *ob,
4987 struct cgraph_edge *cs)
661e7330 4988{
e33c6cd6 4989 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4990 struct bitpack_d bp;
e33c6cd6 4991
412288f1 4992 streamer_write_hwi (ob, ii->param_index);
2465dcc2
RG
4993 bp = bitpack_create (ob->main_stream);
4994 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4995 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4996 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4997 bp_pack_value (&bp, ii->by_ref, 1);
91bb9f80 4998 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
0127c169 4999 bp_pack_value (&bp, ii->vptr_changed, 1);
412288f1 5000 streamer_write_bitpack (&bp);
ba392339
JH
5001 if (ii->agg_contents || ii->polymorphic)
5002 streamer_write_hwi (ob, ii->offset);
5003 else
5004 gcc_assert (ii->offset == 0);
b258210c
MJ
5005
5006 if (ii->polymorphic)
5007 {
412288f1 5008 streamer_write_hwi (ob, ii->otr_token);
b9393656 5009 stream_write_tree (ob, ii->otr_type, true);
ba392339 5010 ii->context.stream_out (ob);
b258210c 5011 }
661e7330
MJ
5012}
5013
e33c6cd6
MJ
5014/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5015 relevant to indirect inlining from IB. */
661e7330
MJ
5016
5017static void
e33c6cd6 5018ipa_read_indirect_edge_info (struct lto_input_block *ib,
ba392339 5019 struct data_in *data_in,
e33c6cd6 5020 struct cgraph_edge *cs)
661e7330 5021{
e33c6cd6 5022 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 5023 struct bitpack_d bp;
661e7330 5024
412288f1 5025 ii->param_index = (int) streamer_read_hwi (ib);
412288f1 5026 bp = streamer_read_bitpack (ib);
2465dcc2 5027 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 5028 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 5029 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 5030 ii->by_ref = bp_unpack_value (&bp, 1);
91bb9f80 5031 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
0127c169 5032 ii->vptr_changed = bp_unpack_value (&bp, 1);
ba392339
JH
5033 if (ii->agg_contents || ii->polymorphic)
5034 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5035 else
5036 ii->offset = 0;
b258210c
MJ
5037 if (ii->polymorphic)
5038 {
412288f1 5039 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 5040 ii->otr_type = stream_read_tree (ib, data_in);
ba392339 5041 ii->context.stream_in (ib, data_in);
b258210c 5042 }
661e7330
MJ
5043}
5044
fb3f88cc
JH
5045/* Stream out NODE info to OB. */
5046
5047static void
5048ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5049{
5050 int node_ref;
7380e6ef 5051 lto_symtab_encoder_t encoder;
fb3f88cc
JH
5052 struct ipa_node_params *info = IPA_NODE_REF (node);
5053 int j;
5054 struct cgraph_edge *e;
2465dcc2 5055 struct bitpack_d bp;
fb3f88cc 5056
7380e6ef 5057 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5058 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 5059 streamer_write_uhwi (ob, node_ref);
fb3f88cc 5060
0e8853ee
JH
5061 streamer_write_uhwi (ob, ipa_get_param_count (info));
5062 for (j = 0; j < ipa_get_param_count (info); j++)
5063 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 5064 bp = bitpack_create (ob->main_stream);
8aab5218 5065 gcc_assert (info->analysis_done
661e7330 5066 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
5067 gcc_assert (!info->node_enqueued);
5068 gcc_assert (!info->ipcp_orig_node);
5069 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 5070 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 5071 streamer_write_bitpack (&bp);
4502fe8d 5072 for (j = 0; j < ipa_get_param_count (info); j++)
a5e14a42
MJ
5073 {
5074 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5075 stream_write_tree (ob, ipa_get_type (info, j), true);
5076 }
fb3f88cc
JH
5077 for (e = node->callees; e; e = e->next_callee)
5078 {
5079 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5080
5ce97055
JH
5081 streamer_write_uhwi (ob,
5082 ipa_get_cs_argument_count (args) * 2
5083 + (args->polymorphic_call_contexts != NULL));
fb3f88cc 5084 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
5085 {
5086 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5087 if (args->polymorphic_call_contexts != NULL)
5088 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5089 }
fb3f88cc 5090 }
e33c6cd6 5091 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
5092 {
5093 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5094
5ce97055
JH
5095 streamer_write_uhwi (ob,
5096 ipa_get_cs_argument_count (args) * 2
5097 + (args->polymorphic_call_contexts != NULL));
c8246dbe 5098 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5ce97055
JH
5099 {
5100 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5101 if (args->polymorphic_call_contexts != NULL)
5102 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5103 }
c8246dbe
JH
5104 ipa_write_indirect_edge_info (ob, e);
5105 }
fb3f88cc
JH
5106}
5107
61502ca8 5108/* Stream in NODE info from IB. */
fb3f88cc
JH
5109
5110static void
5111ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
5112 struct data_in *data_in)
5113{
5114 struct ipa_node_params *info = IPA_NODE_REF (node);
5115 int k;
5116 struct cgraph_edge *e;
2465dcc2 5117 struct bitpack_d bp;
fb3f88cc 5118
0e8853ee 5119 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 5120
0e8853ee 5121 for (k = 0; k < ipa_get_param_count (info); k++)
f65f1ae3 5122 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
a5e14a42 5123
412288f1 5124 bp = streamer_read_bitpack (ib);
fb3f88cc 5125 if (ipa_get_param_count (info) != 0)
8aab5218 5126 info->analysis_done = true;
fb3f88cc
JH
5127 info->node_enqueued = false;
5128 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 5129 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a 5130 for (k = 0; k < ipa_get_param_count (info); k++)
a5e14a42
MJ
5131 {
5132 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
f65f1ae3 5133 (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
a5e14a42 5134 }
fb3f88cc
JH
5135 for (e = node->callees; e; e = e->next_callee)
5136 {
5137 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 5138 int count = streamer_read_uhwi (ib);
5ce97055
JH
5139 bool contexts_computed = count & 1;
5140 count /= 2;
fb3f88cc 5141
fb3f88cc
JH
5142 if (!count)
5143 continue;
9771b263 5144 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
5145 if (contexts_computed)
5146 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
fb3f88cc 5147
fb3f88cc 5148 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
5149 {
5150 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5151 data_in);
5152 if (contexts_computed)
5153 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5154 }
fb3f88cc 5155 }
e33c6cd6 5156 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
5157 {
5158 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 5159 int count = streamer_read_uhwi (ib);
5ce97055
JH
5160 bool contexts_computed = count & 1;
5161 count /= 2;
c8246dbe 5162
c8246dbe
JH
5163 if (count)
5164 {
9771b263 5165 vec_safe_grow_cleared (args->jump_functions, count);
5ce97055
JH
5166 if (contexts_computed)
5167 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
c8246dbe 5168 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5ce97055
JH
5169 {
5170 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5171 data_in);
5172 if (contexts_computed)
5173 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5174 }
c8246dbe
JH
5175 }
5176 ipa_read_indirect_edge_info (ib, data_in, e);
5177 }
fb3f88cc
JH
5178}
5179
5180/* Write jump functions for nodes in SET. */
5181
5182void
f27c1867 5183ipa_prop_write_jump_functions (void)
fb3f88cc
JH
5184{
5185 struct cgraph_node *node;
93536c97 5186 struct output_block *ob;
fb3f88cc 5187 unsigned int count = 0;
f27c1867
JH
5188 lto_symtab_encoder_iterator lsei;
5189 lto_symtab_encoder_t encoder;
5190
6fe906a3 5191 if (!ipa_node_params_sum || !ipa_edge_args_sum)
93536c97 5192 return;
fb3f88cc 5193
93536c97 5194 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5195 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5196 ob->symbol = NULL;
f27c1867
JH
5197 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5198 lsei_next_function_in_partition (&lsei))
fb3f88cc 5199 {
f27c1867 5200 node = lsei_cgraph_node (lsei);
d52f5295 5201 if (node->has_gimple_body_p ()
c47d0034 5202 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5203 count++;
5204 }
5205
412288f1 5206 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5207
5208 /* Process all of the functions. */
f27c1867
JH
5209 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5210 lsei_next_function_in_partition (&lsei))
fb3f88cc 5211 {
f27c1867 5212 node = lsei_cgraph_node (lsei);
d52f5295 5213 if (node->has_gimple_body_p ()
c47d0034 5214 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5215 ipa_write_node_info (ob, node);
5216 }
412288f1 5217 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5218 produce_asm (ob, NULL);
5219 destroy_output_block (ob);
5220}
5221
5222/* Read section in file FILE_DATA of length LEN with data DATA. */
5223
5224static void
5225ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5226 size_t len)
5227{
5228 const struct lto_function_header *header =
5229 (const struct lto_function_header *) data;
4ad9a9de
EB
5230 const int cfg_offset = sizeof (struct lto_function_header);
5231 const int main_offset = cfg_offset + header->cfg_size;
5232 const int string_offset = main_offset + header->main_size;
fb3f88cc 5233 struct data_in *data_in;
fb3f88cc
JH
5234 unsigned int i;
5235 unsigned int count;
5236
207c68cd 5237 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5238 header->main_size, file_data->mode_table);
fb3f88cc
JH
5239
5240 data_in =
5241 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5242 header->string_size, vNULL);
412288f1 5243 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
5244
5245 for (i = 0; i < count; i++)
5246 {
5247 unsigned int index;
5248 struct cgraph_node *node;
7380e6ef 5249 lto_symtab_encoder_t encoder;
fb3f88cc 5250
412288f1 5251 index = streamer_read_uhwi (&ib_main);
7380e6ef 5252 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5253 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5254 index));
67348ccc 5255 gcc_assert (node->definition);
fb3f88cc
JH
5256 ipa_read_node_info (&ib_main, node, data_in);
5257 }
5258 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5259 len);
5260 lto_data_in_delete (data_in);
5261}
5262
5263/* Read ipcp jump functions. */
5264
5265void
5266ipa_prop_read_jump_functions (void)
5267{
5268 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5269 struct lto_file_decl_data *file_data;
5270 unsigned int j = 0;
5271
5272 ipa_check_create_node_params ();
5273 ipa_check_create_edge_args ();
5274 ipa_register_cgraph_hooks ();
5275
5276 while ((file_data = file_data_vec[j++]))
5277 {
5278 size_t len;
5279 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5280
5281 if (data)
5282 ipa_prop_read_section (file_data, data, len);
5283 }
5284}
5285
2c9561b5 5286void
04be694e 5287write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
2c9561b5
MJ
5288{
5289 int node_ref;
5290 unsigned int count = 0;
5291 lto_symtab_encoder_t encoder;
5292 struct ipa_agg_replacement_value *aggvals, *av;
5293
5294 aggvals = ipa_get_agg_replacements_for_node (node);
5295 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 5296 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
5297 streamer_write_uhwi (ob, node_ref);
5298
5299 for (av = aggvals; av; av = av->next)
5300 count++;
5301 streamer_write_uhwi (ob, count);
5302
5303 for (av = aggvals; av; av = av->next)
5304 {
7b920a9a
MJ
5305 struct bitpack_d bp;
5306
2c9561b5
MJ
5307 streamer_write_uhwi (ob, av->offset);
5308 streamer_write_uhwi (ob, av->index);
5309 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
5310
5311 bp = bitpack_create (ob->main_stream);
5312 bp_pack_value (&bp, av->by_ref, 1);
5313 streamer_write_bitpack (&bp);
2c9561b5 5314 }
04be694e
MJ
5315
5316 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
8bc5448f
KV
5317 if (ts && vec_safe_length (ts->m_vr) > 0)
5318 {
5319 count = ts->m_vr->length ();
5320 streamer_write_uhwi (ob, count);
5321 for (unsigned i = 0; i < count; ++i)
5322 {
5323 struct bitpack_d bp;
5324 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5325 bp = bitpack_create (ob->main_stream);
5326 bp_pack_value (&bp, parm_vr->known, 1);
5327 streamer_write_bitpack (&bp);
5328 if (parm_vr->known)
5329 {
5330 streamer_write_enum (ob->main_stream, value_rang_type,
5331 VR_LAST, parm_vr->type);
5332 streamer_write_wide_int (ob, parm_vr->min);
5333 streamer_write_wide_int (ob, parm_vr->max);
5334 }
5335 }
5336 }
5337 else
5338 streamer_write_uhwi (ob, 0);
5339
209ca542
PK
5340 if (ts && vec_safe_length (ts->bits) > 0)
5341 {
5342 count = ts->bits->length ();
5343 streamer_write_uhwi (ob, count);
5344
5345 for (unsigned i = 0; i < count; ++i)
5346 {
86cd0334 5347 const ipa_bits *bits_jfunc = (*ts->bits)[i];
209ca542 5348 struct bitpack_d bp = bitpack_create (ob->main_stream);
86cd0334 5349 bp_pack_value (&bp, !!bits_jfunc, 1);
209ca542 5350 streamer_write_bitpack (&bp);
86cd0334 5351 if (bits_jfunc)
209ca542 5352 {
86cd0334
MJ
5353 streamer_write_widest_int (ob, bits_jfunc->value);
5354 streamer_write_widest_int (ob, bits_jfunc->mask);
209ca542
PK
5355 }
5356 }
5357 }
5358 else
5359 streamer_write_uhwi (ob, 0);
2c9561b5
MJ
5360}
5361
5362/* Stream in the aggregate value replacement chain for NODE from IB. */
5363
5364static void
04be694e
MJ
5365read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5366 data_in *data_in)
2c9561b5
MJ
5367{
5368 struct ipa_agg_replacement_value *aggvals = NULL;
5369 unsigned int count, i;
5370
5371 count = streamer_read_uhwi (ib);
5372 for (i = 0; i <count; i++)
5373 {
5374 struct ipa_agg_replacement_value *av;
7b920a9a 5375 struct bitpack_d bp;
2c9561b5 5376
766090c2 5377 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
5378 av->offset = streamer_read_uhwi (ib);
5379 av->index = streamer_read_uhwi (ib);
5380 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
5381 bp = streamer_read_bitpack (ib);
5382 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
5383 av->next = aggvals;
5384 aggvals = av;
5385 }
5386 ipa_set_node_agg_value_chain (node, aggvals);
67b97478 5387
209ca542
PK
5388 count = streamer_read_uhwi (ib);
5389 if (count > 0)
5390 {
5391 ipcp_grow_transformations_if_necessary ();
8bc5448f
KV
5392
5393 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5394 vec_safe_grow_cleared (ts->m_vr, count);
5395 for (i = 0; i < count; i++)
5396 {
5397 ipa_vr *parm_vr;
5398 parm_vr = &(*ts->m_vr)[i];
5399 struct bitpack_d bp;
5400 bp = streamer_read_bitpack (ib);
5401 parm_vr->known = bp_unpack_value (&bp, 1);
5402 if (parm_vr->known)
5403 {
5404 parm_vr->type = streamer_read_enum (ib, value_range_type,
5405 VR_LAST);
5406 parm_vr->min = streamer_read_wide_int (ib);
5407 parm_vr->max = streamer_read_wide_int (ib);
5408 }
5409 }
5410 }
5411 count = streamer_read_uhwi (ib);
5412 if (count > 0)
5413 {
5414 ipcp_grow_transformations_if_necessary ();
5415
209ca542
PK
5416 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5417 vec_safe_grow_cleared (ts->bits, count);
5418
5419 for (i = 0; i < count; i++)
5420 {
209ca542 5421 struct bitpack_d bp = streamer_read_bitpack (ib);
86cd0334
MJ
5422 bool known = bp_unpack_value (&bp, 1);
5423 if (known)
209ca542 5424 {
86cd0334
MJ
5425 ipa_bits *bits
5426 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
5427 streamer_read_widest_int (ib));
5428 (*ts->bits)[i] = bits;
209ca542
PK
5429 }
5430 }
5431 }
2c9561b5
MJ
5432}
5433
5434/* Write all aggregate replacement for nodes in set. */
5435
5436void
04be694e 5437ipcp_write_transformation_summaries (void)
2c9561b5
MJ
5438{
5439 struct cgraph_node *node;
5440 struct output_block *ob;
5441 unsigned int count = 0;
5442 lto_symtab_encoder_iterator lsei;
5443 lto_symtab_encoder_t encoder;
5444
2c9561b5
MJ
5445 ob = create_output_block (LTO_section_ipcp_transform);
5446 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5447 ob->symbol = NULL;
2c9561b5
MJ
5448 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5449 lsei_next_function_in_partition (&lsei))
5450 {
5451 node = lsei_cgraph_node (lsei);
04be694e 5452 if (node->has_gimple_body_p ())
2c9561b5
MJ
5453 count++;
5454 }
5455
5456 streamer_write_uhwi (ob, count);
5457
5458 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5459 lsei_next_function_in_partition (&lsei))
5460 {
5461 node = lsei_cgraph_node (lsei);
04be694e
MJ
5462 if (node->has_gimple_body_p ())
5463 write_ipcp_transformation_info (ob, node);
2c9561b5
MJ
5464 }
5465 streamer_write_char_stream (ob->main_stream, 0);
5466 produce_asm (ob, NULL);
5467 destroy_output_block (ob);
5468}
5469
5470/* Read replacements section in file FILE_DATA of length LEN with data
5471 DATA. */
5472
5473static void
5474read_replacements_section (struct lto_file_decl_data *file_data,
5475 const char *data,
5476 size_t len)
5477{
5478 const struct lto_function_header *header =
5479 (const struct lto_function_header *) data;
5480 const int cfg_offset = sizeof (struct lto_function_header);
5481 const int main_offset = cfg_offset + header->cfg_size;
5482 const int string_offset = main_offset + header->main_size;
5483 struct data_in *data_in;
2c9561b5
MJ
5484 unsigned int i;
5485 unsigned int count;
5486
207c68cd 5487 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5488 header->main_size, file_data->mode_table);
2c9561b5
MJ
5489
5490 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5491 header->string_size, vNULL);
2c9561b5
MJ
5492 count = streamer_read_uhwi (&ib_main);
5493
5494 for (i = 0; i < count; i++)
5495 {
5496 unsigned int index;
5497 struct cgraph_node *node;
5498 lto_symtab_encoder_t encoder;
5499
5500 index = streamer_read_uhwi (&ib_main);
5501 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5502 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5503 index));
67348ccc 5504 gcc_assert (node->definition);
04be694e 5505 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5506 }
5507 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5508 len);
5509 lto_data_in_delete (data_in);
5510}
5511
5512/* Read IPA-CP aggregate replacements. */
5513
5514void
04be694e 5515ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5516{
5517 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5518 struct lto_file_decl_data *file_data;
5519 unsigned int j = 0;
5520
5521 while ((file_data = file_data_vec[j++]))
5522 {
5523 size_t len;
5524 const char *data = lto_get_section_data (file_data,
5525 LTO_section_ipcp_transform,
5526 NULL, &len);
5527 if (data)
5528 read_replacements_section (file_data, data, len);
5529 }
5530}
5531
5532/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5533 NODE. */
5534
5535static void
5536adjust_agg_replacement_values (struct cgraph_node *node,
5537 struct ipa_agg_replacement_value *aggval)
5538{
5539 struct ipa_agg_replacement_value *v;
5540 int i, c = 0, d = 0, *adj;
5541
5542 if (!node->clone.combined_args_to_skip)
5543 return;
5544
5545 for (v = aggval; v; v = v->next)
5546 {
5547 gcc_assert (v->index >= 0);
5548 if (c < v->index)
5549 c = v->index;
5550 }
5551 c++;
5552
5553 adj = XALLOCAVEC (int, c);
5554 for (i = 0; i < c; i++)
5555 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5556 {
5557 adj[i] = -1;
5558 d++;
5559 }
5560 else
5561 adj[i] = i - d;
5562
5563 for (v = aggval; v; v = v->next)
5564 v->index = adj[v->index];
5565}
5566
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI carries per-function body analysis state, DESCS the parameter
     descriptors of the function being modified, AV the chain of aggregate
     replacement values, and SC/CC point to flags the walk sets when
     something (respectively the CFG) has changed.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Body analysis info of the function being walked.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  /* Aggregate replacement values to substitute into loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags reported back to the caller of the walk.  */
  bool *m_something_changed, *m_cfg_changed;
};
5587
3daacdcd 5588edge
8aab5218
MJ
5589ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5590{
5591 gimple_stmt_iterator gsi;
5592 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5593 {
5594 struct ipa_agg_replacement_value *v;
355fe088 5595 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
5596 tree rhs, val, t;
5597 HOST_WIDE_INT offset, size;
5598 int index;
5599 bool by_ref, vce;
5600
5601 if (!gimple_assign_load_p (stmt))
5602 continue;
5603 rhs = gimple_assign_rhs1 (stmt);
5604 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5605 continue;
2c9561b5 5606
8aab5218
MJ
5607 vce = false;
5608 t = rhs;
5609 while (handled_component_p (t))
5610 {
5611 /* V_C_E can do things like convert an array of integers to one
5612 bigger integer and similar things we do not handle below. */
5613 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5614 {
5615 vce = true;
5616 break;
5617 }
5618 t = TREE_OPERAND (t, 0);
5619 }
5620 if (vce)
5621 continue;
5622
ff302741
PB
5623 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5624 &offset, &size, &by_ref))
8aab5218
MJ
5625 continue;
5626 for (v = m_aggval; v; v = v->next)
5627 if (v->index == index
5628 && v->offset == offset)
5629 break;
5630 if (!v
5631 || v->by_ref != by_ref
5632 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5633 continue;
5634
5635 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5636 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5637 {
5638 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5639 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5640 else if (TYPE_SIZE (TREE_TYPE (rhs))
5641 == TYPE_SIZE (TREE_TYPE (v->value)))
5642 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5643 else
5644 {
5645 if (dump_file)
5646 {
5647 fprintf (dump_file, " const ");
ef6cb4c7 5648 print_generic_expr (dump_file, v->value);
8aab5218 5649 fprintf (dump_file, " can't be converted to type of ");
ef6cb4c7 5650 print_generic_expr (dump_file, rhs);
8aab5218
MJ
5651 fprintf (dump_file, "\n");
5652 }
5653 continue;
5654 }
5655 }
5656 else
5657 val = v->value;
5658
5659 if (dump_file && (dump_flags & TDF_DETAILS))
5660 {
5661 fprintf (dump_file, "Modifying stmt:\n ");
ef6cb4c7 5662 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
5663 }
5664 gimple_assign_set_rhs_from_tree (&gsi, val);
5665 update_stmt (stmt);
5666
5667 if (dump_file && (dump_flags & TDF_DETAILS))
5668 {
5669 fprintf (dump_file, "into:\n ");
ef6cb4c7 5670 print_gimple_stmt (dump_file, stmt, 0);
8aab5218
MJ
5671 fprintf (dump_file, "\n");
5672 }
5673
5674 *m_something_changed = true;
5675 if (maybe_clean_eh_stmt (stmt)
5676 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5677 *m_cfg_changed = true;
5678 }
3daacdcd 5679 return NULL;
8aab5218
MJ
5680}
5681
209ca542
PK
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  For integral SSA default defs this sets
   known-nonzero-bits information; for pointer ones it records alignment
   derived from the mask/value pair.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  /* Nothing recorded for this node.  */
  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  /* BITS is indexed by original parameter position; PARM only advances for
     parameters that were not removed in this clone.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only integral or pointer parameters that live in SSA form can carry
	 this information.  */
      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      /* The info attaches to the parameter's default-definition SSA name.  */
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits set in the mask are unknown; bits set in the value may be
	     nonzero — their union over-approximates the nonzero bits.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  /* Lowest set bit of the mask bounds the provable alignment.  */
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never weaken alignment information that is already
		 stronger than what we derived.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      /* Dump (but do not act on) an inconsistency between the old
		 and the newly derived misalignment.  */
	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5773
8bc5448f
KV
/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  Integral parameters get range info on
   their default-def SSA name; pointer parameters whose recorded range is
   the anti-range ~[0, 0] are marked non-null.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  /* VR is indexed by original parameter position; PARM only advances for
     parameters that survive in this clone.  */
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      /* Range info can only attach to an SSA default definition.  */
      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      /* Bounds were streamed as widest_ints; convert them back to
		 the parameter's precision and signedness.  */
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      /* ~[0, 0] for a pointer means it is never null.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
5836
/* IPCP transformation phase doing propagation of aggregate values.
   Applies the recorded bits, value-range and aggregate replacement info to
   NODE's body (CFUN must be set to it) and returns the TODO flags the pass
   manager should act on.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  /* Bits and value-range updates need no CFG walk and are done first.  */
  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to this clone's surviving parameters.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  /* The dominator walk replaces matching aggregate loads with constants.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  /* The transformation info has been consumed; drop it so it is not
     applied again.  */
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
86cd0334
MJ
5899
5900#include "gt-ipa-prop.h"