]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
regex_compiler.h (__detail::_BracketMatcher): Reorder members to avoid wasted space...
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
23a5b65a 2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
2fb9a547
AM
24#include "basic-block.h"
25#include "tree-ssa-alias.h"
26#include "internal-fn.h"
27#include "gimple-fold.h"
28#include "tree-eh.h"
29#include "gimple-expr.h"
30#include "is-a.h"
18f429e2 31#include "gimple.h"
d8a2d370
DN
32#include "expr.h"
33#include "stor-layout.h"
34#include "print-tree.h"
45b0be94 35#include "gimplify.h"
5be5c238 36#include "gimple-iterator.h"
18f429e2 37#include "gimplify-me.h"
5be5c238 38#include "gimple-walk.h"
518dc859 39#include "langhooks.h"
518dc859 40#include "target.h"
518dc859 41#include "ipa-prop.h"
442b4905
AM
42#include "bitmap.h"
43#include "gimple-ssa.h"
44#include "tree-cfg.h"
45#include "tree-phinodes.h"
46#include "ssa-iterators.h"
47#include "tree-into-ssa.h"
48#include "tree-dfa.h"
518dc859 49#include "tree-pass.h"
771578a0 50#include "tree-inline.h"
0f378cb5 51#include "ipa-inline.h"
518dc859 52#include "flags.h"
3e293154 53#include "diagnostic.h"
cf835838 54#include "gimple-pretty-print.h"
fb3f88cc 55#include "lto-streamer.h"
f0efc7aa
DN
56#include "data-streamer.h"
57#include "tree-streamer.h"
dfea20f1 58#include "params.h"
450ad0cd 59#include "ipa-utils.h"
4df65a85
RB
60#include "stringpool.h"
61#include "tree-ssanames.h"
2b5f0895 62#include "dbgcnt.h"
8aab5218 63#include "domwalk.h"
771578a0 64
8aab5218
MJ
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
83
/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge_p> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
94
/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs, indexed by bb->index.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.
     Used to cap the amount of alias-oracle work, see aa_overwalked.  */
  unsigned int aa_walked;
};
115
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 129
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
145
5fe8e757
MJ
146/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
147 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
148
149static bool
150ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
151{
67348ccc 152 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
153 struct cl_optimization *os;
154
155 if (!fs_opts)
156 return false;
157 os = TREE_OPTIMIZATION (fs_opts);
158 return !os->x_optimize || !os->x_flag_ipa_cp;
159}
160
be95e2b9
MJ
161/* Return index of the formal whose tree is PTREE in function which corresponds
162 to INFO. */
163
d044dd17 164static int
84562394 165ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
166{
167 int i, count;
168
9771b263 169 count = descriptors.length ();
518dc859 170 for (i = 0; i < count; i++)
9771b263 171 if (descriptors[i].decl == ptree)
518dc859
RL
172 return i;
173
174 return -1;
175}
176
d044dd17
MJ
177/* Return index of the formal whose tree is PTREE in function which corresponds
178 to INFO. */
179
180int
181ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
182{
183 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
184}
185
186/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
187 NODE. */
be95e2b9 188
f8e2a1ed
MJ
189static void
190ipa_populate_param_decls (struct cgraph_node *node,
84562394 191 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
192{
193 tree fndecl;
194 tree fnargs;
195 tree parm;
196 int param_num;
3e293154 197
67348ccc 198 fndecl = node->decl;
0e8853ee 199 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
200 fnargs = DECL_ARGUMENTS (fndecl);
201 param_num = 0;
910ad8de 202 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 203 {
9771b263 204 descriptors[param_num].decl = parm;
0e8853ee 205 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
518dc859
RL
206 param_num++;
207 }
208}
209
3f84bf08
MJ
210/* Return how many formal parameters FNDECL has. */
211
212static inline int
310bc633 213count_formal_params (tree fndecl)
3f84bf08
MJ
214{
215 tree parm;
216 int count = 0;
0e8853ee 217 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 218
910ad8de 219 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
220 count++;
221
222 return count;
223}
224
0e8853ee
JH
/* Dump a human-readable representation of the Ith formal parameter of the
   function corresponding to INFO to FILE.  (The previous comment here wrongly
   described a getter; this function only prints.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
239
240/* Initialize the ipa_node_params structure associated with NODE
241 to hold PARAM_COUNT parameters. */
242
243void
244ipa_alloc_node_params (struct cgraph_node *node, int param_count)
245{
246 struct ipa_node_params *info = IPA_NODE_REF (node);
247
248 if (!info->descriptors.exists () && param_count)
249 info->descriptors.safe_grow_cleared (param_count);
250}
251
f8e2a1ed
MJ
252/* Initialize the ipa_node_params structure associated with NODE by counting
253 the function parameters, creating the descriptors and populating their
254 param_decls. */
be95e2b9 255
f8e2a1ed
MJ
256void
257ipa_initialize_node_params (struct cgraph_node *node)
258{
259 struct ipa_node_params *info = IPA_NODE_REF (node);
260
9771b263 261 if (!info->descriptors.exists ())
f8e2a1ed 262 {
67348ccc 263 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 264 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 265 }
518dc859
RL
266}
267
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.
   For each actual argument of CS this dumps the type of its jump function
   and any aggregate jump function entries attached to it.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  /* Known dynamic type: base type, offset of the component within
	     it and the component type itself.  */
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For addresses of CONST_DECLs also show the underlying
	     initializer.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* Arithmetic pass-throughs carry an extra operand.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      /* Dump aggregate jump function entries, if any.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A TYPE_P value denotes a clobber of that many bits rather
		 than a known constant.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
366
367
be95e2b9
MJ
368/* Print the jump functions of all arguments on all call graph edges going from
369 NODE to file F. */
370
518dc859 371void
3e293154 372ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 373{
3e293154 374 struct cgraph_edge *cs;
518dc859 375
fec39fa6 376 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 377 node->order);
3e293154
MJ
378 for (cs = node->callees; cs; cs = cs->next_callee)
379 {
380 if (!ipa_edge_args_info_available_for_edge_p (cs))
381 continue;
382
749aa96d 383 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
384 xstrdup (node->name ()), node->order,
385 xstrdup (cs->callee->name ()),
67348ccc 386 cs->callee->order);
749aa96d
MJ
387 ipa_print_node_jump_functions_for_edge (f, cs);
388 }
518dc859 389
9de04252 390 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 391 {
9de04252 392 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
393 if (!ipa_edge_args_info_available_for_edge_p (cs))
394 continue;
3e293154 395
9de04252
MJ
396 ii = cs->indirect_info;
397 if (ii->agg_contents)
c13bc3d9 398 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 399 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 400 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
401 ii->param_index, ii->offset,
402 ii->by_ref ? "by reference" : "by_value");
403 else
85942f45
JH
404 fprintf (f, " indirect %s callsite, calling param %i, "
405 "offset " HOST_WIDE_INT_PRINT_DEC,
406 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
407 ii->offset);
9de04252 408
749aa96d
MJ
409 if (cs->call_stmt)
410 {
9de04252 411 fprintf (f, ", for stmt ");
749aa96d 412 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 413 }
749aa96d 414 else
9de04252 415 fprintf (f, "\n");
749aa96d 416 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
417 }
418}
419
420/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 421
3e293154
MJ
422void
423ipa_print_all_jump_functions (FILE *f)
424{
425 struct cgraph_node *node;
426
ca30a539 427 fprintf (f, "\nJump functions:\n");
65c70e6b 428 FOR_EACH_FUNCTION (node)
3e293154
MJ
429 {
430 ipa_print_node_jump_functions (f, node);
431 }
432}
433
7b872d9e
MJ
434/* Set JFUNC to be a known type jump function. */
435
436static void
437ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
438 tree base_type, tree component_type)
439{
06d65050
JH
440 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
441 && TYPE_BINFO (component_type));
0a2550e7
JH
442 if (!flag_devirtualize)
443 return;
444 gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
7b872d9e
MJ
445 jfunc->type = IPA_JF_KNOWN_TYPE;
446 jfunc->value.known_type.offset = offset,
447 jfunc->value.known_type.base_type = base_type;
448 jfunc->value.known_type.component_type = component_type;
68377e53 449 gcc_assert (component_type);
7b872d9e
MJ
450}
451
b8f6e610
MJ
452/* Set JFUNC to be a copy of another jmp (to be used by jump function
453 combination code). The two functions will share their rdesc. */
454
455static void
456ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
457 struct ipa_jump_func *src)
458
459{
460 gcc_checking_assert (src->type == IPA_JF_CONST);
461 dst->type = IPA_JF_CONST;
462 dst->value.constant = src->value.constant;
463}
464
7b872d9e
MJ
465/* Set JFUNC to be a constant jmp function. */
466
467static void
4502fe8d
MJ
468ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
469 struct cgraph_edge *cs)
7b872d9e 470{
5368224f
DC
471 constant = unshare_expr (constant);
472 if (constant && EXPR_P (constant))
473 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 474 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
475 jfunc->value.constant.value = unshare_expr_without_location (constant);
476
477 if (TREE_CODE (constant) == ADDR_EXPR
478 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
479 {
480 struct ipa_cst_ref_desc *rdesc;
481 if (!ipa_refdesc_pool)
482 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
483 sizeof (struct ipa_cst_ref_desc), 32);
484
485 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
486 rdesc->cs = cs;
487 rdesc->next_duplicate = NULL;
488 rdesc->refcount = 1;
489 jfunc->value.constant.rdesc = rdesc;
490 }
491 else
492 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
493}
494
495/* Set JFUNC to be a simple pass-through jump function. */
496static void
8b7773a4 497ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 498 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
499{
500 jfunc->type = IPA_JF_PASS_THROUGH;
501 jfunc->value.pass_through.operand = NULL_TREE;
502 jfunc->value.pass_through.formal_id = formal_id;
503 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 504 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 505 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
506}
507
508/* Set JFUNC to be an arithmetic pass through jump function. */
509
510static void
511ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
512 tree operand, enum tree_code operation)
513{
514 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 515 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
516 jfunc->value.pass_through.formal_id = formal_id;
517 jfunc->value.pass_through.operation = operation;
8b7773a4 518 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 519 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
520}
521
/* Set JFUNC to be an ancestor jump function: the result is caller parameter
   FORMAL_ID adjusted by OFFSET, with dynamic type TYPE when it is known to be
   preserved.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  /* Without devirtualization we do not track dynamic types at all.  */
  if (!flag_devirtualize)
    type_preserved = false;
  /* A preserved type must be a polymorphic record with a vtable.  */
  gcc_assert (!type_preserved
	      || (TREE_CODE (type) == RECORD_TYPE
		  && TYPE_BINFO (type)
		  && BINFO_VTABLE (TYPE_BINFO (type))));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
542
e248d83f
MJ
543/* Extract the acual BINFO being described by JFUNC which must be a known type
544 jump function. */
545
546tree
547ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
548{
549 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
550 if (!base_binfo)
551 return NULL_TREE;
552 return get_binfo_at_offset (base_binfo,
553 jfunc->value.known_type.offset,
554 jfunc->value.known_type.component_type);
555}
556
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
566
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
587
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Calls are ignored entirely, see the big comment above.  */
  if (is_gimple_call (stmt))
    return false;
  /* TODO: Skip clobbers, doing so triggers problem in PR60306.  */
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  /* Under strict aliasing only pointer-typed scalar stores can
	     overwrite a VMT pointer.  */
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  /* A component store that is not to a virtual field cannot be a
	     vtable pointer store.  */
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  /* Conservatively assume anything else may store the VMT pointer.  */
  return true;
}
651
290ebcb7
MJ
652/* If STMT can be proved to be an assignment to the virtual method table
653 pointer of ANALYZED_OBJ and the type associated with the new table
654 identified, return the type. Otherwise return NULL_TREE. */
655
656static tree
657extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
658{
659 HOST_WIDE_INT offset, size, max_size;
390675c8 660 tree lhs, rhs, base, binfo;
290ebcb7
MJ
661
662 if (!gimple_assign_single_p (stmt))
663 return NULL_TREE;
664
665 lhs = gimple_assign_lhs (stmt);
666 rhs = gimple_assign_rhs1 (stmt);
667 if (TREE_CODE (lhs) != COMPONENT_REF
390675c8 668 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
290ebcb7
MJ
669 return NULL_TREE;
670
671 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
672 if (offset != tci->offset
673 || size != POINTER_SIZE
674 || max_size != POINTER_SIZE)
675 return NULL_TREE;
676 if (TREE_CODE (base) == MEM_REF)
677 {
678 if (TREE_CODE (tci->object) != MEM_REF
679 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
680 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
681 TREE_OPERAND (base, 1)))
682 return NULL_TREE;
683 }
684 else if (tci->object != base)
685 return NULL_TREE;
686
390675c8
JH
687 binfo = vtable_pointer_value_to_binfo (rhs);
688
689 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
690 base of outer type. In this case we would need to either
691 work on binfos or translate it back to outer type and offset.
692 KNOWN_TYPE jump functions are not ready for that, yet. */
693 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
694 return NULL;
695
696 return BINFO_TYPE (binfo);
290ebcb7
MJ
697}
698
61502ca8 699/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
700 detect_type_change to check whether a particular statement may modify
701 the virtual table pointer, and if possible also determine the new type of
702 the (sub-)object. It stores its result into DATA, which points to a
703 type_change_info structure. */
704
705static bool
706check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
707{
708 gimple stmt = SSA_NAME_DEF_STMT (vdef);
709 struct type_change_info *tci = (struct type_change_info *) data;
710
711 if (stmt_may_be_vtbl_ptr_store (stmt))
712 {
290ebcb7
MJ
713 tree type;
714 type = extr_type_from_vtbl_ptr_store (stmt, tci);
715 if (tci->type_maybe_changed
716 && type != tci->known_current_type)
717 tci->multiple_types_encountered = true;
718 tci->known_current_type = type;
f65cf2b7
MJ
719 tci->type_maybe_changed = true;
720 return true;
721 }
722 else
723 return false;
724}
725
290ebcb7
MJ
726
727
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return true;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
	  == DECL_ARGUMENTS (current_function_decl)))
    return false;

  /* Set up an ao_ref covering just the virtual table pointer.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  /* Walk virtual definitions backwards from the call looking for possible
     VMT pointer stores; check_stmt_for_type_change fills in TCI.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  /* Only a unique new type at offset zero can be turned into a KNOWN_TYPE
     jump function; otherwise fall back to UNKNOWN.  */
  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
794
795/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
796 SSA name (its dereference will become the base and the offset is assumed to
797 be zero). */
798
799static bool
06d65050
JH
800detect_type_change_ssa (tree arg, tree comp_type,
801 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
802{
803 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 804 if (!flag_devirtualize
06d65050 805 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
806 return false;
807
808 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 809 build_int_cst (ptr_type_node, 0));
f65cf2b7 810
06d65050 811 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
812}
813
fdb0e1b4
MJ
814/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
815 boolean variable pointed to by DATA. */
816
817static bool
818mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
819 void *data)
820{
821 bool *b = (bool *) data;
822 *b = true;
823 return true;
824}
825
8aab5218
MJ
826/* Return true if we have already walked so many statements in AA that we
827 should really just start giving up. */
828
829static bool
830aa_overwalked (struct func_body_info *fbi)
831{
832 gcc_checking_assert (fbi);
833 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
834}
835
836/* Find the nearest valid aa status for parameter specified by INDEX that
837 dominates BB. */
838
839static struct param_aa_status *
840find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
841 int index)
842{
843 while (true)
844 {
845 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
846 if (!bb)
847 return NULL;
848 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
849 if (!bi->param_aa_statuses.is_empty ()
850 && bi->param_aa_statuses[index].valid)
851 return &bi->param_aa_statuses[index];
852 }
853}
854
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed from the nearest dominating BB that has a valid status; if none
	 exists, start from a clean (all-unmodified) state.  */
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
883
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* A cached "modified" verdict at this BB short-circuits the walk.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a positive "modified" result for all dominated blocks.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
923
924/* If STMT is an assignment that loads a value from an parameter declaration,
925 return the index of the parameter in ipa_node_params which has not been
926 modified. Otherwise return -1. */
927
928static int
8aab5218
MJ
929load_from_unmodified_param (struct func_body_info *fbi,
930 vec<ipa_param_descriptor> descriptors,
fdb0e1b4
MJ
931 gimple stmt)
932{
933 int index;
934 tree op1;
935
936 if (!gimple_assign_single_p (stmt))
937 return -1;
938
939 op1 = gimple_assign_rhs1 (stmt);
940 if (TREE_CODE (op1) != PARM_DECL)
941 return -1;
942
d044dd17 943 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 944 if (index < 0
8aab5218 945 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
fdb0e1b4
MJ
946 return -1;
947
948 return index;
949}
f65cf2b7 950
8aab5218
MJ
951/* Return true if memory reference REF (which must be a load through parameter
952 with INDEX) loads data that are known to be unmodified in this function
953 before reaching statement STMT. */
8b7773a4
MJ
954
955static bool
8aab5218
MJ
956parm_ref_data_preserved_p (struct func_body_info *fbi,
957 int index, gimple stmt, tree ref)
8b7773a4 958{
8aab5218 959 struct param_aa_status *paa;
8b7773a4
MJ
960 bool modified = false;
961 ao_ref refd;
962
8aab5218
MJ
963 /* FIXME: FBI can be NULL if we are being called from outside
964 ipa_node_analysis or ipcp_transform_function, which currently happens
965 during inlining analysis. It would be great to extend fbi's lifetime and
966 always have it. Currently, we are just not afraid of too much walking in
967 that case. */
968 if (fbi)
969 {
970 if (aa_overwalked (fbi))
971 return false;
972 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
973 if (paa->ref_modified)
974 return false;
975 }
976 else
977 paa = NULL;
8b7773a4 978
8aab5218 979 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 980 ao_ref_init (&refd, ref);
8aab5218
MJ
981 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
982 &modified, NULL);
983 if (fbi)
984 fbi->aa_walked += walked;
985 if (paa && modified)
986 paa->ref_modified = true;
8b7773a4
MJ
987 return !modified;
988}
989
8aab5218
MJ
990/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
991 is known to be unmodified in this function before reaching call statement
992 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
993
994static bool
8aab5218
MJ
995parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
996 gimple call, tree parm)
8b7773a4
MJ
997{
998 bool modified = false;
999 ao_ref refd;
1000
1001 /* It's unnecessary to calculate anything about memory contnets for a const
1002 function because it is not goin to use it. But do not cache the result
1003 either. Also, no such calculations for non-pointers. */
1004 if (!gimple_vuse (call)
8aab5218
MJ
1005 || !POINTER_TYPE_P (TREE_TYPE (parm))
1006 || aa_overwalked (fbi))
8b7773a4
MJ
1007 return false;
1008
8aab5218
MJ
1009 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1010 index);
1011 if (paa->pt_modified)
8b7773a4
MJ
1012 return false;
1013
1014 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
1015 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1016 &modified, NULL);
1017 fbi->aa_walked += walked;
8b7773a4 1018 if (modified)
8aab5218 1019 paa->pt_modified = true;
8b7773a4
MJ
1020 return !modified;
1021}
1022
1023/* Return true if we can prove that OP is a memory reference loading unmodified
1024 data from an aggregate passed as a parameter and if the aggregate is passed
1025 by reference, that the alias type of the load corresponds to the type of the
1026 formal parameter (so that we can rely on this type for TBAA in callers).
1027 INFO and PARMS_AINFO describe parameters of the current function (but the
1028 latter can be NULL), STMT is the load statement. If function returns true,
1029 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1030 within the aggregate and whether it is a load from a value passed by
1031 reference respectively. */
1032
1033static bool
8aab5218
MJ
1034ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1035 vec<ipa_param_descriptor> descriptors,
1036 gimple stmt, tree op, int *index_p,
1037 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1038 bool *by_ref_p)
8b7773a4
MJ
1039{
1040 int index;
1041 HOST_WIDE_INT size, max_size;
1042 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1043
1044 if (max_size == -1 || max_size != size || *offset_p < 0)
1045 return false;
1046
1047 if (DECL_P (base))
1048 {
d044dd17 1049 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1050 if (index >= 0
8aab5218 1051 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1052 {
1053 *index_p = index;
1054 *by_ref_p = false;
3ff2ca23
JJ
1055 if (size_p)
1056 *size_p = size;
8b7773a4
MJ
1057 return true;
1058 }
1059 return false;
1060 }
1061
1062 if (TREE_CODE (base) != MEM_REF
1063 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1064 || !integer_zerop (TREE_OPERAND (base, 1)))
1065 return false;
1066
1067 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1068 {
1069 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1070 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1071 }
1072 else
1073 {
1074 /* This branch catches situations where a pointer parameter is not a
1075 gimple register, for example:
1076
1077 void hip7(S*) (struct S * p)
1078 {
1079 void (*<T2e4>) (struct S *) D.1867;
1080 struct S * p.1;
1081
1082 <bb 2>:
1083 p.1_1 = p;
1084 D.1867_2 = p.1_1->f;
1085 D.1867_2 ();
1086 gdp = &p;
1087 */
1088
1089 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1090 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1091 }
1092
1093 if (index >= 0
8aab5218 1094 && parm_ref_data_preserved_p (fbi, index, stmt, op))
8b7773a4
MJ
1095 {
1096 *index_p = index;
1097 *by_ref_p = true;
3ff2ca23
JJ
1098 if (size_p)
1099 *size_p = size;
8b7773a4
MJ
1100 return true;
1101 }
1102 return false;
1103}
1104
1105/* Just like the previous function, just without the param_analysis_info
1106 pointer, for users outside of this file. */
1107
1108bool
1109ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1110 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1111 bool *by_ref_p)
1112{
8aab5218 1113 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
3ff2ca23 1114 offset_p, NULL, by_ref_p);
8b7773a4
MJ
1115}
1116
b258210c 1117/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1118 of an assignment statement STMT, try to determine whether we are actually
1119 handling any of the following cases and construct an appropriate jump
1120 function into JFUNC if so:
1121
1122 1) The passed value is loaded from a formal parameter which is not a gimple
1123 register (most probably because it is addressable, the value has to be
1124 scalar) and we can guarantee the value has not changed. This case can
1125 therefore be described by a simple pass-through jump function. For example:
1126
1127 foo (int a)
1128 {
1129 int a.0;
1130
1131 a.0_2 = a;
1132 bar (a.0_2);
1133
1134 2) The passed value can be described by a simple arithmetic pass-through
1135 jump function. E.g.
1136
1137 foo (int a)
1138 {
1139 int D.2064;
1140
1141 D.2064_4 = a.1(D) + 4;
1142 bar (D.2064_4);
1143
1144 This case can also occur in combination of the previous one, e.g.:
1145
1146 foo (int a, int z)
1147 {
1148 int a.0;
1149 int D.2064;
1150
1151 a.0_3 = a;
1152 D.2064_4 = a.0_3 + 4;
1153 foo (D.2064_4);
1154
1155 3) The passed value is an address of an object within another one (which
1156 also passed by reference). Such situations are described by an ancestor
1157 jump function and describe situations such as:
1158
1159 B::foo() (struct B * const this)
1160 {
1161 struct A * D.1845;
1162
1163 D.1845_2 = &this_1(D)->D.1748;
1164 A::bar (D.1845_2);
1165
1166 INFO is the structure describing individual parameters access different
1167 stages of IPA optimizations. PARMS_AINFO contains the information that is
1168 only needed for intraprocedural analysis. */
685b0d13
MJ
1169
1170static void
8aab5218
MJ
1171compute_complex_assign_jump_func (struct func_body_info *fbi,
1172 struct ipa_node_params *info,
b258210c 1173 struct ipa_jump_func *jfunc,
06d65050
JH
1174 gimple call, gimple stmt, tree name,
1175 tree param_type)
685b0d13
MJ
1176{
1177 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1178 tree op1, tc_ssa, base, ssa;
685b0d13 1179 int index;
685b0d13 1180
685b0d13 1181 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1182
fdb0e1b4 1183 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1184 {
fdb0e1b4
MJ
1185 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1186 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1187 else
8aab5218 1188 index = load_from_unmodified_param (fbi, info->descriptors,
fdb0e1b4
MJ
1189 SSA_NAME_DEF_STMT (op1));
1190 tc_ssa = op1;
1191 }
1192 else
1193 {
8aab5218 1194 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1195 tc_ssa = gimple_assign_lhs (stmt);
1196 }
1197
1198 if (index >= 0)
1199 {
1200 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1201
b258210c 1202 if (op2)
685b0d13 1203 {
b258210c
MJ
1204 if (!is_gimple_ip_invariant (op2)
1205 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1206 && !useless_type_conversion_p (TREE_TYPE (name),
1207 TREE_TYPE (op1))))
1208 return;
1209
7b872d9e
MJ
1210 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1211 gimple_assign_rhs_code (stmt));
685b0d13 1212 }
b8f6e610 1213 else if (gimple_assign_single_p (stmt))
8b7773a4 1214 {
8aab5218 1215 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
06d65050
JH
1216 bool type_p = false;
1217
1218 if (param_type && POINTER_TYPE_P (param_type))
1219 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1220 call, jfunc);
b8f6e610
MJ
1221 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1222 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1223 }
685b0d13
MJ
1224 return;
1225 }
1226
1227 if (TREE_CODE (op1) != ADDR_EXPR)
1228 return;
1229 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1230 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1231 return;
32aa622c
MJ
1232 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1233 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1234 /* If this is a varying address, punt. */
1235 || max_size == -1
1236 || max_size != size)
685b0d13 1237 return;
807e902e 1238 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1239 ssa = TREE_OPERAND (base, 0);
1240 if (TREE_CODE (ssa) != SSA_NAME
1241 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1242 || offset < 0)
685b0d13
MJ
1243 return;
1244
b8f6e610 1245 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1246 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1247 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1248 {
06d65050
JH
1249 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1250 call, jfunc, offset);
b8f6e610 1251 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
0a2550e7
JH
1252 ipa_set_ancestor_jf (jfunc, offset,
1253 type_p ? TREE_TYPE (param_type) : NULL, index,
8aab5218 1254 parm_ref_data_pass_through_p (fbi, index,
b8f6e610
MJ
1255 call, ssa), type_p);
1256 }
685b0d13
MJ
1257}
1258
40591473
MJ
1259/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1260 it looks like:
1261
1262 iftmp.1_3 = &obj_2(D)->D.1762;
1263
1264 The base of the MEM_REF must be a default definition SSA NAME of a
1265 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1266 whole MEM_REF expression is returned and the offset calculated from any
1267 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1268 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1269
1270static tree
1271get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1272{
1273 HOST_WIDE_INT size, max_size;
1274 tree expr, parm, obj;
1275
1276 if (!gimple_assign_single_p (assign))
1277 return NULL_TREE;
1278 expr = gimple_assign_rhs1 (assign);
1279
1280 if (TREE_CODE (expr) != ADDR_EXPR)
1281 return NULL_TREE;
1282 expr = TREE_OPERAND (expr, 0);
1283 obj = expr;
1284 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1285
1286 if (TREE_CODE (expr) != MEM_REF
1287 /* If this is a varying address, punt. */
1288 || max_size == -1
1289 || max_size != size
1290 || *offset < 0)
1291 return NULL_TREE;
1292 parm = TREE_OPERAND (expr, 0);
1293 if (TREE_CODE (parm) != SSA_NAME
1294 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1295 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1296 return NULL_TREE;
1297
807e902e 1298 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1299 *obj_p = obj;
1300 return expr;
1301}
1302
685b0d13 1303
b258210c
MJ
1304/* Given that an actual argument is an SSA_NAME that is a result of a phi
1305 statement PHI, try to find out whether NAME is in fact a
1306 multiple-inheritance typecast from a descendant into an ancestor of a formal
1307 parameter and thus can be described by an ancestor jump function and if so,
1308 write the appropriate function into JFUNC.
1309
1310 Essentially we want to match the following pattern:
1311
1312 if (obj_2(D) != 0B)
1313 goto <bb 3>;
1314 else
1315 goto <bb 4>;
1316
1317 <bb 3>:
1318 iftmp.1_3 = &obj_2(D)->D.1762;
1319
1320 <bb 4>:
1321 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1322 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1323 return D.1879_6; */
1324
1325static void
8aab5218
MJ
1326compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1327 struct ipa_node_params *info,
b258210c 1328 struct ipa_jump_func *jfunc,
06d65050 1329 gimple call, gimple phi, tree param_type)
b258210c 1330{
40591473 1331 HOST_WIDE_INT offset;
b258210c
MJ
1332 gimple assign, cond;
1333 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1334 tree tmp, parm, expr, obj;
b258210c
MJ
1335 int index, i;
1336
54e348cb 1337 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1338 return;
1339
54e348cb
MJ
1340 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1341 tmp = PHI_ARG_DEF (phi, 0);
1342 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1343 tmp = PHI_ARG_DEF (phi, 1);
1344 else
1345 return;
b258210c
MJ
1346 if (TREE_CODE (tmp) != SSA_NAME
1347 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1348 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1349 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1350 return;
1351
1352 assign = SSA_NAME_DEF_STMT (tmp);
1353 assign_bb = gimple_bb (assign);
40591473 1354 if (!single_pred_p (assign_bb))
b258210c 1355 return;
40591473
MJ
1356 expr = get_ancestor_addr_info (assign, &obj, &offset);
1357 if (!expr)
b258210c
MJ
1358 return;
1359 parm = TREE_OPERAND (expr, 0);
b258210c 1360 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1361 if (index < 0)
1362 return;
b258210c
MJ
1363
1364 cond_bb = single_pred (assign_bb);
1365 cond = last_stmt (cond_bb);
69610617
SB
1366 if (!cond
1367 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1368 || gimple_cond_code (cond) != NE_EXPR
1369 || gimple_cond_lhs (cond) != parm
1370 || !integer_zerop (gimple_cond_rhs (cond)))
1371 return;
1372
b258210c
MJ
1373 phi_bb = gimple_bb (phi);
1374 for (i = 0; i < 2; i++)
1375 {
1376 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1377 if (pred != assign_bb && pred != cond_bb)
1378 return;
1379 }
1380
06d65050
JH
1381 bool type_p = false;
1382 if (param_type && POINTER_TYPE_P (param_type))
1383 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1384 call, jfunc, offset);
b8f6e610 1385 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8aab5218
MJ
1386 ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
1387 index,
1388 parm_ref_data_pass_through_p (fbi, index, call, parm),
1389 type_p);
b258210c
MJ
1390}
1391
61502ca8 1392/* Given OP which is passed as an actual argument to a called function,
b258210c 1393 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1394 and if so, create one and store it to JFUNC.
1395 EXPECTED_TYPE represents a type the argument should be in */
b258210c
MJ
1396
1397static void
f65cf2b7 1398compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1399 gimple call, tree expected_type)
b258210c 1400{
32aa622c 1401 HOST_WIDE_INT offset, size, max_size;
c7573249 1402 tree base;
b258210c 1403
05842ff5
MJ
1404 if (!flag_devirtualize
1405 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1406 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1407 /* Be sure expected_type is polymorphic. */
1408 || !expected_type
1409 || TREE_CODE (expected_type) != RECORD_TYPE
1410 || !TYPE_BINFO (expected_type)
1411 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1412 return;
1413
1414 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1415 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1416 if (!DECL_P (base)
1417 || max_size == -1
1418 || max_size != size
1419 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1420 || is_global_var (base))
1421 return;
1422
06d65050 1423 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1424 return;
1425
06d65050
JH
1426 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1427 expected_type);
b258210c
MJ
1428}
1429
be95e2b9
MJ
1430/* Inspect the given TYPE and return true iff it has the same structure (the
1431 same number of fields of the same types) as a C++ member pointer. If
1432 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1433 corresponding fields there. */
1434
3e293154
MJ
1435static bool
1436type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1437{
1438 tree fld;
1439
1440 if (TREE_CODE (type) != RECORD_TYPE)
1441 return false;
1442
1443 fld = TYPE_FIELDS (type);
1444 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1445 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1446 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1447 return false;
1448
1449 if (method_ptr)
1450 *method_ptr = fld;
1451
910ad8de 1452 fld = DECL_CHAIN (fld);
8b7773a4 1453 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1454 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1455 return false;
1456 if (delta)
1457 *delta = fld;
1458
910ad8de 1459 if (DECL_CHAIN (fld))
3e293154
MJ
1460 return false;
1461
1462 return true;
1463}
1464
61502ca8 1465/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1466 return the rhs of its defining statement. Otherwise return RHS as it
1467 is. */
7ec49257
MJ
1468
1469static inline tree
1470get_ssa_def_if_simple_copy (tree rhs)
1471{
1472 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1473 {
1474 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1475
1476 if (gimple_assign_single_p (def_stmt))
1477 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1478 else
1479 break;
7ec49257
MJ
1480 }
1481 return rhs;
1482}
1483
8b7773a4
MJ
1484/* Simple linked list, describing known contents of an aggregate beforere
1485 call. */
1486
1487struct ipa_known_agg_contents_list
1488{
1489 /* Offset and size of the described part of the aggregate. */
1490 HOST_WIDE_INT offset, size;
1491 /* Known constant value or NULL if the contents is known to be unknown. */
1492 tree constant;
1493 /* Pointer to the next structure in the list. */
1494 struct ipa_known_agg_contents_list *next;
1495};
3e293154 1496
8b7773a4
MJ
1497/* Traverse statements from CALL backwards, scanning whether an aggregate given
1498 in ARG is filled in with constant values. ARG can either be an aggregate
85942f45
JH
1499 expression or a pointer to an aggregate. ARG_TYPE is the type of the aggregate.
1500 JFUNC is the jump function into which the constants are subsequently stored. */
be95e2b9 1501
3e293154 1502static void
85942f45 1503determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
8b7773a4 1504 struct ipa_jump_func *jfunc)
3e293154 1505{
8b7773a4
MJ
1506 struct ipa_known_agg_contents_list *list = NULL;
1507 int item_count = 0, const_count = 0;
1508 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1509 gimple_stmt_iterator gsi;
8b7773a4
MJ
1510 tree arg_base;
1511 bool check_ref, by_ref;
1512 ao_ref r;
3e293154 1513
8b7773a4
MJ
1514 /* The function operates in three stages. First, we prepare check_ref, r,
1515 arg_base and arg_offset based on what is actually passed as an actual
1516 argument. */
3e293154 1517
85942f45 1518 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1519 {
1520 by_ref = true;
1521 if (TREE_CODE (arg) == SSA_NAME)
1522 {
1523 tree type_size;
85942f45 1524 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1525 return;
1526 check_ref = true;
1527 arg_base = arg;
1528 arg_offset = 0;
85942f45 1529 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1530 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1531 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1532 }
1533 else if (TREE_CODE (arg) == ADDR_EXPR)
1534 {
1535 HOST_WIDE_INT arg_max_size;
1536
1537 arg = TREE_OPERAND (arg, 0);
1538 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1539 &arg_max_size);
1540 if (arg_max_size == -1
1541 || arg_max_size != arg_size
1542 || arg_offset < 0)
1543 return;
1544 if (DECL_P (arg_base))
1545 {
1546 tree size;
1547 check_ref = false;
1548 size = build_int_cst (integer_type_node, arg_size);
1549 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1550 }
1551 else
1552 return;
1553 }
1554 else
1555 return;
1556 }
1557 else
1558 {
1559 HOST_WIDE_INT arg_max_size;
1560
1561 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1562
1563 by_ref = false;
1564 check_ref = false;
1565 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1566 &arg_max_size);
1567 if (arg_max_size == -1
1568 || arg_max_size != arg_size
1569 || arg_offset < 0)
1570 return;
1571
1572 ao_ref_init (&r, arg);
1573 }
1574
1575 /* Second stage walks back the BB, looks at individual statements and as long
1576 as it is confident of how the statements affect contents of the
1577 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1578 describing it. */
1579 gsi = gsi_for_stmt (call);
726a989a
RB
1580 gsi_prev (&gsi);
1581 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1582 {
8b7773a4 1583 struct ipa_known_agg_contents_list *n, **p;
726a989a 1584 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1585 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1586 tree lhs, rhs, lhs_base;
1587 bool partial_overlap;
3e293154 1588
8b7773a4 1589 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1590 continue;
8b75fc9b 1591 if (!gimple_assign_single_p (stmt))
8b7773a4 1592 break;
3e293154 1593
726a989a
RB
1594 lhs = gimple_assign_lhs (stmt);
1595 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1596 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1597 || TREE_CODE (lhs) == BIT_FIELD_REF
1598 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1599 break;
3e293154 1600
8b7773a4
MJ
1601 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1602 &lhs_max_size);
1603 if (lhs_max_size == -1
1604 || lhs_max_size != lhs_size
1605 || (lhs_offset < arg_offset
1606 && lhs_offset + lhs_size > arg_offset)
1607 || (lhs_offset < arg_offset + arg_size
1608 && lhs_offset + lhs_size > arg_offset + arg_size))
1609 break;
3e293154 1610
8b7773a4 1611 if (check_ref)
518dc859 1612 {
8b7773a4
MJ
1613 if (TREE_CODE (lhs_base) != MEM_REF
1614 || TREE_OPERAND (lhs_base, 0) != arg_base
1615 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1616 break;
3e293154 1617 }
8b7773a4 1618 else if (lhs_base != arg_base)
774b8a55
MJ
1619 {
1620 if (DECL_P (lhs_base))
1621 continue;
1622 else
1623 break;
1624 }
3e293154 1625
8b7773a4
MJ
1626 if (lhs_offset + lhs_size < arg_offset
1627 || lhs_offset >= (arg_offset + arg_size))
1628 continue;
1629
1630 partial_overlap = false;
1631 p = &list;
1632 while (*p && (*p)->offset < lhs_offset)
3e293154 1633 {
8b7773a4 1634 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1635 {
8b7773a4
MJ
1636 partial_overlap = true;
1637 break;
3e293154 1638 }
8b7773a4
MJ
1639 p = &(*p)->next;
1640 }
1641 if (partial_overlap)
1642 break;
1643 if (*p && (*p)->offset < lhs_offset + lhs_size)
1644 {
1645 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1646 /* We already know this value is subsequently overwritten with
1647 something else. */
1648 continue;
3e293154 1649 else
8b7773a4
MJ
1650 /* Otherwise this is a partial overlap which we cannot
1651 represent. */
1652 break;
3e293154 1653 }
3e293154 1654
8b7773a4
MJ
1655 rhs = get_ssa_def_if_simple_copy (rhs);
1656 n = XALLOCA (struct ipa_known_agg_contents_list);
1657 n->size = lhs_size;
1658 n->offset = lhs_offset;
1659 if (is_gimple_ip_invariant (rhs))
1660 {
1661 n->constant = rhs;
1662 const_count++;
1663 }
1664 else
1665 n->constant = NULL_TREE;
1666 n->next = *p;
1667 *p = n;
3e293154 1668
8b7773a4 1669 item_count++;
dfea20f1
MJ
1670 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1671 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1672 break;
1673 }
be95e2b9 1674
8b7773a4
MJ
1675 /* Third stage just goes over the list and creates an appropriate vector of
1676 ipa_agg_jf_item structures out of it, of sourse only if there are
1677 any known constants to begin with. */
3e293154 1678
8b7773a4 1679 if (const_count)
3e293154 1680 {
8b7773a4 1681 jfunc->agg.by_ref = by_ref;
9771b263 1682 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1683 while (list)
1684 {
1685 if (list->constant)
1686 {
f32682ca
DN
1687 struct ipa_agg_jf_item item;
1688 item.offset = list->offset - arg_offset;
7d2fb524 1689 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1690 item.value = unshare_expr_without_location (list->constant);
9771b263 1691 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1692 }
1693 list = list->next;
1694 }
3e293154
MJ
1695 }
1696}
1697
06d65050
JH
1698static tree
1699ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1700{
1701 int n;
1702 tree type = (e->callee
67348ccc 1703 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1704 : gimple_call_fntype (e->call_stmt));
1705 tree t = TYPE_ARG_TYPES (type);
1706
1707 for (n = 0; n < i; n++)
1708 {
1709 if (!t)
1710 break;
1711 t = TREE_CHAIN (t);
1712 }
1713 if (t)
1714 return TREE_VALUE (t);
1715 if (!e->callee)
1716 return NULL;
67348ccc 1717 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1718 for (n = 0; n < i; n++)
1719 {
1720 if (!t)
1721 return NULL;
1722 t = TREE_CHAIN (t);
1723 }
1724 if (t)
1725 return TREE_TYPE (t);
1726 return NULL;
1727}
1728
3e293154
MJ
1729/* Compute jump function for all arguments of callsite CS and insert the
1730 information in the jump_functions array in the ipa_edge_args corresponding
1731 to this callsite. */
be95e2b9 1732
749aa96d 1733static void
8aab5218 1734ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
062c604f 1735 struct cgraph_edge *cs)
3e293154
MJ
1736{
1737 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1738 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1739 gimple call = cs->call_stmt;
8b7773a4 1740 int n, arg_num = gimple_call_num_args (call);
3e293154 1741
606d9a09 1742 if (arg_num == 0 || args->jump_functions)
3e293154 1743 return;
9771b263 1744 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1745
96e24d49
JJ
1746 if (gimple_call_internal_p (call))
1747 return;
5fe8e757
MJ
1748 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1749 return;
1750
8b7773a4
MJ
1751 for (n = 0; n < arg_num; n++)
1752 {
1753 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1754 tree arg = gimple_call_arg (call, n);
06d65050 1755 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1756
8b7773a4 1757 if (is_gimple_ip_invariant (arg))
4502fe8d 1758 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1759 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1760 && TREE_CODE (arg) == PARM_DECL)
1761 {
1762 int index = ipa_get_param_decl_index (info, arg);
1763
1764 gcc_assert (index >=0);
1765 /* Aggregate passed by value, check for pass-through, otherwise we
1766 will attempt to fill in aggregate contents later in this
1767 for cycle. */
8aab5218 1768 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
8b7773a4 1769 {
b8f6e610 1770 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1771 continue;
1772 }
1773 }
1774 else if (TREE_CODE (arg) == SSA_NAME)
1775 {
1776 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1777 {
1778 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1779 if (index >= 0)
8b7773a4 1780 {
b8f6e610 1781 bool agg_p, type_p;
8aab5218 1782 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
06d65050
JH
1783 if (param_type && POINTER_TYPE_P (param_type))
1784 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1785 call, jfunc);
1786 else
1787 type_p = false;
b8f6e610 1788 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1789 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1790 type_p);
8b7773a4
MJ
1791 }
1792 }
1793 else
1794 {
1795 gimple stmt = SSA_NAME_DEF_STMT (arg);
1796 if (is_gimple_assign (stmt))
8aab5218 1797 compute_complex_assign_jump_func (fbi, info, jfunc,
06d65050 1798 call, stmt, arg, param_type);
8b7773a4 1799 else if (gimple_code (stmt) == GIMPLE_PHI)
8aab5218 1800 compute_complex_ancestor_jump_func (fbi, info, jfunc,
06d65050 1801 call, stmt, param_type);
8b7773a4
MJ
1802 }
1803 }
1804 else
06d65050
JH
1805 compute_known_type_jump_func (arg, jfunc, call,
1806 param_type
1807 && POINTER_TYPE_P (param_type)
1808 ? TREE_TYPE (param_type)
1809 : NULL);
3e293154 1810
85942f45
JH
1811 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1812 passed (because type conversions are ignored in gimple). Usually we can
1813 safely get type from function declaration, but in case of K&R prototypes or
1814 variadic functions we can try our luck with type of the pointer passed.
1815 TODO: Since we look for actual initialization of the memory object, we may better
1816 work out the type based on the memory stores we find. */
1817 if (!param_type)
1818 param_type = TREE_TYPE (arg);
1819
8b7773a4
MJ
1820 if ((jfunc->type != IPA_JF_PASS_THROUGH
1821 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1822 && (jfunc->type != IPA_JF_ANCESTOR
1823 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1824 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
85942f45
JH
1825 || POINTER_TYPE_P (param_type)))
1826 determine_known_aggregate_parts (call, arg, param_type, jfunc);
8b7773a4 1827 }
3e293154
MJ
1828}
1829
749aa96d 1830/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 1831 from BB. */
749aa96d 1832
062c604f 1833static void
8aab5218 1834ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
749aa96d 1835{
8aab5218
MJ
1836 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1837 int i;
749aa96d
MJ
1838 struct cgraph_edge *cs;
1839
8aab5218 1840 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 1841 {
8aab5218 1842 struct cgraph_node *callee = cs->callee;
749aa96d 1843
8aab5218
MJ
1844 if (callee)
1845 {
1846 cgraph_function_or_thunk_node (callee, NULL);
1847 /* We do not need to bother analyzing calls to unknown functions
1848 unless they may become known during lto/whopr. */
1849 if (!callee->definition && !flag_lto)
1850 continue;
1851 }
1852 ipa_compute_jump_functions_for_edge (fbi, cs);
1853 }
749aa96d
MJ
1854}
1855
8b7773a4
MJ
1856/* If STMT looks like a statement loading a value from a member pointer formal
1857 parameter, return that parameter and store the offset of the field to
1858 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1859 might be clobbered). If USE_DELTA, then we look for a use of the delta
1860 field rather than the pfn. */
be95e2b9 1861
3e293154 1862static tree
8b7773a4
MJ
1863ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1864 HOST_WIDE_INT *offset_p)
3e293154 1865{
8b7773a4
MJ
1866 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1867
1868 if (!gimple_assign_single_p (stmt))
1869 return NULL_TREE;
3e293154 1870
8b7773a4 1871 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1872 if (TREE_CODE (rhs) == COMPONENT_REF)
1873 {
1874 ref_field = TREE_OPERAND (rhs, 1);
1875 rhs = TREE_OPERAND (rhs, 0);
1876 }
1877 else
1878 ref_field = NULL_TREE;
d242d063 1879 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1880 return NULL_TREE;
3e293154 1881 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1882 if (TREE_CODE (rec) != ADDR_EXPR)
1883 return NULL_TREE;
1884 rec = TREE_OPERAND (rec, 0);
3e293154 1885 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1886 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1887 return NULL_TREE;
d242d063 1888 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1889
8b7773a4
MJ
1890 if (use_delta)
1891 fld = delta_field;
1892 else
1893 fld = ptr_field;
1894 if (offset_p)
1895 *offset_p = int_bit_position (fld);
1896
ae788515
EB
1897 if (ref_field)
1898 {
1899 if (integer_nonzerop (ref_offset))
1900 return NULL_TREE;
ae788515
EB
1901 return ref_field == fld ? rec : NULL_TREE;
1902 }
3e293154 1903 else
8b7773a4
MJ
1904 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1905 : NULL_TREE;
3e293154
MJ
1906}
1907
1908/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1909
3e293154
MJ
1910static bool
1911ipa_is_ssa_with_stmt_def (tree t)
1912{
1913 if (TREE_CODE (t) == SSA_NAME
1914 && !SSA_NAME_IS_DEFAULT_DEF (t))
1915 return true;
1916 else
1917 return false;
1918}
1919
40591473
MJ
1920/* Find the indirect call graph edge corresponding to STMT and mark it as a
1921 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1922 indirect call graph edge. */
be95e2b9 1923
40591473
MJ
1924static struct cgraph_edge *
1925ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1926{
e33c6cd6 1927 struct cgraph_edge *cs;
3e293154 1928
5f902d76 1929 cs = cgraph_edge (node, stmt);
b258210c 1930 cs->indirect_info->param_index = param_index;
8b7773a4 1931 cs->indirect_info->agg_contents = 0;
c13bc3d9 1932 cs->indirect_info->member_ptr = 0;
40591473 1933 return cs;
3e293154
MJ
1934}
1935
e33c6cd6 1936/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1937 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1938 intermediate information about each formal parameter. Currently it checks
1939 whether the call calls a pointer that is a formal parameter and if so, the
1940 parameter is marked with the called flag and an indirect call graph edge
1941 describing the call is created. This is very simple for ordinary pointers
1942 represented in SSA but not-so-nice when it comes to member pointers. The
1943 ugly part of this function does nothing more than trying to match the
1944 pattern of such a call. An example of such a pattern is the gimple dump
1945 below, the call is on the last line:
3e293154 1946
ae788515
EB
1947 <bb 2>:
1948 f$__delta_5 = f.__delta;
1949 f$__pfn_24 = f.__pfn;
1950
1951 or
3e293154 1952 <bb 2>:
d242d063
MJ
1953 f$__delta_5 = MEM[(struct *)&f];
1954 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1955
ae788515 1956 and a few lines below:
8aa29647
MJ
1957
1958 <bb 5>
3e293154
MJ
1959 D.2496_3 = (int) f$__pfn_24;
1960 D.2497_4 = D.2496_3 & 1;
1961 if (D.2497_4 != 0)
1962 goto <bb 3>;
1963 else
1964 goto <bb 4>;
1965
8aa29647 1966 <bb 6>:
3e293154
MJ
1967 D.2500_7 = (unsigned int) f$__delta_5;
1968 D.2501_8 = &S + D.2500_7;
1969 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1970 D.2503_10 = *D.2502_9;
1971 D.2504_12 = f$__pfn_24 + -1;
1972 D.2505_13 = (unsigned int) D.2504_12;
1973 D.2506_14 = D.2503_10 + D.2505_13;
1974 D.2507_15 = *D.2506_14;
1975 iftmp.11_16 = (String:: *) D.2507_15;
1976
8aa29647 1977 <bb 7>:
3e293154
MJ
1978 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1979 D.2500_19 = (unsigned int) f$__delta_5;
1980 D.2508_20 = &S + D.2500_19;
1981 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1982
1983 Such patterns are results of simple calls to a member pointer:
1984
1985 int doprinting (int (MyString::* f)(int) const)
1986 {
1987 MyString S ("somestring");
1988
1989 return (S.*f)(4);
1990 }
8b7773a4
MJ
1991
1992 Moreover, the function also looks for called pointers loaded from aggregates
1993 passed by value or reference. */
3e293154
MJ
1994
1995static void
8aab5218
MJ
1996ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
1997 tree target)
3e293154 1998{
8aab5218 1999 struct ipa_node_params *info = fbi->info;
8b7773a4
MJ
2000 HOST_WIDE_INT offset;
2001 bool by_ref;
3e293154 2002
3e293154
MJ
2003 if (SSA_NAME_IS_DEFAULT_DEF (target))
2004 {
b258210c 2005 tree var = SSA_NAME_VAR (target);
8aab5218 2006 int index = ipa_get_param_decl_index (info, var);
3e293154 2007 if (index >= 0)
8aab5218 2008 ipa_note_param_call (fbi->node, index, call);
3e293154
MJ
2009 return;
2010 }
2011
8aab5218
MJ
2012 int index;
2013 gimple def = SSA_NAME_DEF_STMT (target);
8b7773a4 2014 if (gimple_assign_single_p (def)
8aab5218 2015 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
8b7773a4 2016 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 2017 NULL, &by_ref))
8b7773a4 2018 {
8aab5218 2019 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
68377e53
JH
2020 if (cs->indirect_info->offset != offset)
2021 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
2022 cs->indirect_info->offset = offset;
2023 cs->indirect_info->agg_contents = 1;
2024 cs->indirect_info->by_ref = by_ref;
2025 return;
2026 }
2027
3e293154
MJ
2028 /* Now we need to try to match the complex pattern of calling a member
2029 pointer. */
8b7773a4
MJ
2030 if (gimple_code (def) != GIMPLE_PHI
2031 || gimple_phi_num_args (def) != 2
2032 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
2033 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2034 return;
2035
3e293154
MJ
2036 /* First, we need to check whether one of these is a load from a member
2037 pointer that is a parameter to this function. */
8aab5218
MJ
2038 tree n1 = PHI_ARG_DEF (def, 0);
2039 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 2040 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 2041 return;
8aab5218
MJ
2042 gimple d1 = SSA_NAME_DEF_STMT (n1);
2043 gimple d2 = SSA_NAME_DEF_STMT (n2);
3e293154 2044
8aab5218
MJ
2045 tree rec;
2046 basic_block bb, virt_bb;
2047 basic_block join = gimple_bb (def);
8b7773a4 2048 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 2049 {
8b7773a4 2050 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
2051 return;
2052
8aa29647 2053 bb = EDGE_PRED (join, 0)->src;
726a989a 2054 virt_bb = gimple_bb (d2);
3e293154 2055 }
8b7773a4 2056 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 2057 {
8aa29647 2058 bb = EDGE_PRED (join, 1)->src;
726a989a 2059 virt_bb = gimple_bb (d1);
3e293154
MJ
2060 }
2061 else
2062 return;
2063
2064 /* Second, we need to check that the basic blocks are laid out in the way
2065 corresponding to the pattern. */
2066
3e293154
MJ
2067 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2068 || single_pred (virt_bb) != bb
2069 || single_succ (virt_bb) != join)
2070 return;
2071
2072 /* Third, let's see that the branching is done depending on the least
2073 significant bit of the pfn. */
2074
8aab5218 2075 gimple branch = last_stmt (bb);
8aa29647 2076 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
2077 return;
2078
12430896
RG
2079 if ((gimple_cond_code (branch) != NE_EXPR
2080 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 2081 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 2082 return;
3e293154 2083
8aab5218 2084 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
2085 if (!ipa_is_ssa_with_stmt_def (cond))
2086 return;
2087
726a989a 2088 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 2089 if (!is_gimple_assign (def)
726a989a
RB
2090 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2091 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 2092 return;
726a989a
RB
2093
2094 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2095 if (!ipa_is_ssa_with_stmt_def (cond))
2096 return;
2097
726a989a 2098 def = SSA_NAME_DEF_STMT (cond);
3e293154 2099
8b75fc9b
MJ
2100 if (is_gimple_assign (def)
2101 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2102 {
726a989a 2103 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2104 if (!ipa_is_ssa_with_stmt_def (cond))
2105 return;
726a989a 2106 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2107 }
2108
8aab5218 2109 tree rec2;
6f7b8b70
RE
2110 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2111 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2112 == ptrmemfunc_vbit_in_delta),
2113 NULL);
3e293154
MJ
2114 if (rec != rec2)
2115 return;
2116
2117 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2118 if (index >= 0
8aab5218 2119 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2120 {
8aab5218 2121 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
68377e53
JH
2122 if (cs->indirect_info->offset != offset)
2123 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
2124 cs->indirect_info->offset = offset;
2125 cs->indirect_info->agg_contents = 1;
c13bc3d9 2126 cs->indirect_info->member_ptr = 1;
8b7773a4 2127 }
3e293154
MJ
2128
2129 return;
2130}
2131
b258210c
MJ
2132/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2133 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2134 FBI->node (described by FBI->info), create a call note for the
2135 statement. */
b258210c
MJ
2136
2137static void
8aab5218
MJ
2138ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2139 gimple call, tree target)
b258210c
MJ
2140{
2141 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2142 int index;
40591473 2143 HOST_WIDE_INT anc_offset;
b258210c 2144
05842ff5
MJ
2145 if (!flag_devirtualize)
2146 return;
2147
40591473 2148 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2149 return;
2150
8aab5218 2151 struct ipa_node_params *info = fbi->info;
40591473
MJ
2152 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2153 {
8aab5218 2154 struct ipa_jump_func jfunc;
40591473
MJ
2155 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2156 return;
b258210c 2157
40591473
MJ
2158 anc_offset = 0;
2159 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2160 gcc_assert (index >= 0);
06d65050
JH
2161 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2162 call, &jfunc))
40591473
MJ
2163 return;
2164 }
2165 else
2166 {
8aab5218 2167 struct ipa_jump_func jfunc;
40591473
MJ
2168 gimple stmt = SSA_NAME_DEF_STMT (obj);
2169 tree expr;
2170
2171 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2172 if (!expr)
2173 return;
2174 index = ipa_get_param_decl_index (info,
2175 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2176 gcc_assert (index >= 0);
06d65050
JH
2177 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2178 call, &jfunc, anc_offset))
40591473
MJ
2179 return;
2180 }
2181
8aab5218
MJ
2182 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2183 struct cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2184 ii->offset = anc_offset;
ae7e9ddd 2185 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2186 ii->otr_type = obj_type_ref_class (target);
40591473 2187 ii->polymorphic = 1;
b258210c
MJ
2188}
2189
2190/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2191 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2192 containing intermediate information about each formal parameter. */
b258210c
MJ
2193
2194static void
8aab5218 2195ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
b258210c
MJ
2196{
2197 tree target = gimple_call_fn (call);
b786d31f
JH
2198
2199 if (!target
2200 || (TREE_CODE (target) != SSA_NAME
2201 && !virtual_method_call_p (target)))
2202 return;
b258210c 2203
b786d31f
JH
2204 /* If we previously turned the call into a direct call, there is
2205 no need to analyze. */
8aab5218 2206 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
b786d31f 2207 if (cs && !cs->indirect_unknown_callee)
25583c4f 2208 return;
b258210c 2209 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2210 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2211 else if (virtual_method_call_p (target))
8aab5218 2212 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2213}
2214
2215
e33c6cd6 2216/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2217 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2218 formal parameters are called. */
be95e2b9 2219
3e293154 2220static void
8aab5218 2221ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
3e293154 2222{
726a989a 2223 if (is_gimple_call (stmt))
8aab5218 2224 ipa_analyze_call_uses (fbi, stmt);
062c604f
MJ
2225}
2226
2227/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2228 If OP is a parameter declaration, mark it as used in the info structure
2229 passed in DATA. */
2230
2231static bool
9f1363cd 2232visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
062c604f
MJ
2233{
2234 struct ipa_node_params *info = (struct ipa_node_params *) data;
2235
2236 op = get_base_address (op);
2237 if (op
2238 && TREE_CODE (op) == PARM_DECL)
2239 {
2240 int index = ipa_get_param_decl_index (info, op);
2241 gcc_assert (index >= 0);
310bc633 2242 ipa_set_param_used (info, index, true);
062c604f
MJ
2243 }
2244
2245 return false;
3e293154
MJ
2246}
2247
8aab5218
MJ
2248/* Scan the statements in BB and inspect the uses of formal parameters. Store
2249 the findings in various structures of the associated ipa_node_params
2250 structure, such as parameter flags, notes etc. FBI holds various data about
2251 the function being analyzed. */
be95e2b9 2252
062c604f 2253static void
8aab5218 2254ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
3e293154 2255{
726a989a 2256 gimple_stmt_iterator gsi;
8aab5218
MJ
2257 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2258 {
2259 gimple stmt = gsi_stmt (gsi);
3e293154 2260
8aab5218
MJ
2261 if (is_gimple_debug (stmt))
2262 continue;
3e293154 2263
8aab5218
MJ
2264 ipa_analyze_stmt_uses (fbi, stmt);
2265 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2266 visit_ref_for_mod_analysis,
2267 visit_ref_for_mod_analysis,
2268 visit_ref_for_mod_analysis);
5fe8e757 2269 }
8aab5218
MJ
2270 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2271 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2272 visit_ref_for_mod_analysis,
2273 visit_ref_for_mod_analysis,
2274 visit_ref_for_mod_analysis);
2275}
2276
2277/* Calculate controlled uses of parameters of NODE. */
2278
2279static void
2280ipa_analyze_controlled_uses (struct cgraph_node *node)
2281{
2282 struct ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2283
8aab5218 2284 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2285 {
2286 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2287 int controlled_uses = 0;
2288
062c604f
MJ
2289 /* For SSA regs see if parameter is used. For non-SSA we compute
2290 the flag during modification analysis. */
4502fe8d
MJ
2291 if (is_gimple_reg (parm))
2292 {
67348ccc 2293 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2294 parm);
2295 if (ddef && !has_zero_uses (ddef))
2296 {
2297 imm_use_iterator imm_iter;
2298 use_operand_p use_p;
2299
2300 ipa_set_param_used (info, i, true);
2301 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2302 if (!is_gimple_call (USE_STMT (use_p)))
2303 {
c6de6665
JJ
2304 if (!is_gimple_debug (USE_STMT (use_p)))
2305 {
2306 controlled_uses = IPA_UNDESCRIBED_USE;
2307 break;
2308 }
4502fe8d
MJ
2309 }
2310 else
2311 controlled_uses++;
2312 }
2313 else
2314 controlled_uses = 0;
2315 }
2316 else
2317 controlled_uses = IPA_UNDESCRIBED_USE;
2318 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2319 }
8aab5218 2320}
062c604f 2321
8aab5218 2322/* Free stuff in BI. */
062c604f 2323
8aab5218
MJ
2324static void
2325free_ipa_bb_info (struct ipa_bb_info *bi)
2326{
2327 bi->cg_edges.release ();
2328 bi->param_aa_statuses.release ();
3e293154
MJ
2329}
2330
8aab5218 2331/* Dominator walker driving the analysis. */
2c9561b5 2332
8aab5218 2333class analysis_dom_walker : public dom_walker
2c9561b5 2334{
8aab5218
MJ
2335public:
2336 analysis_dom_walker (struct func_body_info *fbi)
2337 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2338
8aab5218
MJ
2339 virtual void before_dom_children (basic_block);
2340
2341private:
2342 struct func_body_info *m_fbi;
2343};
2344
2345void
2346analysis_dom_walker::before_dom_children (basic_block bb)
2347{
2348 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2349 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2c9561b5
MJ
2350}
2351
dd5a833e
MS
2352/* Initialize the array describing properties of of formal parameters
2353 of NODE, analyze their uses and compute jump functions associated
2354 with actual arguments of calls from within NODE. */
062c604f
MJ
2355
2356void
2357ipa_analyze_node (struct cgraph_node *node)
2358{
8aab5218 2359 struct func_body_info fbi;
57dbdc5a 2360 struct ipa_node_params *info;
062c604f 2361
57dbdc5a
MJ
2362 ipa_check_create_node_params ();
2363 ipa_check_create_edge_args ();
2364 info = IPA_NODE_REF (node);
8aab5218
MJ
2365
2366 if (info->analysis_done)
2367 return;
2368 info->analysis_done = 1;
2369
2370 if (ipa_func_spec_opts_forbid_analysis_p (node))
2371 {
2372 for (int i = 0; i < ipa_get_param_count (info); i++)
2373 {
2374 ipa_set_param_used (info, i, true);
2375 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2376 }
2377 return;
2378 }
2379
2380 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2381 push_cfun (func);
2382 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2383 ipa_initialize_node_params (node);
8aab5218 2384 ipa_analyze_controlled_uses (node);
062c604f 2385
8aab5218
MJ
2386 fbi.node = node;
2387 fbi.info = IPA_NODE_REF (node);
2388 fbi.bb_infos = vNULL;
2389 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2390 fbi.param_count = ipa_get_param_count (info);
2391 fbi.aa_walked = 0;
062c604f 2392
8aab5218
MJ
2393 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2394 {
2395 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2396 bi->cg_edges.safe_push (cs);
2397 }
062c604f 2398
8aab5218
MJ
2399 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2400 {
2401 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2402 bi->cg_edges.safe_push (cs);
2403 }
2404
2405 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2406
2407 int i;
2408 struct ipa_bb_info *bi;
2409 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2410 free_ipa_bb_info (bi);
2411 fbi.bb_infos.release ();
2412 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2413 pop_cfun ();
062c604f
MJ
2414}
2415
e248d83f
MJ
2416/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2417 attempt a type-based devirtualization. If successful, return the
2418 target function declaration, otherwise return NULL. */
2419
2420tree
2421ipa_intraprocedural_devirtualization (gimple call)
2422{
2423 tree binfo, token, fndecl;
2424 struct ipa_jump_func jfunc;
2425 tree otr = gimple_call_fn (call);
2426
2427 jfunc.type = IPA_JF_UNKNOWN;
2428 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2429 call, obj_type_ref_class (otr));
e248d83f
MJ
2430 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2431 return NULL_TREE;
2432 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2433 if (!binfo)
2434 return NULL_TREE;
2435 token = OBJ_TYPE_REF_TOKEN (otr);
ae7e9ddd 2436 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
e248d83f 2437 binfo);
450ad0cd
JH
2438#ifdef ENABLE_CHECKING
2439 if (fndecl)
2440 gcc_assert (possible_polymorphic_call_target_p
2441 (otr, cgraph_get_node (fndecl)));
2442#endif
e248d83f
MJ
2443 return fndecl;
2444}
062c604f 2445
61502ca8 2446/* Update the jump function DST when the call graph edge corresponding to SRC is
b258210c
MJ
2447 is being inlined, knowing that DST is of type ancestor and src of known
2448 type. */
2449
2450static void
2451combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2452 struct ipa_jump_func *dst)
2453{
c7573249
MJ
2454 HOST_WIDE_INT combined_offset;
2455 tree combined_type;
b258210c 2456
b8f6e610
MJ
2457 if (!ipa_get_jf_ancestor_type_preserved (dst))
2458 {
2459 dst->type = IPA_JF_UNKNOWN;
2460 return;
2461 }
2462
7b872d9e
MJ
2463 combined_offset = ipa_get_jf_known_type_offset (src)
2464 + ipa_get_jf_ancestor_offset (dst);
2465 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2466
7b872d9e
MJ
2467 ipa_set_jf_known_type (dst, combined_offset,
2468 ipa_get_jf_known_type_base_type (src),
2469 combined_type);
b258210c
MJ
2470}
2471
be95e2b9 2472/* Update the jump functions associated with call graph edge E when the call
3e293154 2473 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2474 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2475
3e293154
MJ
2476static void
2477update_jump_functions_after_inlining (struct cgraph_edge *cs,
2478 struct cgraph_edge *e)
2479{
2480 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2481 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2482 int count = ipa_get_cs_argument_count (args);
2483 int i;
2484
2485 for (i = 0; i < count; i++)
2486 {
b258210c 2487 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2488
685b0d13
MJ
2489 if (dst->type == IPA_JF_ANCESTOR)
2490 {
b258210c 2491 struct ipa_jump_func *src;
8b7773a4 2492 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2493
b258210c
MJ
2494 /* Variable number of arguments can cause havoc if we try to access
2495 one that does not exist in the inlined edge. So make sure we
2496 don't. */
8b7773a4 2497 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2498 {
2499 dst->type = IPA_JF_UNKNOWN;
2500 continue;
2501 }
2502
8b7773a4
MJ
2503 src = ipa_get_ith_jump_func (top, dst_fid);
2504
2505 if (src->agg.items
2506 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2507 {
2508 struct ipa_agg_jf_item *item;
2509 int j;
2510
2511 /* Currently we do not produce clobber aggregate jump functions,
2512 replace with merging when we do. */
2513 gcc_assert (!dst->agg.items);
2514
9771b263 2515 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2516 dst->agg.by_ref = src->agg.by_ref;
9771b263 2517 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2518 item->offset -= dst->value.ancestor.offset;
2519 }
2520
b258210c
MJ
2521 if (src->type == IPA_JF_KNOWN_TYPE)
2522 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2523 else if (src->type == IPA_JF_PASS_THROUGH
2524 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2525 {
2526 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2527 dst->value.ancestor.agg_preserved &=
2528 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2529 dst->value.ancestor.type_preserved &=
2530 src->value.pass_through.type_preserved;
8b7773a4 2531 }
b258210c
MJ
2532 else if (src->type == IPA_JF_ANCESTOR)
2533 {
2534 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2535 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2536 dst->value.ancestor.agg_preserved &=
2537 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2538 dst->value.ancestor.type_preserved &=
2539 src->value.ancestor.type_preserved;
b258210c
MJ
2540 }
2541 else
2542 dst->type = IPA_JF_UNKNOWN;
2543 }
2544 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2545 {
b258210c
MJ
2546 struct ipa_jump_func *src;
2547 /* We must check range due to calls with variable number of arguments
2548 and we cannot combine jump functions with operations. */
2549 if (dst->value.pass_through.operation == NOP_EXPR
2550 && (dst->value.pass_through.formal_id
2551 < ipa_get_cs_argument_count (top)))
2552 {
8b7773a4
MJ
2553 int dst_fid = dst->value.pass_through.formal_id;
2554 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2555 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2556
b8f6e610
MJ
2557 switch (src->type)
2558 {
2559 case IPA_JF_UNKNOWN:
2560 dst->type = IPA_JF_UNKNOWN;
2561 break;
2562 case IPA_JF_KNOWN_TYPE:
2ace77c2
JH
2563 if (ipa_get_jf_pass_through_type_preserved (dst))
2564 ipa_set_jf_known_type (dst,
2565 ipa_get_jf_known_type_offset (src),
2566 ipa_get_jf_known_type_base_type (src),
0a2550e7 2567 ipa_get_jf_known_type_component_type (src));
2ace77c2
JH
2568 else
2569 dst->type = IPA_JF_UNKNOWN;
b8f6e610
MJ
2570 break;
2571 case IPA_JF_CONST:
2572 ipa_set_jf_cst_copy (dst, src);
2573 break;
2574
2575 case IPA_JF_PASS_THROUGH:
2576 {
2577 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2578 enum tree_code operation;
2579 operation = ipa_get_jf_pass_through_operation (src);
2580
2581 if (operation == NOP_EXPR)
2582 {
2583 bool agg_p, type_p;
2584 agg_p = dst_agg_p
2585 && ipa_get_jf_pass_through_agg_preserved (src);
2586 type_p = ipa_get_jf_pass_through_type_preserved (src)
2587 && ipa_get_jf_pass_through_type_preserved (dst);
2588 ipa_set_jf_simple_pass_through (dst, formal_id,
2589 agg_p, type_p);
2590 }
2591 else
2592 {
2593 tree operand = ipa_get_jf_pass_through_operand (src);
2594 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2595 operation);
2596 }
2597 break;
2598 }
2599 case IPA_JF_ANCESTOR:
2600 {
2601 bool agg_p, type_p;
2602 agg_p = dst_agg_p
2603 && ipa_get_jf_ancestor_agg_preserved (src);
2604 type_p = ipa_get_jf_ancestor_type_preserved (src)
2605 && ipa_get_jf_pass_through_type_preserved (dst);
2606 ipa_set_ancestor_jf (dst,
2607 ipa_get_jf_ancestor_offset (src),
2608 ipa_get_jf_ancestor_type (src),
2609 ipa_get_jf_ancestor_formal_id (src),
2610 agg_p, type_p);
2611 break;
2612 }
2613 default:
2614 gcc_unreachable ();
2615 }
8b7773a4
MJ
2616
2617 if (src->agg.items
b8f6e610 2618 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2619 {
2620 /* Currently we do not produce clobber aggregate jump
2621 functions, replace with merging when we do. */
2622 gcc_assert (!dst->agg.items);
2623
2624 dst->agg.by_ref = src->agg.by_ref;
9771b263 2625 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2626 }
b258210c
MJ
2627 }
2628 else
2629 dst->type = IPA_JF_UNKNOWN;
3e293154 2630 }
b258210c
MJ
2631 }
2632}
2633
2634/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2635 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2636
3949c4a7 2637struct cgraph_edge *
81fa35bd 2638ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2639{
2640 struct cgraph_node *callee;
0f378cb5 2641 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2642 bool unreachable = false;
b258210c 2643
ceeffab0
MJ
2644 if (TREE_CODE (target) == ADDR_EXPR)
2645 target = TREE_OPERAND (target, 0);
b258210c 2646 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2647 {
2648 target = canonicalize_constructor_val (target, NULL);
2649 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2650 {
c13bc3d9
MJ
2651 if (ie->indirect_info->member_ptr)
2652 /* Member pointer call that goes through a VMT lookup. */
2653 return NULL;
2654
2b5f0895
XDL
2655 if (dump_enabled_p ())
2656 {
2657 location_t loc = gimple_location (ie->call_stmt);
2658 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2659 "discovered direct call to non-function in %s/%i, "
2660 "making it __builtin_unreachable\n",
2661 ie->caller->name (),
2662 ie->caller->order);
2663 }
48b1474e
MJ
2664 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2665 callee = cgraph_get_create_node (target);
2666 unreachable = true;
a0a7b611 2667 }
48b1474e
MJ
2668 else
2669 callee = cgraph_get_node (target);
a0a7b611 2670 }
48b1474e
MJ
2671 else
2672 callee = cgraph_get_node (target);
a0a7b611
JH
2673
2674 /* Because may-edges are not explicitely represented and vtable may be external,
2675 we may create the first reference to the object in the unit. */
2676 if (!callee || callee->global.inlined_to)
2677 {
a0a7b611
JH
2678
2679 /* We are better to ensure we can refer to it.
2680 In the case of static functions we are out of luck, since we already
2681 removed its body. In the case of public functions we may or may
2682 not introduce the reference. */
2683 if (!canonicalize_constructor_val (target, NULL)
2684 || !TREE_PUBLIC (target))
2685 {
2686 if (dump_file)
2687 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2688 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
fec39fa6 2689 xstrdup (ie->caller->name ()),
67348ccc 2690 ie->caller->order,
fec39fa6 2691 xstrdup (ie->callee->name ()),
67348ccc 2692 ie->callee->order);
a0a7b611
JH
2693 return NULL;
2694 }
6f99e449 2695 callee = cgraph_get_create_node (target);
a0a7b611 2696 }
2b5f0895
XDL
2697
2698 if (!dbg_cnt (devirt))
2699 return NULL;
2700
1dbee8c9 2701 ipa_check_create_node_params ();
ceeffab0 2702
81fa35bd
MJ
2703 /* We can not make edges to inline clones. It is bug that someone removed
2704 the cgraph node too early. */
17afc0fe
JH
2705 gcc_assert (!callee->global.inlined_to);
2706
48b1474e 2707 if (dump_file && !unreachable)
b258210c
MJ
2708 {
2709 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2710 "(%s/%i -> %s/%i), for stmt ",
b258210c 2711 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
fec39fa6 2712 xstrdup (ie->caller->name ()),
67348ccc 2713 ie->caller->order,
fec39fa6 2714 xstrdup (callee->name ()),
67348ccc 2715 callee->order);
b258210c
MJ
2716 if (ie->call_stmt)
2717 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2718 else
2719 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2720 }
2b5f0895
XDL
2721 if (dump_enabled_p ())
2722 {
2723 location_t loc = gimple_location (ie->call_stmt);
2724 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2725 "converting indirect call in %s to direct call to %s\n",
2726 ie->caller->name (), callee->name ());
2727 }
042ae7d2
JH
2728 ie = cgraph_make_edge_direct (ie, callee);
2729 es = inline_edge_summary (ie);
2730 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2731 - eni_size_weights.call_cost);
2732 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2733 - eni_time_weights.call_cost);
749aa96d 2734
b258210c 2735 return ie;
3e293154
MJ
2736}
2737
8b7773a4
MJ
2738/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2739 return NULL if there is not any. BY_REF specifies whether the value has to
2740 be passed by reference or by value. */
2741
2742tree
2743ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2744 HOST_WIDE_INT offset, bool by_ref)
2745{
2746 struct ipa_agg_jf_item *item;
2747 int i;
2748
2749 if (by_ref != agg->by_ref)
2750 return NULL;
2751
9771b263 2752 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2753 if (item->offset == offset)
2754 {
2755 /* Currently we do not have clobber values, return NULL for them once
2756 we do. */
2757 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2758 return item->value;
2759 }
8b7773a4
MJ
2760 return NULL;
2761}
2762
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  A NULL RDESC->cs means the originating
   edge is already gone, in which case there is nothing to remove.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  /* Look the reference up by the statement and LTO uid of the edge that
     introduced it.  */
  to_del = ipa_find_reference (origin->caller, symbol,
			       origin->call_stmt, origin->lto_stmt_uid);
  if (!to_del)
    return false;

  ipa_remove_reference (to_del);
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup (origin->caller->name ()),
	     origin->caller->order, xstrdup (symbol->name ()));
  return true;
}
2788
2789/* If JFUNC has a reference description with refcount different from
2790 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2791 NULL. JFUNC must be a constant jump function. */
2792
2793static struct ipa_cst_ref_desc *
2794jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2795{
2796 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2797 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2798 return rdesc;
2799 else
2800 return NULL;
2801}
2802
568cda29
MJ
2803/* If the value of constant jump function JFUNC is an address of a function
2804 declaration, return the associated call graph node. Otherwise return
2805 NULL. */
2806
2807static cgraph_node *
2808cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2809{
2810 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2811 tree cst = ipa_get_jf_constant (jfunc);
2812 if (TREE_CODE (cst) != ADDR_EXPR
2813 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2814 return NULL;
2815
2816 return cgraph_get_node (TREE_OPERAND (cst, 0));
2817}
2818
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  Jump functions that
   are not constants or have no usable rdesc are trivially "successful".  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      /* The constant must be an address of a function for a described
	 reference to exist.  */
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
2841
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  /* Remember this before IE is possibly rewritten by
     ipa_make_edge_direct_to_target below.  */
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    /* The callee address is loaded from an aggregate passed in the
       parameter; look it up in the aggregate jump function.  */
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The jump function described one controlled use which has now been
	 turned into an explicit call graph edge.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
2881
d250540a
MJ
2882/* Try to find a destination for indirect edge IE that corresponds to a virtual
2883 call based on a formal parameter which is described by jump function JFUNC
2884 and if it can be determined, make it direct and return the direct edge.
2885 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2886 are relative to. */
b258210c
MJ
2887
2888static struct cgraph_edge *
2889try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2890 struct ipa_jump_func *jfunc,
2891 struct ipa_node_params *new_root_info)
3e293154 2892{
9de2f554 2893 tree binfo, target;
85942f45
JH
2894
2895 if (!flag_devirtualize)
2896 return NULL;
b258210c 2897
9de2f554 2898 /* First try to do lookup via known virtual table pointer value. */
85942f45
JH
2899 if (!ie->indirect_info->by_ref)
2900 {
9de2f554
JH
2901 tree vtable;
2902 unsigned HOST_WIDE_INT offset;
85942f45
JH
2903 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2904 ie->indirect_info->offset,
2905 true);
9de2f554
JH
2906 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2907 {
2908 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2909 vtable, offset);
2910 if (target)
2911 {
2912 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2913 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2914 || !possible_polymorphic_call_target_p
2915 (ie, cgraph_get_node (target)))
2916 {
2917 if (dump_file)
2918 fprintf (dump_file,
2919 "Type inconsident devirtualization: %s/%i->%s\n",
2920 ie->caller->name (), ie->caller->order,
2921 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2922 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2923 cgraph_get_create_node (target);
2924 }
2925 return ipa_make_edge_direct_to_target (ie, target);
2926 }
2927 }
85942f45
JH
2928 }
2929
9de2f554 2930 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
d250540a 2931
da942ca0 2932 if (!binfo)
b258210c 2933 return NULL;
3e293154 2934
da942ca0
JH
2935 if (TREE_CODE (binfo) != TREE_BINFO)
2936 {
5bccb77a
JH
2937 ipa_polymorphic_call_context context;
2938 vec <cgraph_node *>targets;
2939 bool final;
2940
2941 if (!get_polymorphic_call_info_from_invariant
2942 (&context, binfo, ie->indirect_info->otr_type,
2943 ie->indirect_info->offset))
2944 return NULL;
2945 targets = possible_polymorphic_call_targets
2946 (ie->indirect_info->otr_type,
2947 ie->indirect_info->otr_token,
2948 context, &final);
2949 if (!final || targets.length () > 1)
85942f45 2950 return NULL;
5bccb77a
JH
2951 if (targets.length () == 1)
2952 target = targets[0]->decl;
2953 else
2954 {
2955 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2956 cgraph_get_create_node (target);
2957 }
da942ca0 2958 }
b258210c 2959 else
5bccb77a
JH
2960 {
2961 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2962 ie->indirect_info->otr_type);
2963 if (binfo)
2964 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2965 binfo);
2966 else
2967 return NULL;
2968 }
b258210c
MJ
2969
2970 if (target)
450ad0cd
JH
2971 {
2972#ifdef ENABLE_CHECKING
2973 gcc_assert (possible_polymorphic_call_target_p
2974 (ie, cgraph_get_node (target)));
2975#endif
2976 return ipa_make_edge_direct_to_target (ie, target);
2977 }
b258210c
MJ
2978 else
2979 return NULL;
3e293154
MJ
2980}
2981
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge_p> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* IE may be turned into a direct edge below; fetch the successor
	 now.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							     new_root_info);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Making an edge direct may have reallocated edge args; reload the
	     pointer.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* Retarget the indirect-call description to the caller's formal
	     parameter the value was passed through, unless the pass-through
	     does not preserve what the description relies on.  */
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      /* A nonzero ancestor offset invalidates the known outer
		 type.  */
	      if (ipa_get_jf_ancestor_offset (jfunc))
		ici->outer_type = NULL;
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3088
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge_p> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      /* Inlined callee: recurse into its subtree.  */
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      /* Edge leaving the inlined subtree: update its jump functions.  */
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
3117
4502fe8d
MJ
3118/* Combine two controlled uses counts as done during inlining. */
3119
3120static int
3121combine_controlled_uses_counters (int c, int d)
3122{
3123 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3124 return IPA_UNDESCRIBED_USE;
3125 else
3126 return c + d - 1;
3127}
3128
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only walk arguments that have a corresponding formal parameter.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All uses are accounted for; if the known constant is a
		 function address, the cloning-created reference is now
		 redundant and can be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
		  && (ref = ipa_find_reference (new_root,
						n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (new_root->name ()),
			     new_root->order,
			     xstrdup (n->name ()), n->order);
		  ipa_remove_reference (ref);
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_get_node (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the tree of inline clones and remove the
		     references IPA-CP cloning created in each of them.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = ipa_find_reference (clone,
						n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (clone->name ()),
				     clone->order,
				     xstrdup (n->name ()),
				     n->order);
			  ipa_remove_reference (ref);
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the formal parameter count cannot be tracked; mark
     everything they touch as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3248
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge_p> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector.exists ())
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
3271
/* Frees all dynamically allocated structures that the argument info points
   to.  The vector of jump functions must be released before the struct is
   zeroed, as memset would lose the pointer.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
3281
771578a0 3282/* Free all ipa_edge structures. */
be95e2b9 3283
518dc859 3284void
771578a0 3285ipa_free_all_edge_args (void)
518dc859 3286{
771578a0
MJ
3287 int i;
3288 struct ipa_edge_args *args;
518dc859 3289
9771b263
DN
3290 if (!ipa_edge_args_vector)
3291 return;
3292
3293 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3294 ipa_free_edge_args_substructures (args);
3295
9771b263 3296 vec_free (ipa_edge_args_vector);
518dc859
RL
3297}
3298
/* Frees all dynamically allocated structures that the param info points
   to.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  memset (info, 0, sizeof (*info));
}
3312
771578a0 3313/* Free all ipa_node_params structures. */
be95e2b9 3314
518dc859 3315void
771578a0 3316ipa_free_all_node_params (void)
518dc859 3317{
771578a0
MJ
3318 int i;
3319 struct ipa_node_params *info;
518dc859 3320
9771b263 3321 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3322 ipa_free_node_params_substructures (info);
3323
9771b263 3324 ipa_node_params_vector.release ();
771578a0
MJ
3325}
3326
/* Set the aggregate replacements of NODE to be AGGVALS.  The replacement
   vector is indexed by node uid and grown on demand.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
3338
/* Hook that is called by cgraph.c when an edge is removed.  Releases rdesc
   refcounts held by the edge's constant jump functions and clears any rdesc
   back-pointers to the edge before freeing the edge args.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* Invalidate the rdesc's originating-edge pointer so no one
	     follows it to the edge being removed.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
3368
/* Hook that is called by cgraph.c when a node is removed.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
3380
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies jump
   functions from SRC to DST and duplicates or relinks their reference
   descriptions depending on where the duplicate edge lives.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow copy above shared the aggregate item vector; make the
	 destination's copy independent.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. speculation):
		 clone the underlying reference and create a fresh rdesc
		 for the new edge.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = ipa_find_reference (src->caller, n,
					src->call_stmt, src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      ipa_clone_ref (ref, dst->caller, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC is the edge the rdesc was created for: make a duplicate
		 rdesc for DST and chain it into the duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
    }
}
3466
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   parameter descriptors and analysis flags and deep-copies the aggregate
   replacement value chain.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  /* Lattices are not copied; they are recomputed when needed.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Deep-copy the chain; note the copy ends up in reverse order, which
     callers apparently do not depend on.  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc<ipa_agg_replacement_value> ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
3504
40982661
JH
3505
3506/* Analyze newly added function into callgraph. */
3507
3508static void
3509ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3510{
0136f8f0
AH
3511 if (cgraph_function_with_gimple_body_p (node))
3512 ipa_analyze_node (node);
40982661
JH
3513}
3514
/* Register our cgraph hooks if they are not already there.  Note the
   function insertion hook is registered unconditionally, unlike the
   others.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
518dc859 3535
/* Unregister our cgraph hooks if they are not already there.  Holders are
   cleared so a later ipa_register_cgraph_hooks can re-register them.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
3552
3553/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3554 longer needed after ipa-cp. */
be95e2b9 3555
771578a0 3556void
e33c6cd6 3557ipa_free_all_structures_after_ipa_cp (void)
3e293154 3558{
5ee53a06 3559 if (!optimize)
3e293154
MJ
3560 {
3561 ipa_free_all_edge_args ();
3562 ipa_free_all_node_params ();
310bc633
MJ
3563 free_alloc_pool (ipcp_sources_pool);
3564 free_alloc_pool (ipcp_values_pool);
2c9561b5 3565 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3566 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3567 if (ipa_refdesc_pool)
3568 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3569 }
3570}
3571
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Pools are freed only if they were
   actually allocated.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
3590
/* Print ipa_tree_map data structures of NODE to F.  Nodes without a
   definition are skipped.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
dcd416e3 3622
ca30a539 3623/* Print ipa_tree_map data structures of all functions in the
3e293154 3624 callgraph to F. */
be95e2b9 3625
3e293154 3626void
ca30a539 3627ipa_print_all_params (FILE * f)
3e293154
MJ
3628{
3629 struct cgraph_node *node;
3630
ca30a539 3631 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3632 FOR_EACH_FUNCTION (node)
ca30a539 3633 ipa_print_node_params (f, node);
3e293154 3634}
3f84bf08
MJ
3635
3636/* Return a heap allocated vector containing formal parameters of FNDECL. */
3637
9771b263 3638vec<tree>
3f84bf08
MJ
3639ipa_get_vector_of_formal_parms (tree fndecl)
3640{
9771b263 3641 vec<tree> args;
3f84bf08
MJ
3642 int count;
3643 tree parm;
3644
0e8853ee 3645 gcc_assert (!flag_wpa);
310bc633 3646 count = count_formal_params (fndecl);
9771b263 3647 args.create (count);
910ad8de 3648 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3649 args.quick_push (parm);
3f84bf08
MJ
3650
3651 return args;
3652}
3653
3654/* Return a heap allocated vector containing types of formal parameters of
3655 function type FNTYPE. */
3656
31519c38
AH
3657vec<tree>
3658ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3659{
9771b263 3660 vec<tree> types;
3f84bf08
MJ
3661 int count = 0;
3662 tree t;
3663
3664 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3665 count++;
3666
9771b263 3667 types.create (count);
3f84bf08 3668 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3669 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3670
3671 return types;
3672}
3673
3674/* Modify the function declaration FNDECL and its type according to the plan in
3675 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3676 to reflect the actual parameters being modified which are determined by the
3677 base_index field. */
3678
3679void
31519c38 3680ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 3681{
31519c38
AH
3682 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3683 tree orig_type = TREE_TYPE (fndecl);
3684 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
3685
3686 /* The following test is an ugly hack, some functions simply don't have any
3687 arguments in their type. This is probably a bug but well... */
31519c38
AH
3688 bool care_for_types = (old_arg_types != NULL_TREE);
3689 bool last_parm_void;
3690 vec<tree> otypes;
3f84bf08
MJ
3691 if (care_for_types)
3692 {
3693 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3694 == void_type_node);
31519c38 3695 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 3696 if (last_parm_void)
9771b263 3697 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3698 else
9771b263 3699 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3700 }
3701 else
3702 {
3703 last_parm_void = false;
9771b263 3704 otypes.create (0);
3f84bf08
MJ
3705 }
3706
31519c38
AH
3707 int len = adjustments.length ();
3708 tree *link = &DECL_ARGUMENTS (fndecl);
3709 tree new_arg_types = NULL;
3710 for (int i = 0; i < len; i++)
3f84bf08
MJ
3711 {
3712 struct ipa_parm_adjustment *adj;
3713 gcc_assert (link);
3714
9771b263 3715 adj = &adjustments[i];
31519c38
AH
3716 tree parm;
3717 if (adj->op == IPA_PARM_OP_NEW)
3718 parm = NULL;
3719 else
3720 parm = oparms[adj->base_index];
3f84bf08
MJ
3721 adj->base = parm;
3722
31519c38 3723 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3724 {
3725 if (care_for_types)
9771b263 3726 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3727 new_arg_types);
3728 *link = parm;
910ad8de 3729 link = &DECL_CHAIN (parm);
3f84bf08 3730 }
31519c38 3731 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3732 {
3733 tree new_parm;
3734 tree ptype;
3735
3736 if (adj->by_ref)
3737 ptype = build_pointer_type (adj->type);
3738 else
e69dbe37
MJ
3739 {
3740 ptype = adj->type;
3741 if (is_gimple_reg_type (ptype))
3742 {
3743 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3744 if (TYPE_ALIGN (ptype) < malign)
3745 ptype = build_aligned_type (ptype, malign);
3746 }
3747 }
3f84bf08
MJ
3748
3749 if (care_for_types)
3750 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3751
3752 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3753 ptype);
31519c38
AH
3754 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3755 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
3756 DECL_ARTIFICIAL (new_parm) = 1;
3757 DECL_ARG_TYPE (new_parm) = ptype;
3758 DECL_CONTEXT (new_parm) = fndecl;
3759 TREE_USED (new_parm) = 1;
3760 DECL_IGNORED_P (new_parm) = 1;
3761 layout_decl (new_parm, 0);
3762
31519c38
AH
3763 if (adj->op == IPA_PARM_OP_NEW)
3764 adj->base = NULL;
3765 else
3766 adj->base = parm;
3767 adj->new_decl = new_parm;
3f84bf08
MJ
3768
3769 *link = new_parm;
910ad8de 3770 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3771 }
3772 }
3773
3774 *link = NULL_TREE;
3775
31519c38 3776 tree new_reversed = NULL;
3f84bf08
MJ
3777 if (care_for_types)
3778 {
3779 new_reversed = nreverse (new_arg_types);
3780 if (last_parm_void)
3781 {
3782 if (new_reversed)
3783 TREE_CHAIN (new_arg_types) = void_list_node;
3784 else
3785 new_reversed = void_list_node;
3786 }
3787 }
3788
3789 /* Use copy_node to preserve as much as possible from original type
3790 (debug info, attribute lists etc.)
3791 Exception is METHOD_TYPEs must have THIS argument.
3792 When we are asked to remove it, we need to build new FUNCTION_TYPE
3793 instead. */
31519c38 3794 tree new_type = NULL;
3f84bf08 3795 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 3796 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 3797 && adjustments[0].base_index == 0))
3f84bf08 3798 {
4eb3f32c 3799 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3800 TYPE_ARG_TYPES (new_type) = new_reversed;
3801 }
3802 else
3803 {
3804 new_type
3805 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3806 new_reversed));
3807 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3808 DECL_VINDEX (fndecl) = NULL_TREE;
3809 }
3810
d402c33d
JH
3811 /* When signature changes, we need to clear builtin info. */
3812 if (DECL_BUILT_IN (fndecl))
3813 {
3814 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3815 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3816 }
3817
3f84bf08
MJ
3818 /* This is a new type, not a copy of an old type. Need to reassociate
3819 variants. We can handle everything except the main variant lazily. */
31519c38 3820 tree t = TYPE_MAIN_VARIANT (orig_type);
3f84bf08
MJ
3821 if (orig_type != t)
3822 {
3823 TYPE_MAIN_VARIANT (new_type) = t;
3824 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3825 TYPE_NEXT_VARIANT (t) = new_type;
3826 }
3827 else
3828 {
3829 TYPE_MAIN_VARIANT (new_type) = new_type;
3830 TYPE_NEXT_VARIANT (new_type) = NULL;
3831 }
3832
3833 TREE_TYPE (fndecl) = new_type;
9b389a5e 3834 DECL_VIRTUAL_P (fndecl) = 0;
70d6d5c1 3835 DECL_LANG_SPECIFIC (fndecl) = NULL;
9771b263
DN
3836 otypes.release ();
3837 oparms.release ();
3f84bf08
MJ
3838}
3839
3840/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3841 If this is a directly recursive call, CS must be NULL. Otherwise it must
3842 contain the corresponding call graph edge. */
3843
3844void
3845ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3846 ipa_parm_adjustment_vec adjustments)
3847{
82338059 3848 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3849 vec<tree> vargs;
3850 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3851 gimple new_stmt;
82338059 3852 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3853 tree callee_decl;
3854 int i, len;
3855
9771b263
DN
3856 len = adjustments.length ();
3857 vargs.create (len);
67348ccc
DM
3858 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3859 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3860
3861 gsi = gsi_for_stmt (stmt);
82338059
MJ
3862 prev_gsi = gsi;
3863 gsi_prev (&prev_gsi);
3f84bf08
MJ
3864 for (i = 0; i < len; i++)
3865 {
3866 struct ipa_parm_adjustment *adj;
3867
9771b263 3868 adj = &adjustments[i];
3f84bf08 3869
31519c38 3870 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3871 {
3872 tree arg = gimple_call_arg (stmt, adj->base_index);
3873
9771b263 3874 vargs.quick_push (arg);
3f84bf08 3875 }
31519c38 3876 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 3877 {
fffe1e40
MJ
3878 tree expr, base, off;
3879 location_t loc;
f43245d1 3880 unsigned int deref_align = 0;
c1ed6a01 3881 bool deref_base = false;
fffe1e40
MJ
3882
3883 /* We create a new parameter out of the value of the old one, we can
3884 do the following kind of transformations:
3885
3886 - A scalar passed by reference is converted to a scalar passed by
3887 value. (adj->by_ref is false and the type of the original
3888 actual argument is a pointer to a scalar).
3889
3890 - A part of an aggregate is passed instead of the whole aggregate.
3891 The part can be passed either by value or by reference, this is
3892 determined by value of adj->by_ref. Moreover, the code below
3893 handles both situations when the original aggregate is passed by
3894 value (its type is not a pointer) and when it is passed by
3895 reference (it is a pointer to an aggregate).
3896
3897 When the new argument is passed by reference (adj->by_ref is true)
3898 it must be a part of an aggregate and therefore we form it by
3899 simply taking the address of a reference inside the original
3900 aggregate. */
3901
3902 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3903 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3904 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3905 : EXPR_LOCATION (base);
fffe1e40 3906
82d49829
MJ
3907 if (TREE_CODE (base) != ADDR_EXPR
3908 && POINTER_TYPE_P (TREE_TYPE (base)))
3909 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3910 adj->offset / BITS_PER_UNIT);
3f84bf08 3911 else
3f84bf08 3912 {
fffe1e40
MJ
3913 HOST_WIDE_INT base_offset;
3914 tree prev_base;
c1ed6a01 3915 bool addrof;
fffe1e40
MJ
3916
3917 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3918 {
3919 base = TREE_OPERAND (base, 0);
3920 addrof = true;
3921 }
3922 else
3923 addrof = false;
fffe1e40
MJ
3924 prev_base = base;
3925 base = get_addr_base_and_unit_offset (base, &base_offset);
3926 /* Aggregate arguments can have non-invariant addresses. */
3927 if (!base)
3928 {
3929 base = build_fold_addr_expr (prev_base);
82d49829 3930 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3931 adj->offset / BITS_PER_UNIT);
3932 }
3933 else if (TREE_CODE (base) == MEM_REF)
3934 {
c1ed6a01
MJ
3935 if (!addrof)
3936 {
3937 deref_base = true;
3938 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3939 }
82d49829 3940 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3941 base_offset
3942 + adj->offset / BITS_PER_UNIT);
3943 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3944 off);
fffe1e40
MJ
3945 base = TREE_OPERAND (base, 0);
3946 }
3947 else
3948 {
82d49829 3949 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3950 base_offset
3951 + adj->offset / BITS_PER_UNIT);
3952 base = build_fold_addr_expr (base);
3953 }
3f84bf08 3954 }
fffe1e40 3955
3a5a825a
RG
3956 if (!adj->by_ref)
3957 {
3958 tree type = adj->type;
3959 unsigned int align;
3960 unsigned HOST_WIDE_INT misalign;
644ffefd 3961
c1ed6a01
MJ
3962 if (deref_base)
3963 {
3964 align = deref_align;
3965 misalign = 0;
3966 }
3967 else
3968 {
3969 get_pointer_alignment_1 (base, &align, &misalign);
3970 if (TYPE_ALIGN (type) > align)
3971 align = TYPE_ALIGN (type);
3972 }
807e902e 3973 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3a5a825a
RG
3974 * BITS_PER_UNIT);
3975 misalign = misalign & (align - 1);
3976 if (misalign != 0)
3977 align = (misalign & -misalign);
3978 if (align < TYPE_ALIGN (type))
3979 type = build_aligned_type (type, align);
4df65a85
RB
3980 base = force_gimple_operand_gsi (&gsi, base,
3981 true, NULL, true, GSI_SAME_STMT);
3a5a825a 3982 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4df65a85
RB
3983 /* If expr is not a valid gimple call argument emit
3984 a load into a temporary. */
3985 if (is_gimple_reg_type (TREE_TYPE (expr)))
3986 {
3987 gimple tem = gimple_build_assign (NULL_TREE, expr);
3988 if (gimple_in_ssa_p (cfun))
3989 {
3990 gimple_set_vuse (tem, gimple_vuse (stmt));
3991 expr = make_ssa_name (TREE_TYPE (expr), tem);
3992 }
3993 else
3994 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
3995 gimple_assign_set_lhs (tem, expr);
3996 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3997 }
3a5a825a
RG
3998 }
3999 else
4000 {
4001 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4002 expr = build_fold_addr_expr (expr);
4df65a85
RB
4003 expr = force_gimple_operand_gsi (&gsi, expr,
4004 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4005 }
9771b263 4006 vargs.quick_push (expr);
3f84bf08 4007 }
31519c38 4008 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
4009 {
4010 unsigned int ix;
4011 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4012 gimple def_temp;
4013
4014 arg = gimple_call_arg (stmt, adj->base_index);
4015 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4016 {
4017 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4018 continue;
4019 arg = fold_convert_loc (gimple_location (stmt),
4020 TREE_TYPE (origin), arg);
4021 }
4022 if (debug_args == NULL)
4023 debug_args = decl_debug_args_insert (callee_decl);
9771b263 4024 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
4025 if (ddecl == origin)
4026 {
9771b263 4027 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
4028 break;
4029 }
4030 if (ddecl == NULL)
4031 {
4032 ddecl = make_node (DEBUG_EXPR_DECL);
4033 DECL_ARTIFICIAL (ddecl) = 1;
4034 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4035 DECL_MODE (ddecl) = DECL_MODE (origin);
4036
9771b263
DN
4037 vec_safe_push (*debug_args, origin);
4038 vec_safe_push (*debug_args, ddecl);
ddb555ed 4039 }
9771b263 4040 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
4041 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4042 }
3f84bf08
MJ
4043 }
4044
4045 if (dump_file && (dump_flags & TDF_DETAILS))
4046 {
4047 fprintf (dump_file, "replacing stmt:");
4048 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4049 }
4050
3f84bf08 4051 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 4052 vargs.release ();
3f84bf08
MJ
4053 if (gimple_call_lhs (stmt))
4054 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4055
4056 gimple_set_block (new_stmt, gimple_block (stmt));
4057 if (gimple_has_location (stmt))
4058 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 4059 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 4060 gimple_call_copy_flags (new_stmt, stmt);
4df65a85
RB
4061 if (gimple_in_ssa_p (cfun))
4062 {
4063 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4064 if (gimple_vdef (stmt))
4065 {
4066 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4067 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4068 }
4069 }
3f84bf08
MJ
4070
4071 if (dump_file && (dump_flags & TDF_DETAILS))
4072 {
4073 fprintf (dump_file, "with stmt:");
4074 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4075 fprintf (dump_file, "\n");
4076 }
4077 gsi_replace (&gsi, new_stmt, true);
4078 if (cs)
4079 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
4080 do
4081 {
4082 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4083 gsi_prev (&gsi);
4084 }
3d354792 4085 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3f84bf08
MJ
4086}
4087
31519c38
AH
4088/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4089 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4090 specifies whether the function should care about type incompatibility the
4091 current and new expressions. If it is false, the function will leave
4092 incompatibility issues to the caller. Return true iff the expression
4093 was modified. */
4094
4095bool
4096ipa_modify_expr (tree *expr, bool convert,
4097 ipa_parm_adjustment_vec adjustments)
4098{
4099 struct ipa_parm_adjustment *cand
4100 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4101 if (!cand)
4102 return false;
4103
4104 tree src;
4105 if (cand->by_ref)
4106 src = build_simple_mem_ref (cand->new_decl);
4107 else
4108 src = cand->new_decl;
4109
4110 if (dump_file && (dump_flags & TDF_DETAILS))
4111 {
4112 fprintf (dump_file, "About to replace expr ");
4113 print_generic_expr (dump_file, *expr, 0);
4114 fprintf (dump_file, " with ");
4115 print_generic_expr (dump_file, src, 0);
4116 fprintf (dump_file, "\n");
4117 }
4118
4119 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4120 {
4121 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4122 *expr = vce;
4123 }
4124 else
4125 *expr = src;
4126 return true;
4127}
4128
4129/* If T is an SSA_NAME, return NULL if it is not a default def or
4130 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4131 the base variable is always returned, regardless if it is a default
4132 def. Return T if it is not an SSA_NAME. */
4133
4134static tree
4135get_ssa_base_param (tree t, bool ignore_default_def)
4136{
4137 if (TREE_CODE (t) == SSA_NAME)
4138 {
4139 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4140 return SSA_NAME_VAR (t);
4141 else
4142 return NULL_TREE;
4143 }
4144 return t;
4145}
4146
4147/* Given an expression, return an adjustment entry specifying the
4148 transformation to be done on EXPR. If no suitable adjustment entry
4149 was found, returns NULL.
4150
4151 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4152 default def, otherwise bail on them.
4153
4154 If CONVERT is non-NULL, this function will set *CONVERT if the
4155 expression provided is a component reference. ADJUSTMENTS is the
4156 adjustments vector. */
4157
4158ipa_parm_adjustment *
4159ipa_get_adjustment_candidate (tree **expr, bool *convert,
4160 ipa_parm_adjustment_vec adjustments,
4161 bool ignore_default_def)
4162{
4163 if (TREE_CODE (**expr) == BIT_FIELD_REF
4164 || TREE_CODE (**expr) == IMAGPART_EXPR
4165 || TREE_CODE (**expr) == REALPART_EXPR)
4166 {
4167 *expr = &TREE_OPERAND (**expr, 0);
4168 if (convert)
4169 *convert = true;
4170 }
4171
4172 HOST_WIDE_INT offset, size, max_size;
4173 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4174 if (!base || size == -1 || max_size == -1)
4175 return NULL;
4176
4177 if (TREE_CODE (base) == MEM_REF)
4178 {
807e902e 4179 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
31519c38
AH
4180 base = TREE_OPERAND (base, 0);
4181 }
4182
4183 base = get_ssa_base_param (base, ignore_default_def);
4184 if (!base || TREE_CODE (base) != PARM_DECL)
4185 return NULL;
4186
4187 struct ipa_parm_adjustment *cand = NULL;
4188 unsigned int len = adjustments.length ();
4189 for (unsigned i = 0; i < len; i++)
4190 {
4191 struct ipa_parm_adjustment *adj = &adjustments[i];
4192
4193 if (adj->base == base
4194 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4195 {
4196 cand = adj;
4197 break;
4198 }
4199 }
4200
4201 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4202 return NULL;
4203 return cand;
4204}
4205
3f84bf08
MJ
4206/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4207
4208static bool
4209index_in_adjustments_multiple_times_p (int base_index,
4210 ipa_parm_adjustment_vec adjustments)
4211{
9771b263 4212 int i, len = adjustments.length ();
3f84bf08
MJ
4213 bool one = false;
4214
4215 for (i = 0; i < len; i++)
4216 {
4217 struct ipa_parm_adjustment *adj;
9771b263 4218 adj = &adjustments[i];
3f84bf08
MJ
4219
4220 if (adj->base_index == base_index)
4221 {
4222 if (one)
4223 return true;
4224 else
4225 one = true;
4226 }
4227 }
4228 return false;
4229}
4230
4231
4232/* Return adjustments that should have the same effect on function parameters
4233 and call arguments as if they were first changed according to adjustments in
4234 INNER and then by adjustments in OUTER. */
4235
4236ipa_parm_adjustment_vec
4237ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4238 ipa_parm_adjustment_vec outer)
4239{
9771b263
DN
4240 int i, outlen = outer.length ();
4241 int inlen = inner.length ();
3f84bf08
MJ
4242 int removals = 0;
4243 ipa_parm_adjustment_vec adjustments, tmp;
4244
9771b263 4245 tmp.create (inlen);
3f84bf08
MJ
4246 for (i = 0; i < inlen; i++)
4247 {
4248 struct ipa_parm_adjustment *n;
9771b263 4249 n = &inner[i];
3f84bf08 4250
31519c38 4251 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4252 removals++;
4253 else
31519c38
AH
4254 {
4255 /* FIXME: Handling of new arguments are not implemented yet. */
4256 gcc_assert (n->op != IPA_PARM_OP_NEW);
4257 tmp.quick_push (*n);
4258 }
3f84bf08
MJ
4259 }
4260
9771b263 4261 adjustments.create (outlen + removals);
3f84bf08
MJ
4262 for (i = 0; i < outlen; i++)
4263 {
f32682ca 4264 struct ipa_parm_adjustment r;
9771b263
DN
4265 struct ipa_parm_adjustment *out = &outer[i];
4266 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 4267
f32682ca 4268 memset (&r, 0, sizeof (r));
31519c38
AH
4269 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4270 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4271 {
4272 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4273 {
31519c38 4274 r.op = IPA_PARM_OP_REMOVE;
9771b263 4275 adjustments.quick_push (r);
3f84bf08
MJ
4276 }
4277 continue;
4278 }
31519c38
AH
4279 else
4280 {
4281 /* FIXME: Handling of new arguments are not implemented yet. */
4282 gcc_assert (out->op != IPA_PARM_OP_NEW);
4283 }
3f84bf08 4284
f32682ca
DN
4285 r.base_index = in->base_index;
4286 r.type = out->type;
3f84bf08
MJ
4287
4288 /* FIXME: Create nonlocal value too. */
4289
31519c38
AH
4290 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4291 r.op = IPA_PARM_OP_COPY;
4292 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 4293 r.offset = out->offset;
31519c38 4294 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 4295 r.offset = in->offset;
3f84bf08 4296 else
f32682ca 4297 r.offset = in->offset + out->offset;
9771b263 4298 adjustments.quick_push (r);
3f84bf08
MJ
4299 }
4300
4301 for (i = 0; i < inlen; i++)
4302 {
9771b263 4303 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 4304
31519c38 4305 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 4306 adjustments.quick_push (*n);
3f84bf08
MJ
4307 }
4308
9771b263 4309 tmp.release ();
3f84bf08
MJ
4310 return adjustments;
4311}
4312
4313/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4314 friendly way, assuming they are meant to be applied to FNDECL. */
4315
4316void
4317ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4318 tree fndecl)
4319{
9771b263 4320 int i, len = adjustments.length ();
3f84bf08 4321 bool first = true;
9771b263 4322 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
4323
4324 fprintf (file, "IPA param adjustments: ");
4325 for (i = 0; i < len; i++)
4326 {
4327 struct ipa_parm_adjustment *adj;
9771b263 4328 adj = &adjustments[i];
3f84bf08
MJ
4329
4330 if (!first)
4331 fprintf (file, " ");
4332 else
4333 first = false;
4334
4335 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 4336 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
4337 if (adj->base)
4338 {
4339 fprintf (file, ", base: ");
4340 print_generic_expr (file, adj->base, 0);
4341 }
31519c38 4342 if (adj->new_decl)
3f84bf08 4343 {
31519c38
AH
4344 fprintf (file, ", new_decl: ");
4345 print_generic_expr (file, adj->new_decl, 0);
3f84bf08
MJ
4346 }
4347 if (adj->new_ssa_base)
4348 {
4349 fprintf (file, ", new_ssa_base: ");
4350 print_generic_expr (file, adj->new_ssa_base, 0);
4351 }
4352
31519c38 4353 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08 4354 fprintf (file, ", copy_param");
31519c38 4355 else if (adj->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4356 fprintf (file, ", remove_param");
4357 else
4358 fprintf (file, ", offset %li", (long) adj->offset);
4359 if (adj->by_ref)
4360 fprintf (file, ", by_ref");
4361 print_node_brief (file, ", type: ", adj->type, 0);
4362 fprintf (file, "\n");
4363 }
9771b263 4364 parms.release ();
3f84bf08
MJ
4365}
4366
2c9561b5
MJ
4367/* Dump the AV linked list. */
4368
4369void
4370ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4371{
4372 bool comma = false;
4373 fprintf (f, " Aggregate replacements:");
4374 for (; av; av = av->next)
4375 {
4376 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4377 av->index, av->offset);
4378 print_generic_expr (f, av->value, 0);
4379 comma = true;
4380 }
4381 fprintf (f, "\n");
4382}
4383
fb3f88cc
JH
4384/* Stream out jump function JUMP_FUNC to OB. */
4385
4386static void
4387ipa_write_jump_function (struct output_block *ob,
4388 struct ipa_jump_func *jump_func)
4389{
8b7773a4
MJ
4390 struct ipa_agg_jf_item *item;
4391 struct bitpack_d bp;
4392 int i, count;
fb3f88cc 4393
8b7773a4 4394 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
4395 switch (jump_func->type)
4396 {
4397 case IPA_JF_UNKNOWN:
4398 break;
b258210c 4399 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
4400 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4401 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4402 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 4403 break;
fb3f88cc 4404 case IPA_JF_CONST:
5368224f 4405 gcc_assert (
4502fe8d
MJ
4406 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4407 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
4408 break;
4409 case IPA_JF_PASS_THROUGH:
412288f1 4410 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4411 if (jump_func->value.pass_through.operation == NOP_EXPR)
4412 {
4413 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4414 bp = bitpack_create (ob->main_stream);
4415 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 4416 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
4417 streamer_write_bitpack (&bp);
4418 }
4419 else
4420 {
4421 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4422 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4423 }
fb3f88cc
JH
4424 break;
4425 case IPA_JF_ANCESTOR:
412288f1 4426 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 4427 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 4428 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4429 bp = bitpack_create (ob->main_stream);
4430 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 4431 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 4432 streamer_write_bitpack (&bp);
fb3f88cc 4433 break;
8b7773a4
MJ
4434 }
4435
9771b263 4436 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4437 streamer_write_uhwi (ob, count);
4438 if (count)
4439 {
4440 bp = bitpack_create (ob->main_stream);
4441 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4442 streamer_write_bitpack (&bp);
4443 }
4444
9771b263 4445 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4446 {
4447 streamer_write_uhwi (ob, item->offset);
4448 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
4449 }
4450}
4451
4452/* Read in jump function JUMP_FUNC from IB. */
4453
4454static void
4455ipa_read_jump_function (struct lto_input_block *ib,
4456 struct ipa_jump_func *jump_func,
4502fe8d 4457 struct cgraph_edge *cs,
fb3f88cc
JH
4458 struct data_in *data_in)
4459{
4a53743e
MJ
4460 enum jump_func_type jftype;
4461 enum tree_code operation;
8b7773a4 4462 int i, count;
fb3f88cc 4463
4a53743e
MJ
4464 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4465 switch (jftype)
fb3f88cc
JH
4466 {
4467 case IPA_JF_UNKNOWN:
4a53743e 4468 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4469 break;
b258210c 4470 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4471 {
4472 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4473 tree base_type = stream_read_tree (ib, data_in);
4474 tree component_type = stream_read_tree (ib, data_in);
4475
4476 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4477 break;
4478 }
fb3f88cc 4479 case IPA_JF_CONST:
4502fe8d 4480 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4481 break;
4482 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4483 operation = (enum tree_code) streamer_read_uhwi (ib);
4484 if (operation == NOP_EXPR)
4485 {
4486 int formal_id = streamer_read_uhwi (ib);
4487 struct bitpack_d bp = streamer_read_bitpack (ib);
4488 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4489 bool type_preserved = bp_unpack_value (&bp, 1);
4490 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4491 type_preserved);
4a53743e
MJ
4492 }
4493 else
4494 {
4495 tree operand = stream_read_tree (ib, data_in);
4496 int formal_id = streamer_read_uhwi (ib);
4497 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4498 operation);
4499 }
fb3f88cc
JH
4500 break;
4501 case IPA_JF_ANCESTOR:
4a53743e
MJ
4502 {
4503 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4504 tree type = stream_read_tree (ib, data_in);
4505 int formal_id = streamer_read_uhwi (ib);
4506 struct bitpack_d bp = streamer_read_bitpack (ib);
4507 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4508 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4509
b8f6e610
MJ
4510 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4511 type_preserved);
4a53743e
MJ
4512 break;
4513 }
8b7773a4
MJ
4514 }
4515
4516 count = streamer_read_uhwi (ib);
9771b263 4517 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4518 if (count)
4519 {
4a53743e 4520 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4521 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4522 }
4523 for (i = 0; i < count; i++)
4524 {
f32682ca
DN
4525 struct ipa_agg_jf_item item;
4526 item.offset = streamer_read_uhwi (ib);
4527 item.value = stream_read_tree (ib, data_in);
9771b263 4528 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4529 }
4530}
4531
e33c6cd6
MJ
4532/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4533 relevant to indirect inlining to OB. */
661e7330
MJ
4534
4535static void
e33c6cd6
MJ
4536ipa_write_indirect_edge_info (struct output_block *ob,
4537 struct cgraph_edge *cs)
661e7330 4538{
e33c6cd6 4539 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4540 struct bitpack_d bp;
e33c6cd6 4541
412288f1 4542 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4543 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4544 bp = bitpack_create (ob->main_stream);
4545 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4546 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4547 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4548 bp_pack_value (&bp, ii->by_ref, 1);
68377e53
JH
4549 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4550 bp_pack_value (&bp, ii->maybe_derived_type, 1);
412288f1 4551 streamer_write_bitpack (&bp);
b258210c
MJ
4552
4553 if (ii->polymorphic)
4554 {
412288f1 4555 streamer_write_hwi (ob, ii->otr_token);
b9393656 4556 stream_write_tree (ob, ii->otr_type, true);
68377e53 4557 stream_write_tree (ob, ii->outer_type, true);
b258210c 4558 }
661e7330
MJ
4559}
4560
e33c6cd6
MJ
4561/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4562 relevant to indirect inlining from IB. */
661e7330
MJ
4563
4564static void
e33c6cd6
MJ
4565ipa_read_indirect_edge_info (struct lto_input_block *ib,
4566 struct data_in *data_in ATTRIBUTE_UNUSED,
4567 struct cgraph_edge *cs)
661e7330 4568{
e33c6cd6 4569 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4570 struct bitpack_d bp;
661e7330 4571
412288f1 4572 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4573 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4574 bp = streamer_read_bitpack (ib);
2465dcc2 4575 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4576 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4577 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4578 ii->by_ref = bp_unpack_value (&bp, 1);
68377e53
JH
4579 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4580 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
b258210c
MJ
4581 if (ii->polymorphic)
4582 {
412288f1 4583 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4584 ii->otr_type = stream_read_tree (ib, data_in);
68377e53 4585 ii->outer_type = stream_read_tree (ib, data_in);
b258210c 4586 }
661e7330
MJ
4587}
4588
fb3f88cc
JH
/* Stream out NODE info to OB.  Emits, in order: the node reference, the
   parameter count and per-parameter move costs, a bitpack of the
   parameter-used flags, the controlled-uses counts, and finally the jump
   functions of all direct and indirect call edges.
   ipa_read_node_info must read back in exactly this order.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Encode the node itself so the reader can look it up in its own
     symtab encoder.  */
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Nodes with parameters must have been analyzed already, and nodes in
     a transient state (enqueued, or IPA-CP clones) must not be streamed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  /* Jump functions of direct call edges.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  /* Jump functions of indirect call edges, plus the extra indirect call
     info needed for indirect inlining.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
4636
/* Stream in NODE info from IB.  Must mirror ipa_write_node_info exactly:
   parameter count, move costs, used-flags bitpack, controlled-uses counts,
   then jump functions for direct and indirect call edges.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  /* Allocate descriptors sized by the streamed parameter count.  */
  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted analysis_done for any node with parameters, so
     restore that state here.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				    data_in);
	}
      /* Indirect edges additionally carry cgraph_indirect_call_info,
	 streamed even when there are no jump functions.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4689
/* Write jump functions for all analyzed nodes in the current LTO
   partition into an LTO_section_jump_functions section: first the number
   of such nodes, then one ipa_write_node_info record per node.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;


  /* Nothing to stream if IPA analysis never ran.  */
  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass: count the nodes that will actually be emitted, since the
     count must precede the records in the stream.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  /* Terminating zero byte, then emit the section.  */
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4732
/* Read section in file FILE_DATA of length LEN with data DATA.  The
   section layout is: lto_function_header, CFG area (unused here), main
   stream with the node count followed by per-node records, then the
   string table.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      /* Map the streamed node reference back to a cgraph node via this
	 file's symtab encoder.  */
      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4773
/* Read ipcp jump functions.  Iterates over all LTO input files and
   processes each one's LTO_section_jump_functions section, if present.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  /* Make sure the per-node and per-edge summary vectors exist and that
     cgraph hooks keep them up to date while we populate them.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);

      /* A file may legitimately lack the section; skip it then.  */
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
4796
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Ensure the IPA summary vectors exist so later passes can rely
   on them.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
2c9561b5
MJ
4807
/* Stream out the aggregate value replacement chain of NODE to OB: the
   encoded node reference, the number of replacement values, then for each
   value its offset, parameter index, value tree and by_ref flag.
   read_agg_replacement_chain is the inverse.  */

void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* The list length must precede the entries in the stream.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}
4838
/* Stream in the aggregate value replacement chain for NODE from IB.
   Reads the count and entries written by write_agg_replacement_chain and
   attaches the rebuilt chain to NODE.  Note the chain is rebuilt in
   reverse stream order (each entry is pushed onto the list head).  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
			    struct cgraph_node *node,
			    struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      /* GC-allocated: the chain lives in the node summary across passes.  */
      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}
4866
/* Write all aggregate replacement for nodes in set.  Emits an
   LTO_section_ipcp_transform section containing the number of nodes with
   replacements followed by one replacement chain per node.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to do if IPA-CP recorded no replacements at all.  */
  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  /* First pass: count emitted nodes, since the count precedes the
     records in the stream.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  /* Terminating zero byte, then emit the section.  */
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4907
4908/* Read replacements section in file FILE_DATA of length LEN with data
4909 DATA. */
4910
4911static void
4912read_replacements_section (struct lto_file_decl_data *file_data,
4913 const char *data,
4914 size_t len)
4915{
4916 const struct lto_function_header *header =
4917 (const struct lto_function_header *) data;
4918 const int cfg_offset = sizeof (struct lto_function_header);
4919 const int main_offset = cfg_offset + header->cfg_size;
4920 const int string_offset = main_offset + header->main_size;
4921 struct data_in *data_in;
4922 struct lto_input_block ib_main;
4923 unsigned int i;
4924 unsigned int count;
4925
4926 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4927 header->main_size);
4928
4929 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4930 header->string_size, vNULL);
2c9561b5
MJ
4931 count = streamer_read_uhwi (&ib_main);
4932
4933 for (i = 0; i < count; i++)
4934 {
4935 unsigned int index;
4936 struct cgraph_node *node;
4937 lto_symtab_encoder_t encoder;
4938
4939 index = streamer_read_uhwi (&ib_main);
4940 encoder = file_data->symtab_node_encoder;
4941 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4942 gcc_assert (node->definition);
2c9561b5
MJ
4943 read_agg_replacement_chain (&ib_main, node, data_in);
4944 }
4945 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4946 len);
4947 lto_data_in_delete (data_in);
4948}
4949
/* Read IPA-CP aggregate replacements.  Iterates over all LTO input files
   and processes each one's LTO_section_ipcp_transform section, if
   present.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      /* A file may legitimately lack the section; skip it then.  */
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
4969
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  For a clone with combined_args_to_skip set, each replacement's
   parameter index is remapped from the original decl's numbering to the
   clone's (skipped parameters shift subsequent indices down).  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  /* Nothing to remap if the clone dropped no arguments.  */
  if (!node->clone.combined_args_to_skip)
    return;

  /* Find the highest parameter index referenced, so the remap table can
     be sized as c = max index + 1.  */
  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  /* Build old-index -> new-index map; skipped slots get -1 and each
     skipped parameter shifts the following ones down by one.  */
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
5004
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  Visits each
   basic block (before its dominated children) and replaces loads from
   aggregate parameter contents with the constants recorded in the
   aggregate replacement chain.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI: function body analysis context.  DESCS: parameter descriptors of
     the function being modified.  AV: aggregate replacement chain.
     SC/CC: out-flags set when a statement, respectively the CFG, has been
     changed.  */
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
5025
5026void
5027ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5028{
5029 gimple_stmt_iterator gsi;
5030 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5031 {
5032 struct ipa_agg_replacement_value *v;
5033 gimple stmt = gsi_stmt (gsi);
5034 tree rhs, val, t;
5035 HOST_WIDE_INT offset, size;
5036 int index;
5037 bool by_ref, vce;
5038
5039 if (!gimple_assign_load_p (stmt))
5040 continue;
5041 rhs = gimple_assign_rhs1 (stmt);
5042 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5043 continue;
2c9561b5 5044
8aab5218
MJ
5045 vce = false;
5046 t = rhs;
5047 while (handled_component_p (t))
5048 {
5049 /* V_C_E can do things like convert an array of integers to one
5050 bigger integer and similar things we do not handle below. */
5051 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5052 {
5053 vce = true;
5054 break;
5055 }
5056 t = TREE_OPERAND (t, 0);
5057 }
5058 if (vce)
5059 continue;
5060
5061 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5062 &offset, &size, &by_ref))
5063 continue;
5064 for (v = m_aggval; v; v = v->next)
5065 if (v->index == index
5066 && v->offset == offset)
5067 break;
5068 if (!v
5069 || v->by_ref != by_ref
5070 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5071 continue;
5072
5073 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5074 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5075 {
5076 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5077 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5078 else if (TYPE_SIZE (TREE_TYPE (rhs))
5079 == TYPE_SIZE (TREE_TYPE (v->value)))
5080 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5081 else
5082 {
5083 if (dump_file)
5084 {
5085 fprintf (dump_file, " const ");
5086 print_generic_expr (dump_file, v->value, 0);
5087 fprintf (dump_file, " can't be converted to type of ");
5088 print_generic_expr (dump_file, rhs, 0);
5089 fprintf (dump_file, "\n");
5090 }
5091 continue;
5092 }
5093 }
5094 else
5095 val = v->value;
5096
5097 if (dump_file && (dump_flags & TDF_DETAILS))
5098 {
5099 fprintf (dump_file, "Modifying stmt:\n ");
5100 print_gimple_stmt (dump_file, stmt, 0, 0);
5101 }
5102 gimple_assign_set_rhs_from_tree (&gsi, val);
5103 update_stmt (stmt);
5104
5105 if (dump_file && (dump_flags & TDF_DETAILS))
5106 {
5107 fprintf (dump_file, "into:\n ");
5108 print_gimple_stmt (dump_file, stmt, 0, 0);
5109 fprintf (dump_file, "\n");
5110 }
5111
5112 *m_something_changed = true;
5113 if (maybe_clean_eh_stmt (stmt)
5114 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5115 *m_cfg_changed = true;
5116 }
5117
5118}
5119
/* IPCP transformation phase doing propagation of aggregate values.
   Applies the aggregate replacement chain recorded for NODE to its body
   via a dominator walk, then releases the per-function analysis data.
   Returns TODO flags for the pass manager (0 when nothing changed).  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  /* Bail out early when there is nothing to substitute.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices if this clone dropped parameters.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function body analysis context used by the walker.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release per-BB info, dominance info, and the consumed replacement
     chain; the replacements must not be applied twice.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}