]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
parser.c (cp_parser_compound_literal_p): New.
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
23a5b65a 2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
2fb9a547
AM
24#include "basic-block.h"
25#include "tree-ssa-alias.h"
26#include "internal-fn.h"
27#include "gimple-fold.h"
28#include "tree-eh.h"
29#include "gimple-expr.h"
30#include "is-a.h"
18f429e2 31#include "gimple.h"
d8a2d370
DN
32#include "expr.h"
33#include "stor-layout.h"
34#include "print-tree.h"
45b0be94 35#include "gimplify.h"
5be5c238 36#include "gimple-iterator.h"
18f429e2 37#include "gimplify-me.h"
5be5c238 38#include "gimple-walk.h"
518dc859 39#include "langhooks.h"
518dc859 40#include "target.h"
518dc859 41#include "ipa-prop.h"
442b4905
AM
42#include "bitmap.h"
43#include "gimple-ssa.h"
44#include "tree-cfg.h"
45#include "tree-phinodes.h"
46#include "ssa-iterators.h"
47#include "tree-into-ssa.h"
48#include "tree-dfa.h"
518dc859 49#include "tree-pass.h"
771578a0 50#include "tree-inline.h"
0f378cb5 51#include "ipa-inline.h"
518dc859 52#include "flags.h"
3e293154 53#include "diagnostic.h"
cf835838 54#include "gimple-pretty-print.h"
fb3f88cc 55#include "lto-streamer.h"
f0efc7aa
DN
56#include "data-streamer.h"
57#include "tree-streamer.h"
dfea20f1 58#include "params.h"
450ad0cd 59#include "ipa-utils.h"
4df65a85
RB
60#include "stringpool.h"
61#include "tree-ssanames.h"
2b5f0895 62#include "dbgcnt.h"
8aab5218 63#include "domwalk.h"
9b2b7279 64#include "builtins.h"
771578a0 65
8aab5218
MJ
66/* Intermediate information that we get from alias analysis about a particular
67 parameter in a particular basic_block. When a parameter or the memory it
68 references is marked modified, we use that information in all dominatd
69 blocks without cosulting alias analysis oracle. */
062c604f 70
8aab5218 71struct param_aa_status
062c604f 72{
8aab5218
MJ
73 /* Set when this structure contains meaningful information. If not, the
74 structure describing a dominating BB should be used instead. */
75 bool valid;
76
77 /* Whether we have seen something which might have modified the data in
78 question. PARM is for the parameter itself, REF is for data it points to
79 but using the alias type of individual accesses and PT is the same thing
80 but for computing aggregate pass-through functions using a very inclusive
81 ao_ref. */
8b7773a4 82 bool parm_modified, ref_modified, pt_modified;
8aab5218
MJ
83};
84
85/* Information related to a given BB that used only when looking at function
86 body. */
87
88struct ipa_bb_info
89{
90 /* Call graph edges going out of this BB. */
91 vec<cgraph_edge_p> cg_edges;
92 /* Alias analysis statuses of each formal parameter at this bb. */
93 vec<param_aa_status> param_aa_statuses;
94};
95
96/* Structure with global information that is only used when looking at function
97 body. */
98
99struct func_body_info
100{
101 /* The node that is being analyzed. */
102 cgraph_node *node;
103
104 /* Its info. */
105 struct ipa_node_params *info;
106
107 /* Information about individual BBs. */
108 vec<ipa_bb_info> bb_infos;
109
110 /* Number of parameters. */
111 int param_count;
112
113 /* Number of statements already walked by when analyzing this function. */
114 unsigned int aa_walked;
062c604f
MJ
115};
116
771578a0 117/* Vector where the parameter infos are actually stored. */
84562394 118vec<ipa_node_params> ipa_node_params_vector;
2c9561b5 119/* Vector of known aggregate values in cloned nodes. */
9771b263 120vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
771578a0 121/* Vector where the parameter infos are actually stored. */
84562394 122vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
771578a0
MJ
123
124/* Holders of ipa cgraph hooks: */
e2c9111c
JH
125static struct cgraph_edge_hook_list *edge_removal_hook_holder;
126static struct cgraph_node_hook_list *node_removal_hook_holder;
127static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
128static struct cgraph_2node_hook_list *node_duplication_hook_holder;
40982661 129static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 130
4502fe8d
MJ
131/* Description of a reference to an IPA constant. */
132struct ipa_cst_ref_desc
133{
134 /* Edge that corresponds to the statement which took the reference. */
135 struct cgraph_edge *cs;
136 /* Linked list of duplicates created when call graph edges are cloned. */
137 struct ipa_cst_ref_desc *next_duplicate;
138 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
139 if out of control. */
140 int refcount;
141};
142
143/* Allocation pool for reference descriptions. */
144
145static alloc_pool ipa_refdesc_pool;
146
5fe8e757
MJ
147/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
148 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
149
150static bool
151ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
152{
67348ccc 153 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
154 struct cl_optimization *os;
155
156 if (!fs_opts)
157 return false;
158 os = TREE_OPTIMIZATION (fs_opts);
159 return !os->x_optimize || !os->x_flag_ipa_cp;
160}
161
be95e2b9
MJ
162/* Return index of the formal whose tree is PTREE in function which corresponds
163 to INFO. */
164
d044dd17 165static int
84562394 166ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
167{
168 int i, count;
169
9771b263 170 count = descriptors.length ();
518dc859 171 for (i = 0; i < count; i++)
9771b263 172 if (descriptors[i].decl == ptree)
518dc859
RL
173 return i;
174
175 return -1;
176}
177
d044dd17
MJ
178/* Return index of the formal whose tree is PTREE in function which corresponds
179 to INFO. */
180
181int
182ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
183{
184 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
185}
186
187/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
188 NODE. */
be95e2b9 189
f8e2a1ed
MJ
190static void
191ipa_populate_param_decls (struct cgraph_node *node,
84562394 192 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
193{
194 tree fndecl;
195 tree fnargs;
196 tree parm;
197 int param_num;
3e293154 198
67348ccc 199 fndecl = node->decl;
0e8853ee 200 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
201 fnargs = DECL_ARGUMENTS (fndecl);
202 param_num = 0;
910ad8de 203 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 204 {
9771b263 205 descriptors[param_num].decl = parm;
0e8853ee 206 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
518dc859
RL
207 param_num++;
208 }
209}
210
3f84bf08
MJ
211/* Return how many formal parameters FNDECL has. */
212
213static inline int
310bc633 214count_formal_params (tree fndecl)
3f84bf08
MJ
215{
216 tree parm;
217 int count = 0;
0e8853ee 218 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 219
910ad8de 220 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
221 count++;
222
223 return count;
224}
225
0e8853ee
JH
226/* Return the declaration of Ith formal parameter of the function corresponding
227 to INFO. Note there is no setter function as this array is built just once
228 using ipa_initialize_node_params. */
229
230void
231ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
232{
233 fprintf (file, "param #%i", i);
234 if (info->descriptors[i].decl)
235 {
236 fprintf (file, " ");
237 print_generic_expr (file, info->descriptors[i].decl, 0);
238 }
239}
240
241/* Initialize the ipa_node_params structure associated with NODE
242 to hold PARAM_COUNT parameters. */
243
244void
245ipa_alloc_node_params (struct cgraph_node *node, int param_count)
246{
247 struct ipa_node_params *info = IPA_NODE_REF (node);
248
249 if (!info->descriptors.exists () && param_count)
250 info->descriptors.safe_grow_cleared (param_count);
251}
252
f8e2a1ed
MJ
253/* Initialize the ipa_node_params structure associated with NODE by counting
254 the function parameters, creating the descriptors and populating their
255 param_decls. */
be95e2b9 256
f8e2a1ed
MJ
257void
258ipa_initialize_node_params (struct cgraph_node *node)
259{
260 struct ipa_node_params *info = IPA_NODE_REF (node);
261
9771b263 262 if (!info->descriptors.exists ())
f8e2a1ed 263 {
67348ccc 264 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 265 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 266 }
518dc859
RL
267}
268
749aa96d
MJ
269/* Print the jump functions associated with call graph edge CS to file F. */
270
271static void
272ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
273{
274 int i, count;
275
276 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
277 for (i = 0; i < count; i++)
278 {
279 struct ipa_jump_func *jump_func;
280 enum jump_func_type type;
281
282 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
283 type = jump_func->type;
284
285 fprintf (f, " param %d: ", i);
286 if (type == IPA_JF_UNKNOWN)
287 fprintf (f, "UNKNOWN\n");
288 else if (type == IPA_JF_KNOWN_TYPE)
289 {
c7573249
MJ
290 fprintf (f, "KNOWN TYPE: base ");
291 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
292 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
293 jump_func->value.known_type.offset);
294 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
295 fprintf (f, "\n");
749aa96d
MJ
296 }
297 else if (type == IPA_JF_CONST)
298 {
4502fe8d 299 tree val = jump_func->value.constant.value;
749aa96d
MJ
300 fprintf (f, "CONST: ");
301 print_generic_expr (f, val, 0);
302 if (TREE_CODE (val) == ADDR_EXPR
303 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
304 {
305 fprintf (f, " -> ");
306 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
307 0);
308 }
309 fprintf (f, "\n");
310 }
749aa96d
MJ
311 else if (type == IPA_JF_PASS_THROUGH)
312 {
313 fprintf (f, "PASS THROUGH: ");
8b7773a4 314 fprintf (f, "%d, op %s",
749aa96d 315 jump_func->value.pass_through.formal_id,
5806f481 316 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 317 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
318 {
319 fprintf (f, " ");
320 print_generic_expr (f,
321 jump_func->value.pass_through.operand, 0);
322 }
323 if (jump_func->value.pass_through.agg_preserved)
324 fprintf (f, ", agg_preserved");
b8f6e610
MJ
325 if (jump_func->value.pass_through.type_preserved)
326 fprintf (f, ", type_preserved");
3ea6239f 327 fprintf (f, "\n");
749aa96d
MJ
328 }
329 else if (type == IPA_JF_ANCESTOR)
330 {
331 fprintf (f, "ANCESTOR: ");
332 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
333 jump_func->value.ancestor.formal_id,
334 jump_func->value.ancestor.offset);
335 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
336 if (jump_func->value.ancestor.agg_preserved)
337 fprintf (f, ", agg_preserved");
b8f6e610
MJ
338 if (jump_func->value.ancestor.type_preserved)
339 fprintf (f, ", type_preserved");
3ea6239f 340 fprintf (f, "\n");
749aa96d 341 }
8b7773a4
MJ
342
343 if (jump_func->agg.items)
344 {
345 struct ipa_agg_jf_item *item;
346 int j;
347
348 fprintf (f, " Aggregate passed by %s:\n",
349 jump_func->agg.by_ref ? "reference" : "value");
9771b263 350 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
351 {
352 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
353 item->offset);
354 if (TYPE_P (item->value))
355 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 356 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
357 else
358 {
359 fprintf (f, "cst: ");
360 print_generic_expr (f, item->value, 0);
361 }
362 fprintf (f, "\n");
363 }
364 }
749aa96d
MJ
365 }
366}
367
368
be95e2b9
MJ
369/* Print the jump functions of all arguments on all call graph edges going from
370 NODE to file F. */
371
518dc859 372void
3e293154 373ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 374{
3e293154 375 struct cgraph_edge *cs;
518dc859 376
fec39fa6 377 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 378 node->order);
3e293154
MJ
379 for (cs = node->callees; cs; cs = cs->next_callee)
380 {
381 if (!ipa_edge_args_info_available_for_edge_p (cs))
382 continue;
383
749aa96d 384 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
385 xstrdup (node->name ()), node->order,
386 xstrdup (cs->callee->name ()),
67348ccc 387 cs->callee->order);
749aa96d
MJ
388 ipa_print_node_jump_functions_for_edge (f, cs);
389 }
518dc859 390
9de04252 391 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 392 {
9de04252 393 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
394 if (!ipa_edge_args_info_available_for_edge_p (cs))
395 continue;
3e293154 396
9de04252
MJ
397 ii = cs->indirect_info;
398 if (ii->agg_contents)
c13bc3d9 399 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 400 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 401 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
402 ii->param_index, ii->offset,
403 ii->by_ref ? "by reference" : "by_value");
404 else
85942f45
JH
405 fprintf (f, " indirect %s callsite, calling param %i, "
406 "offset " HOST_WIDE_INT_PRINT_DEC,
407 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
408 ii->offset);
9de04252 409
749aa96d
MJ
410 if (cs->call_stmt)
411 {
9de04252 412 fprintf (f, ", for stmt ");
749aa96d 413 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 414 }
749aa96d 415 else
9de04252 416 fprintf (f, "\n");
749aa96d 417 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
418 }
419}
420
421/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 422
3e293154
MJ
423void
424ipa_print_all_jump_functions (FILE *f)
425{
426 struct cgraph_node *node;
427
ca30a539 428 fprintf (f, "\nJump functions:\n");
65c70e6b 429 FOR_EACH_FUNCTION (node)
3e293154
MJ
430 {
431 ipa_print_node_jump_functions (f, node);
432 }
433}
434
7b872d9e
MJ
435/* Set JFUNC to be a known type jump function. */
436
437static void
438ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
439 tree base_type, tree component_type)
440{
06d65050
JH
441 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
442 && TYPE_BINFO (component_type));
0a2550e7
JH
443 if (!flag_devirtualize)
444 return;
445 gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
7b872d9e
MJ
446 jfunc->type = IPA_JF_KNOWN_TYPE;
447 jfunc->value.known_type.offset = offset,
448 jfunc->value.known_type.base_type = base_type;
449 jfunc->value.known_type.component_type = component_type;
68377e53 450 gcc_assert (component_type);
7b872d9e
MJ
451}
452
b8f6e610
MJ
453/* Set JFUNC to be a copy of another jmp (to be used by jump function
454 combination code). The two functions will share their rdesc. */
455
456static void
457ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
458 struct ipa_jump_func *src)
459
460{
461 gcc_checking_assert (src->type == IPA_JF_CONST);
462 dst->type = IPA_JF_CONST;
463 dst->value.constant = src->value.constant;
464}
465
7b872d9e
MJ
466/* Set JFUNC to be a constant jmp function. */
467
468static void
4502fe8d
MJ
469ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
470 struct cgraph_edge *cs)
7b872d9e 471{
5368224f
DC
472 constant = unshare_expr (constant);
473 if (constant && EXPR_P (constant))
474 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 475 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
476 jfunc->value.constant.value = unshare_expr_without_location (constant);
477
478 if (TREE_CODE (constant) == ADDR_EXPR
479 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
480 {
481 struct ipa_cst_ref_desc *rdesc;
482 if (!ipa_refdesc_pool)
483 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
484 sizeof (struct ipa_cst_ref_desc), 32);
485
486 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
487 rdesc->cs = cs;
488 rdesc->next_duplicate = NULL;
489 rdesc->refcount = 1;
490 jfunc->value.constant.rdesc = rdesc;
491 }
492 else
493 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
494}
495
496/* Set JFUNC to be a simple pass-through jump function. */
497static void
8b7773a4 498ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 499 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
500{
501 jfunc->type = IPA_JF_PASS_THROUGH;
502 jfunc->value.pass_through.operand = NULL_TREE;
503 jfunc->value.pass_through.formal_id = formal_id;
504 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 505 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 506 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
507}
508
509/* Set JFUNC to be an arithmetic pass through jump function. */
510
511static void
512ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
513 tree operand, enum tree_code operation)
514{
515 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 516 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
517 jfunc->value.pass_through.formal_id = formal_id;
518 jfunc->value.pass_through.operation = operation;
8b7773a4 519 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 520 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
521}
522
523/* Set JFUNC to be an ancestor jump function. */
524
525static void
526ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
527 tree type, int formal_id, bool agg_preserved,
528 bool type_preserved)
7b872d9e 529{
0a2550e7
JH
530 if (!flag_devirtualize)
531 type_preserved = false;
532 gcc_assert (!type_preserved
533 || (TREE_CODE (type) == RECORD_TYPE
534 && TYPE_BINFO (type)
535 && BINFO_VTABLE (TYPE_BINFO (type))));
7b872d9e
MJ
536 jfunc->type = IPA_JF_ANCESTOR;
537 jfunc->value.ancestor.formal_id = formal_id;
538 jfunc->value.ancestor.offset = offset;
0a2550e7 539 jfunc->value.ancestor.type = type_preserved ? type : NULL;
8b7773a4 540 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 541 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
542}
543
e248d83f
MJ
544/* Extract the acual BINFO being described by JFUNC which must be a known type
545 jump function. */
546
547tree
548ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
549{
550 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
551 if (!base_binfo)
552 return NULL_TREE;
553 return get_binfo_at_offset (base_binfo,
554 jfunc->value.known_type.offset,
555 jfunc->value.known_type.component_type);
556}
557
8aab5218
MJ
558/* Get IPA BB information about the given BB. FBI is the context of analyzis
559 of this function body. */
560
561static struct ipa_bb_info *
562ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
563{
564 gcc_checking_assert (fbi);
565 return &fbi->bb_infos[bb->index];
566}
567
f65cf2b7
MJ
568/* Structure to be passed in between detect_type_change and
569 check_stmt_for_type_change. */
570
571struct type_change_info
572{
290ebcb7
MJ
573 /* Offset into the object where there is the virtual method pointer we are
574 looking for. */
575 HOST_WIDE_INT offset;
576 /* The declaration or SSA_NAME pointer of the base that we are checking for
577 type change. */
578 tree object;
579 /* If we actually can tell the type that the object has changed to, it is
580 stored in this field. Otherwise it remains NULL_TREE. */
581 tree known_current_type;
f65cf2b7
MJ
582 /* Set to true if dynamic type change has been detected. */
583 bool type_maybe_changed;
290ebcb7
MJ
584 /* Set to true if multiple types have been encountered. known_current_type
585 must be disregarded in that case. */
586 bool multiple_types_encountered;
f65cf2b7
MJ
587};
588
589/* Return true if STMT can modify a virtual method table pointer.
590
591 This function makes special assumptions about both constructors and
592 destructors which are all the functions that are allowed to alter the VMT
593 pointers. It assumes that destructors begin with assignment into all VMT
594 pointers and that constructors essentially look in the following way:
595
596 1) The very first thing they do is that they call constructors of ancestor
597 sub-objects that have them.
598
599 2) Then VMT pointers of this and all its ancestors is set to new values
600 corresponding to the type corresponding to the constructor.
601
602 3) Only afterwards, other stuff such as constructor of member sub-objects
603 and the code written by the user is run. Only this may include calling
604 virtual functions, directly or indirectly.
605
606 There is no way to call a constructor of an ancestor sub-object in any
607 other way.
608
609 This means that we do not have to care whether constructors get the correct
610 type information because they will always change it (in fact, if we define
611 the type to be given by the VMT pointer, it is undefined).
612
613 The most important fact to derive from the above is that if, for some
614 statement in the section 3, we try to detect whether the dynamic type has
615 changed, we can safely ignore all calls as we examine the function body
616 backwards until we reach statements in section 2 because these calls cannot
617 be ancestor constructors or destructors (if the input is not bogus) and so
618 do not change the dynamic type (this holds true only for automatically
619 allocated objects but at the moment we devirtualize only these). We then
620 must detect that statements in section 2 change the dynamic type and can try
621 to derive the new type. That is enough and we can stop, we will never see
622 the calls into constructors of sub-objects in this code. Therefore we can
623 safely ignore all call statements that we traverse.
624 */
625
626static bool
627stmt_may_be_vtbl_ptr_store (gimple stmt)
628{
629 if (is_gimple_call (stmt))
630 return false;
993df21e 631 /* TODO: Skip clobbers, doing so triggers problem in PR60306. */
f65cf2b7
MJ
632 else if (is_gimple_assign (stmt))
633 {
634 tree lhs = gimple_assign_lhs (stmt);
635
0004f992
MJ
636 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
637 {
638 if (flag_strict_aliasing
639 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
640 return false;
641
642 if (TREE_CODE (lhs) == COMPONENT_REF
643 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 644 return false;
0004f992
MJ
645 /* In the future we might want to use get_base_ref_and_offset to find
646 if there is a field corresponding to the offset and if so, proceed
647 almost like if it was a component ref. */
648 }
f65cf2b7
MJ
649 }
650 return true;
651}
652
290ebcb7
MJ
653/* If STMT can be proved to be an assignment to the virtual method table
654 pointer of ANALYZED_OBJ and the type associated with the new table
655 identified, return the type. Otherwise return NULL_TREE. */
656
657static tree
658extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
659{
660 HOST_WIDE_INT offset, size, max_size;
390675c8 661 tree lhs, rhs, base, binfo;
290ebcb7
MJ
662
663 if (!gimple_assign_single_p (stmt))
664 return NULL_TREE;
665
666 lhs = gimple_assign_lhs (stmt);
667 rhs = gimple_assign_rhs1 (stmt);
668 if (TREE_CODE (lhs) != COMPONENT_REF
390675c8 669 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
290ebcb7
MJ
670 return NULL_TREE;
671
672 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
673 if (offset != tci->offset
674 || size != POINTER_SIZE
675 || max_size != POINTER_SIZE)
676 return NULL_TREE;
677 if (TREE_CODE (base) == MEM_REF)
678 {
679 if (TREE_CODE (tci->object) != MEM_REF
680 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
681 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
682 TREE_OPERAND (base, 1)))
683 return NULL_TREE;
684 }
685 else if (tci->object != base)
686 return NULL_TREE;
687
390675c8
JH
688 binfo = vtable_pointer_value_to_binfo (rhs);
689
690 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
691 base of outer type. In this case we would need to either
692 work on binfos or translate it back to outer type and offset.
693 KNOWN_TYPE jump functions are not ready for that, yet. */
694 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
695 return NULL;
696
697 return BINFO_TYPE (binfo);
290ebcb7
MJ
698}
699
61502ca8 700/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
701 detect_type_change to check whether a particular statement may modify
702 the virtual table pointer, and if possible also determine the new type of
703 the (sub-)object. It stores its result into DATA, which points to a
704 type_change_info structure. */
705
706static bool
707check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
708{
709 gimple stmt = SSA_NAME_DEF_STMT (vdef);
710 struct type_change_info *tci = (struct type_change_info *) data;
711
712 if (stmt_may_be_vtbl_ptr_store (stmt))
713 {
290ebcb7
MJ
714 tree type;
715 type = extr_type_from_vtbl_ptr_store (stmt, tci);
716 if (tci->type_maybe_changed
717 && type != tci->known_current_type)
718 tci->multiple_types_encountered = true;
719 tci->known_current_type = type;
f65cf2b7
MJ
720 tci->type_maybe_changed = true;
721 return true;
722 }
723 else
724 return false;
725}
726
290ebcb7
MJ
727
728
06d65050
JH
729/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
730 callsite CALL) by looking for assignments to its virtual table pointer. If
731 it is, return true and fill in the jump function JFUNC with relevant type
732 information or set it to unknown. ARG is the object itself (not a pointer
733 to it, unless dereferenced). BASE is the base of the memory access as
734 returned by get_ref_base_and_extent, as is the offset. */
f65cf2b7
MJ
735
736static bool
06d65050
JH
737detect_type_change (tree arg, tree base, tree comp_type, gimple call,
738 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
739{
740 struct type_change_info tci;
741 ao_ref ao;
742
743 gcc_checking_assert (DECL_P (arg)
744 || TREE_CODE (arg) == MEM_REF
745 || handled_component_p (arg));
746 /* Const calls cannot call virtual methods through VMT and so type changes do
747 not matter. */
06d65050
JH
748 if (!flag_devirtualize || !gimple_vuse (call)
749 /* Be sure expected_type is polymorphic. */
750 || !comp_type
751 || TREE_CODE (comp_type) != RECORD_TYPE
752 || !TYPE_BINFO (comp_type)
753 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
0a2550e7 754 return true;
f65cf2b7 755
4bf2a588
JH
756 /* C++ methods are not allowed to change THIS pointer unless they
757 are constructors or destructors. */
758 if (TREE_CODE (base) == MEM_REF
759 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
760 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
761 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
762 && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
763 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
764 && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
765 && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
766 == DECL_ARGUMENTS (current_function_decl)))
767 return false;
768
dd887943 769 ao_ref_init (&ao, arg);
f65cf2b7
MJ
770 ao.base = base;
771 ao.offset = offset;
772 ao.size = POINTER_SIZE;
773 ao.max_size = ao.size;
f65cf2b7 774
290ebcb7
MJ
775 tci.offset = offset;
776 tci.object = get_base_address (arg);
777 tci.known_current_type = NULL_TREE;
778 tci.type_maybe_changed = false;
779 tci.multiple_types_encountered = false;
780
f65cf2b7
MJ
781 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
782 &tci, NULL);
783 if (!tci.type_maybe_changed)
784 return false;
785
290ebcb7
MJ
786 if (!tci.known_current_type
787 || tci.multiple_types_encountered
788 || offset != 0)
789 jfunc->type = IPA_JF_UNKNOWN;
790 else
7b872d9e 791 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 792
f65cf2b7
MJ
793 return true;
794}
795
796/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
797 SSA name (its dereference will become the base and the offset is assumed to
798 be zero). */
799
800static bool
06d65050
JH
801detect_type_change_ssa (tree arg, tree comp_type,
802 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
803{
804 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 805 if (!flag_devirtualize
06d65050 806 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
807 return false;
808
809 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 810 build_int_cst (ptr_type_node, 0));
f65cf2b7 811
06d65050 812 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
813}
814
fdb0e1b4
MJ
815/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
816 boolean variable pointed to by DATA. */
817
818static bool
819mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
820 void *data)
821{
822 bool *b = (bool *) data;
823 *b = true;
824 return true;
825}
826
8aab5218
MJ
827/* Return true if we have already walked so many statements in AA that we
828 should really just start giving up. */
829
830static bool
831aa_overwalked (struct func_body_info *fbi)
832{
833 gcc_checking_assert (fbi);
834 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
835}
836
837/* Find the nearest valid aa status for parameter specified by INDEX that
838 dominates BB. */
839
840static struct param_aa_status *
841find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
842 int index)
843{
844 while (true)
845 {
846 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
847 if (!bb)
848 return NULL;
849 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
850 if (!bi->param_aa_statuses.is_empty ()
851 && bi->param_aa_statuses[index].valid)
852 return &bi->param_aa_statuses[index];
853 }
854}
855
856/* Get AA status structure for the given BB and parameter with INDEX. Allocate
857 structures and/or intialize the result with a dominating description as
858 necessary. */
859
860static struct param_aa_status *
861parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
862 int index)
863{
864 gcc_checking_assert (fbi);
865 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
866 if (bi->param_aa_statuses.is_empty ())
867 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
868 struct param_aa_status *paa = &bi->param_aa_statuses[index];
869 if (!paa->valid)
870 {
871 gcc_checking_assert (!paa->parm_modified
872 && !paa->ref_modified
873 && !paa->pt_modified);
874 struct param_aa_status *dom_paa;
875 dom_paa = find_dominating_aa_status (fbi, bb, index);
876 if (dom_paa)
877 *paa = *dom_paa;
878 else
879 paa->valid = true;
880 }
881
882 return paa;
883}
884
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Bail out once the cumulative AA walk budget has been exhausted.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      /* A cached "modified" verdict is final for this BB.  */
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* Walk virtual definitions backwards from STMT; mark_modified sets
     MODIFIED if any of them may clobber the parameter.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result so dominated BBs can reuse it.  */
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
924
925/* If STMT is an assignment that loads a value from an parameter declaration,
926 return the index of the parameter in ipa_node_params which has not been
927 modified. Otherwise return -1. */
928
929static int
8aab5218
MJ
930load_from_unmodified_param (struct func_body_info *fbi,
931 vec<ipa_param_descriptor> descriptors,
fdb0e1b4
MJ
932 gimple stmt)
933{
934 int index;
935 tree op1;
936
937 if (!gimple_assign_single_p (stmt))
938 return -1;
939
940 op1 = gimple_assign_rhs1 (stmt);
941 if (TREE_CODE (op1) != PARM_DECL)
942 return -1;
943
d044dd17 944 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 945 if (index < 0
8aab5218 946 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
fdb0e1b4
MJ
947 return -1;
948
949 return index;
950}
f65cf2b7 951
8aab5218
MJ
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      /* Respect the global AA walking budget.  */
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      /* A cached "referenced data modified" verdict is final for this BB.  */
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  /* Scan virtual definitions before STMT for potential clobbers of REF.  */
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  /* Cache a negative result for reuse by dominated blocks.  */
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
990
8aab5218
MJ
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  /* A cached "pointed-to data modified" verdict is final for this BB.  */
  if (paa->pt_modified)
    return false;

  /* Size is unknown, so cover everything the pointer may reach.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1023
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (FBI can be
   NULL), STMT is the load statement.  If function returns true, *INDEX_P,
   *OFFSET_P and *BY_REF_P is filled with the parameter index, offset within
   the aggregate and whether it is a load from a value passed by reference
   respectively; *SIZE_P, if non-NULL, receives the size of the access.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  /* Reject variable-sized/variable-offset accesses.  */
  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      /* Aggregate passed by value: the base must itself be a parameter.
	 NOTE(review): this inner INDEX intentionally shadows the outer one;
	 the branch always returns, so the shadow is harmless.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  /* Aggregate passed by reference: expect a load through *parm + 0.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
1105
/* Just like ipa_load_from_parm_agg_1, but for users outside of this file:
   passes a NULL func_body_info (so no AA status caching is done) and does
   not request the access size.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
1117
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  FBI holds per-function-body information that
   is only needed for intraprocedural analysis.  CALL is the call statement
   passing NAME; PARAM_TYPE is the declared type of the corresponding formal
   parameter (or NULL if unknown).  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* Cases 1 and 2: the value traces back to parameter INDEX.  */
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Arithmetic pass-through: the second operand must be an IPA
	     invariant and, except for comparisons, the result type must
	     match the operand type.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* Simple pass-through; also record whether aggregate contents
	     and the dynamic type are preserved.  */
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  /* Case 3: taking the address of a field within a structure pointed to by
     a parameter — an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}
1259
40591473
MJ
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Add the MEM_REF's own constant offset to the component offset.  */
  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1303
685b0d13 1304
b258210c
MJ
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the literal zero, the other the casted
     pointer.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* The non-zero argument must be produced by the &obj->field assignment
     in a block with a single predecessor (the condition block).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}
1392
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  /* The argument must be the address of a fixed, local record object.  */
  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  /* If the dynamic type may change before the call, we cannot rely on the
     declared type.  */
  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
1430
be95e2b9
MJ
1431/* Inspect the given TYPE and return true iff it has the same structure (the
1432 same number of fields of the same types) as a C++ member pointer. If
1433 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1434 corresponding fields there. */
1435
3e293154
MJ
1436static bool
1437type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1438{
1439 tree fld;
1440
1441 if (TREE_CODE (type) != RECORD_TYPE)
1442 return false;
1443
1444 fld = TYPE_FIELDS (type);
1445 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1446 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1447 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1448 return false;
1449
1450 if (method_ptr)
1451 *method_ptr = fld;
1452
910ad8de 1453 fld = DECL_CHAIN (fld);
8b7773a4 1454 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1455 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1456 return false;
1457 if (delta)
1458 *delta = fld;
1459
910ad8de 1460 if (DECL_CHAIN (fld))
3e293154
MJ
1461 return false;
1462
1463 return true;
1464}
1465
61502ca8 1466/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1467 return the rhs of its defining statement. Otherwise return RHS as it
1468 is. */
7ec49257
MJ
1469
1470static inline tree
1471get_ssa_def_if_simple_copy (tree rhs)
1472{
1473 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1474 {
1475 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1476
1477 if (gimple_assign_single_p (def_stmt))
1478 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1479 else
1480 break;
7ec49257
MJ
1481 }
1482 return rhs;
1483}
1484
8b7773a4
MJ
/* Simple linked list, describing known contents of an aggregate before a
   call.  Kept sorted by increasing OFFSET.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1497
0d48ee34
MJ
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  /* Advance past all entries that end at or before LHS_OFFSET; the list is
     sorted by offset.  */
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
1530
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      /* Entries with NULL constant describe known-clobbered parts and are
	 skipped.  */
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  /* Offsets in the jump function are relative to the argument.  */
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
1554
8b7773a4
MJ
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  /* The pointed-to type must have a known constant size.  */
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      /* Statements that cannot touch the tracked memory are irrelevant.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      /* Any other kind of clobber ends the walk conservatively.  */
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  /* Pass-by-reference: only stores of the form *arg_base + 0 can be
	     matched.  */
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  /* A store to a different local decl cannot affect ARG; anything
	     else might.  */
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      /* Bound the amount of work by the IPA_MAX_AGG_ITEMS parameter.  */
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1716
06d65050
JH
1717static tree
1718ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1719{
1720 int n;
1721 tree type = (e->callee
67348ccc 1722 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1723 : gimple_call_fntype (e->call_stmt));
1724 tree t = TYPE_ARG_TYPES (type);
1725
1726 for (n = 0; n < i; n++)
1727 {
1728 if (!t)
1729 break;
1730 t = TREE_CHAIN (t);
1731 }
1732 if (t)
1733 return TREE_VALUE (t);
1734 if (!e->callee)
1735 return NULL;
67348ccc 1736 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1737 for (n = 0; n < i; n++)
1738 {
1739 if (!t)
1740 return NULL;
1741 t = TREE_CHAIN (t);
1742 }
1743 if (t)
1744 return TREE_TYPE (t);
1745 return NULL;
1746}
1747
3e293154
MJ
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls or if jump functions have already
     been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The argument is the unmodified value of a parameter.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      /* The argument is computed; try to recognize arithmetic
		 pass-throughs, loads from parameters and ancestor casts.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Additionally try to record known aggregate contents, unless the jump
	 function already guarantees the aggregate is preserved.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}
1848
749aa96d 1849/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 1850 from BB. */
749aa96d 1851
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  FBI describes the function body being analyzed.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  /* Iterate over the call graph edges previously recorded for this BB
     (in reverse push order).  */
  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  /* Called for its side effects (alias/thunk resolution);
	     the result is intentionally discarded here.  */
	  cgraph_function_or_thunk_node (callee, NULL);
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
1874
8b7773a4
MJ
1875/* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
be95e2b9 1880
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  /* Only a plain single-rhs assignment can be such a load.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load may be expressed either as a COMPONENT_REF of the field or
     directly as a MEM_REF at the field's byte offset; handle both forms.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type has the member-pointer layout
     (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* MEM_REF form: the constant offset must match the field's byte
       position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
1926
1927/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1928
3e293154
MJ
1929static bool
1930ipa_is_ssa_with_stmt_def (tree t)
1931{
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1937}
1938
40591473
MJ
1939/* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
be95e2b9 1942
40591473
MJ
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  /* Reset flags that callers may set later; this edge describes a plain
     call through the parameter value itself.  */
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1954
e33c6cd6 1955/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1956 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1957 intermediate information about each formal parameter. Currently it checks
1958 whether the call calls a pointer that is a formal parameter and if so, the
1959 parameter is marked with the called flag and an indirect call graph edge
1960 describing the call is created. This is very simple for ordinary pointers
1961 represented in SSA but not-so-nice when it comes to member pointers. The
1962 ugly part of this function does nothing more than trying to match the
1963 pattern of such a call. An example of such a pattern is the gimple dump
1964 below, the call is on the last line:
3e293154 1965
ae788515
EB
1966 <bb 2>:
1967 f$__delta_5 = f.__delta;
1968 f$__pfn_24 = f.__pfn;
1969
1970 or
3e293154 1971 <bb 2>:
d242d063
MJ
1972 f$__delta_5 = MEM[(struct *)&f];
1973 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1974
ae788515 1975 and a few lines below:
8aa29647
MJ
1976
1977 <bb 5>
3e293154
MJ
1978 D.2496_3 = (int) f$__pfn_24;
1979 D.2497_4 = D.2496_3 & 1;
1980 if (D.2497_4 != 0)
1981 goto <bb 3>;
1982 else
1983 goto <bb 4>;
1984
8aa29647 1985 <bb 6>:
3e293154
MJ
1986 D.2500_7 = (unsigned int) f$__delta_5;
1987 D.2501_8 = &S + D.2500_7;
1988 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1989 D.2503_10 = *D.2502_9;
1990 D.2504_12 = f$__pfn_24 + -1;
1991 D.2505_13 = (unsigned int) D.2504_12;
1992 D.2506_14 = D.2503_10 + D.2505_13;
1993 D.2507_15 = *D.2506_14;
1994 iftmp.11_16 = (String:: *) D.2507_15;
1995
8aa29647 1996 <bb 7>:
3e293154
MJ
1997 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1998 D.2500_19 = (unsigned int) f$__delta_5;
1999 D.2508_20 = &S + D.2500_19;
2000 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2001
2002 Such patterns are results of simple calls to a member pointer:
2003
2004 int doprinting (int (MyString::* f)(int) const)
2005 {
2006 MyString S ("somestring");
2007
2008 return (S.*f)(4);
2009 }
8b7773a4
MJ
2010
2011 Moreover, the function also looks for called pointers loaded from aggregates
2012 passed by value or reference. */
3e293154
MJ
2013
/* Analyze the indirect CALL whose called value is TARGET (an SSA_NAME) and,
   if the value can be traced back to a formal parameter of FBI->node, create
   an indirect call graph edge note for it.  Handles three cases: a direct
   call through a pointer parameter, a call through a pointer loaded from an
   aggregate passed by value or reference, and the complex two-basic-block
   pattern generated for C++ member pointer calls (see the big comment
   above this function).  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* Case 1: the called value is directly an (unmodified) parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* Case 2: the called value is loaded from an aggregate that is a
     parameter or pointed to by a parameter.  */
  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      /* A different offset invalidates any previously derived outer type.  */
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  The called value must be a PHI over exactly two values (the
     virtual and the non-virtual branch) of method-pointer type.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one PHI argument may be the pfn load; if both are, the
	 pattern is ambiguous and we bail out.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern: BB -> virt_bb -> join, with virt_bb
     having no other predecessors or successors.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be (something & 1).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion of the tested value.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     (the pfn or delta field, depending on the target's vbit location).  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2150
b258210c
MJ
2151/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2152 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2153 FBI->node (described by FBI->info), create a call note for the
2154 statement. */
b258210c
MJ
2155
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gimple call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the unmodified parameter itself.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type of the object may have changed
	 between function entry and the call.  */
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* The object may be an ancestor (base sub-object at a constant
	 offset) of a parameter.  */
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record the polymorphic call details on the indirect edge.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2208
2209/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2210 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2211 containing intermediate information about each formal parameter. */
b258210c
MJ
2212
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller FBI->node.  Dispatches to the indirect-call or virtual-call
   analyzer as appropriate.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
{
  tree target = gimple_call_fn (call);

  /* Only calls through an SSA name or an OBJ_TYPE_REF are interesting.  */
  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
  if (cs && !cs->indirect_unknown_callee)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
2233
2234
e33c6cd6 2235/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2236 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2237 formal parameters are called. */
be95e2b9 2238
3e293154 2239static void
8aab5218 2240ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
3e293154 2241{
726a989a 2242 if (is_gimple_call (stmt))
8aab5218 2243 ipa_analyze_call_uses (fbi, stmt);
062c604f
MJ
2244}
2245
2246/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2247 If OP is a parameter declaration, mark it as used in the info structure
2248 passed in DATA. */
2249
2250static bool
9f1363cd 2251visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
062c604f
MJ
2252{
2253 struct ipa_node_params *info = (struct ipa_node_params *) data;
2254
2255 op = get_base_address (op);
2256 if (op
2257 && TREE_CODE (op) == PARM_DECL)
2258 {
2259 int index = ipa_get_param_decl_index (info, op);
2260 gcc_assert (index >= 0);
310bc633 2261 ipa_set_param_used (info, index, true);
062c604f
MJ
2262 }
2263
2264 return false;
3e293154
MJ
2265}
2266
8aab5218
MJ
2267/* Scan the statements in BB and inspect the uses of formal parameters. Store
2268 the findings in various structures of the associated ipa_node_params
2269 structure, such as parameter flags, notes etc. FBI holds various data about
2270 the function being analyzed. */
be95e2b9 2271
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      /* Debug statements must not influence the analysis.  */
      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      /* The same callback handles loads, stores and address-takens.  */
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  /* PHI nodes can also refer to parameters.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
2295
2296/* Calculate controlled uses of parameters of NODE. */
2297
/* Calculate controlled uses of parameters of NODE.  A use is "controlled"
   when it appears directly as a call argument; a parameter whose uses are
   all controlled can have its uses counted exactly, otherwise it is marked
   IPA_UNDESCRIBED_USE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    /* Debug uses do not count, any other non-call use
		       makes the parameter undescribed.  */
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2340
8aab5218 2341/* Free stuff in BI. */
062c604f 2342
8aab5218
MJ
2343static void
2344free_ipa_bb_info (struct ipa_bb_info *bi)
2345{
2346 bi->cg_edges.release ();
2347 bi->param_aa_statuses.release ();
3e293154
MJ
2348}
2349
8aab5218 2350/* Dominator walker driving the analysis. */
2c9561b5 2351
/* Dominator walker driving the analysis: for every basic block, first
   analyze parameter uses, then compute jump functions for outgoing
   edges.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  /* Function body analysis context shared by all visited blocks.  */
  struct func_body_info *m_fbi;
};

/* Visit BB before its dominated children: run both analyses on it.  */

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
2370
dd5a833e
MS
2371/* Initialize the array describing properties of of formal parameters
2372 of NODE, analyze their uses and compute jump functions associated
2373 with actual arguments of calls from within NODE. */
062c604f
MJ
2374
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* Analysis is idempotent; do it only once per node.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Conservatively mark all parameters as used and undescribed.  */
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Distribute all outgoing edges (direct and indirect) to the per-BB
     info vectors so the dominator walk can process them per block.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release the per-BB scratch data.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
2434
e248d83f
MJ
2435/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2436 attempt a type-based devirtualization. If successful, return the
2437 target function declaration, otherwise return NULL. */
2438
/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
   attempt a type-based devirtualization.  If successful, return the
   target function declaration, otherwise return NULL.  */

tree
ipa_intraprocedural_devirtualization (gimple call)
{
  tree binfo, token, fndecl;
  struct ipa_jump_func jfunc;
  tree otr = gimple_call_fn (call);

  jfunc.type = IPA_JF_UNKNOWN;
  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
				call, obj_type_ref_class (otr));
  /* Devirtualization is only possible when the object's type is known.  */
  if (jfunc.type != IPA_JF_KNOWN_TYPE)
    return NULL_TREE;
  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
  if (!binfo)
    return NULL_TREE;
  token = OBJ_TYPE_REF_TOKEN (otr);
  fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
					     binfo);
#ifdef ENABLE_CHECKING
  /* Sanity check: the found target must be among the possible
     polymorphic call targets.  */
  if (fndecl)
    gcc_assert (possible_polymorphic_call_target_p
		 (otr, cgraph_get_node (fndecl)));
#endif
  return fndecl;
}
062c604f 2464
61502ca8 2465/* Update the jump function DST when the call graph edge corresponding to SRC is
b258210c
MJ
2466 is being inlined, knowing that DST is of type ancestor and src of known
2467 type. */
2468
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  On success DST becomes a known-type jump function whose offset is
   the sum of both offsets.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  /* If the ancestor walk does not preserve the type, nothing useful is
     known about the result.  */
  if (!ipa_get_jf_ancestor_type_preserved (dst))
    {
      dst->type = IPA_JF_UNKNOWN;
      return;
    }

  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  ipa_set_jf_known_type (dst, combined_offset,
			 ipa_get_jf_known_type_base_type (src),
			 combined_type);
}
2490
be95e2b9 2491/* Update the jump functions associated with call graph edge E when the call
3e293154 2492 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2493 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2494
3e293154
MJ
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.
   For each argument of E, the jump function referring to CS->callee's formal
   parameters is composed with the corresponding jump function of CS.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  /* Copy aggregate contents from SRC, shifting their offsets by
	     the ancestor offset.  */
	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      /* ancestor o simple-pass-through = ancestor of the outer
		 parameter, with preservation flags intersected.  */
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.pass_through.type_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      /* ancestor o ancestor = ancestor with summed offsets.  */
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.ancestor.type_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);

	      /* Compose the outer jump function SRC with the simple
		 pass-through DST, case by case on SRC's type.  */
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_KNOWN_TYPE:
		  if (ipa_get_jf_pass_through_type_preserved (dst))
		    ipa_set_jf_known_type (dst,
					   ipa_get_jf_known_type_offset (src),
					   ipa_get_jf_known_type_base_type (src),
					   ipa_get_jf_known_type_component_type (src));
		  else
		    dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p, type_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			type_p = ipa_get_jf_pass_through_type_preserved (src)
			  && ipa_get_jf_pass_through_type_preserved (dst);
			ipa_set_jf_simple_pass_through (dst, formal_id,
							agg_p, type_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p, type_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    type_p = ipa_get_jf_ancestor_type_preserved (src)
		      && ipa_get_jf_pass_through_type_preserved (dst);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_type (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p, type_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
2652
2653/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2654 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2655
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  When
   the target cannot be a function at all, the call is redirected to
   __builtin_unreachable instead.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      const char *fmt = "discovered direct call to non-function in %s/%i, "
		"making it __builtin_unreachable\n";

	      if (ie->call_stmt)
		{
		  location_t loc = gimple_location (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc, fmt,
				   ie->caller->name (), ie->caller->order);
		}
	      else if (dump_file)
		fprintf (dump_file, fmt, ie->caller->name (), ie->caller->order);
	    }

	  /* The "target" is provably not a function; calling it can never
	     happen in a valid program.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_get_create_node (target);
	  unreachable = true;
	}
      else
	callee = cgraph_get_node (target);
    }
  else
    callee = cgraph_get_node (target);

  /* Because may-edges are not explicitely represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_get_create_node (target);
    }

  /* Honor the devirtualization debug counter.  */
  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      const char *fmt = "converting indirect call in %s to direct call to %s\n";

      if (ie->call_stmt)
	{
	  location_t loc = gimple_location (ie->call_stmt);

	  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc, fmt,
			   ie->caller->name (), callee->name ());

	}
      else if (dump_file)
	fprintf (dump_file, fmt, ie->caller->name (), callee->name ());
    }
  /* Redirect the edge and adjust the inline summary: a direct call is
     cheaper than an indirect one.  */
  ie = cgraph_make_edge_direct (ie, callee);
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
2770
8b7773a4
MJ
2771/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2772 return NULL if there is not any. BY_REF specifies whether the value has to
2773 be passed by reference or by value. */
2774
2775tree
2776ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2777 HOST_WIDE_INT offset, bool by_ref)
2778{
2779 struct ipa_agg_jf_item *item;
2780 int i;
2781
2782 if (by_ref != agg->by_ref)
2783 return NULL;
2784
9771b263 2785 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2786 if (item->offset == offset)
2787 {
2788 /* Currently we do not have clobber values, return NULL for them once
2789 we do. */
2790 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2791 return item->value;
2792 }
8b7773a4
MJ
2793 return NULL;
2794}
2795
4502fe8d 2796/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2797 reference description RDESC. Return true if the reference has been
2798 successfully found and removed. */
4502fe8d 2799
568cda29 2800static bool
5e20cdc9 2801remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2802{
2803 struct ipa_ref *to_del;
2804 struct cgraph_edge *origin;
2805
2806 origin = rdesc->cs;
a854f856
MJ
2807 if (!origin)
2808 return false;
d122681a
ML
2809 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2810 origin->lto_stmt_uid);
568cda29
MJ
2811 if (!to_del)
2812 return false;
2813
d122681a 2814 to_del->remove_reference ();
4502fe8d
MJ
2815 if (dump_file)
2816 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
2817 xstrdup (origin->caller->name ()),
2818 origin->caller->order, xstrdup (symbol->name ()));
568cda29 2819 return true;
4502fe8d
MJ
2820}
2821
2822/* If JFUNC has a reference description with refcount different from
2823 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2824 NULL. JFUNC must be a constant jump function. */
2825
2826static struct ipa_cst_ref_desc *
2827jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2828{
2829 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2830 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2831 return rdesc;
2832 else
2833 return NULL;
2834}
2835
568cda29
MJ
2836/* If the value of constant jump function JFUNC is an address of a function
2837 declaration, return the associated call graph node. Otherwise return
2838 NULL. */
2839
2840static cgraph_node *
2841cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2842{
2843 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2844 tree cst = ipa_get_jf_constant (jfunc);
2845 if (TREE_CODE (cst) != ADDR_EXPR
2846 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2847 return NULL;
2848
2849 return cgraph_get_node (TREE_OPERAND (cst, 0));
2850}
2851
2852
2853/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2854 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2855 the edge specified in the rdesc. Return false if either the symbol or the
2856 reference could not be found, otherwise return true. */
2857
2858static bool
2859try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2860{
2861 struct ipa_cst_ref_desc *rdesc;
2862 if (jfunc->type == IPA_JF_CONST
2863 && (rdesc = jfunc_rdesc_usable (jfunc))
2864 && --rdesc->refcount == 0)
2865 {
5e20cdc9 2866 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2867 if (!symbol)
2868 return false;
2869
2870 return remove_described_reference (symbol, rdesc);
2871 }
2872 return true;
2873}
2874
b258210c
MJ
2875/* Try to find a destination for indirect edge IE that corresponds to a simple
2876 call or a call of a member function pointer and where the destination is a
2877 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2878 determined, return the newly direct edge, otherwise return NULL.
2879 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2880
b258210c
MJ
2881static struct cgraph_edge *
2882try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2883 struct ipa_jump_func *jfunc,
2884 struct ipa_node_params *new_root_info)
b258210c 2885{
4502fe8d 2886 struct cgraph_edge *cs;
b258210c 2887 tree target;
042ae7d2 2888 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2889
8b7773a4 2890 if (ie->indirect_info->agg_contents)
d250540a
MJ
2891 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2892 ie->indirect_info->offset,
2893 ie->indirect_info->by_ref);
b258210c 2894 else
d250540a
MJ
2895 target = ipa_value_from_jfunc (new_root_info, jfunc);
2896 if (!target)
2897 return NULL;
4502fe8d
MJ
2898 cs = ipa_make_edge_direct_to_target (ie, target);
2899
a12cd2db 2900 if (cs && !agg_contents)
568cda29
MJ
2901 {
2902 bool ok;
2903 gcc_checking_assert (cs->callee
ae6d0907
MJ
2904 && (cs != ie
2905 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2906 || !cgraph_node_for_jfunc (jfunc)
2907 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2908 ok = try_decrement_rdesc_refcount (jfunc);
2909 gcc_checking_assert (ok);
2910 }
4502fe8d
MJ
2911
2912 return cs;
b258210c
MJ
2913}
2914
bec81025
MJ
2915/* Return the target to be used in cases of impossible devirtualization. IE
2916 and target (the latter can be NULL) are dumped when dumping is enabled. */
2917
72972c22
MJ
2918tree
2919ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
2920{
2921 if (dump_file)
2922 {
2923 if (target)
2924 fprintf (dump_file,
72972c22 2925 "Type inconsistent devirtualization: %s/%i->%s\n",
bec81025
MJ
2926 ie->caller->name (), ie->caller->order,
2927 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2928 else
2929 fprintf (dump_file,
2930 "No devirtualization target in %s/%i\n",
2931 ie->caller->name (), ie->caller->order);
2932 }
2933 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2934 cgraph_get_create_node (new_target);
2935 return new_target;
2936}
2937
d250540a
MJ
2938/* Try to find a destination for indirect edge IE that corresponds to a virtual
2939 call based on a formal parameter which is described by jump function JFUNC
2940 and if it can be determined, make it direct and return the direct edge.
2941 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2942 are relative to. */
b258210c
MJ
2943
2944static struct cgraph_edge *
2945try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2946 struct ipa_jump_func *jfunc,
2947 struct ipa_node_params *new_root_info)
3e293154 2948{
9de2f554 2949 tree binfo, target;
85942f45
JH
2950
2951 if (!flag_devirtualize)
2952 return NULL;
b258210c 2953
9de2f554 2954 /* First try to do lookup via known virtual table pointer value. */
85942f45
JH
2955 if (!ie->indirect_info->by_ref)
2956 {
9de2f554
JH
2957 tree vtable;
2958 unsigned HOST_WIDE_INT offset;
85942f45
JH
2959 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2960 ie->indirect_info->offset,
2961 true);
9de2f554
JH
2962 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2963 {
2964 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2965 vtable, offset);
2966 if (target)
2967 {
2968 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2969 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2970 || !possible_polymorphic_call_target_p
2971 (ie, cgraph_get_node (target)))
72972c22 2972 target = ipa_impossible_devirt_target (ie, target);
9de2f554
JH
2973 return ipa_make_edge_direct_to_target (ie, target);
2974 }
2975 }
85942f45
JH
2976 }
2977
9de2f554 2978 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
d250540a 2979
da942ca0 2980 if (!binfo)
b258210c 2981 return NULL;
3e293154 2982
da942ca0
JH
2983 if (TREE_CODE (binfo) != TREE_BINFO)
2984 {
5bccb77a
JH
2985 ipa_polymorphic_call_context context;
2986 vec <cgraph_node *>targets;
2987 bool final;
2988
2989 if (!get_polymorphic_call_info_from_invariant
2990 (&context, binfo, ie->indirect_info->otr_type,
2991 ie->indirect_info->offset))
2992 return NULL;
2993 targets = possible_polymorphic_call_targets
2994 (ie->indirect_info->otr_type,
2995 ie->indirect_info->otr_token,
2996 context, &final);
2997 if (!final || targets.length () > 1)
85942f45 2998 return NULL;
5bccb77a
JH
2999 if (targets.length () == 1)
3000 target = targets[0]->decl;
3001 else
72972c22 3002 target = ipa_impossible_devirt_target (ie, NULL_TREE);
da942ca0 3003 }
b258210c 3004 else
5bccb77a
JH
3005 {
3006 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3007 ie->indirect_info->otr_type);
3008 if (binfo)
3009 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3010 binfo);
3011 else
3012 return NULL;
3013 }
b258210c
MJ
3014
3015 if (target)
450ad0cd 3016 {
bec81025 3017 if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
72972c22 3018 target = ipa_impossible_devirt_target (ie, target);
450ad0cd
JH
3019 return ipa_make_edge_direct_to_target (ie, target);
3020 }
b258210c
MJ
3021 else
3022 return NULL;
3e293154
MJ
3023}
3024
3025/* Update the param called notes associated with NODE when CS is being inlined,
3026 assuming NODE is (potentially indirectly) inlined into CS->callee.
3027 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 3028 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 3029 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 3030
f8e2a1ed 3031static bool
e33c6cd6
MJ
3032update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3033 struct cgraph_node *node,
9771b263 3034 vec<cgraph_edge_p> *new_edges)
3e293154 3035{
9e97ff61 3036 struct ipa_edge_args *top;
b258210c 3037 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 3038 struct ipa_node_params *new_root_info;
f8e2a1ed 3039 bool res = false;
3e293154 3040
e33c6cd6 3041 ipa_check_create_edge_args ();
9e97ff61 3042 top = IPA_EDGE_REF (cs);
d250540a
MJ
3043 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3044 ? cs->caller->global.inlined_to
3045 : cs->caller);
e33c6cd6
MJ
3046
3047 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 3048 {
e33c6cd6 3049 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 3050 struct ipa_jump_func *jfunc;
8b7773a4 3051 int param_index;
3e293154 3052
e33c6cd6 3053 next_ie = ie->next_callee;
3e293154 3054
5f902d76
JH
3055 if (ici->param_index == -1)
3056 continue;
e33c6cd6 3057
3e293154 3058 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 3059 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 3060 {
5ee53a06 3061 ici->param_index = -1;
3e293154
MJ
3062 continue;
3063 }
3064
8b7773a4
MJ
3065 param_index = ici->param_index;
3066 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
3067
3068 if (!flag_indirect_inlining)
36b72910
JH
3069 new_direct_edge = NULL;
3070 else if (ici->polymorphic)
d250540a
MJ
3071 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3072 new_root_info);
b258210c 3073 else
d250540a
MJ
3074 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3075 new_root_info);
042ae7d2
JH
3076 /* If speculation was removed, then we need to do nothing. */
3077 if (new_direct_edge && new_direct_edge != ie)
3078 {
3079 new_direct_edge->indirect_inlining_edge = 1;
3080 top = IPA_EDGE_REF (cs);
3081 res = true;
3082 }
3083 else if (new_direct_edge)
685b0d13 3084 {
b258210c 3085 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
3086 if (new_direct_edge->call_stmt)
3087 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
3088 = !gimple_check_call_matching_types (
3089 new_direct_edge->call_stmt,
67348ccc 3090 new_direct_edge->callee->decl, false);
b258210c
MJ
3091 if (new_edges)
3092 {
9771b263 3093 new_edges->safe_push (new_direct_edge);
b258210c
MJ
3094 res = true;
3095 }
042ae7d2 3096 top = IPA_EDGE_REF (cs);
685b0d13 3097 }
36b72910
JH
3098 else if (jfunc->type == IPA_JF_PASS_THROUGH
3099 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3100 {
8a2256dd
MJ
3101 if ((ici->agg_contents
3102 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3103 || (ici->polymorphic
3104 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
36b72910
JH
3105 ici->param_index = -1;
3106 else
3107 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3108 }
3109 else if (jfunc->type == IPA_JF_ANCESTOR)
3110 {
8a2256dd
MJ
3111 if ((ici->agg_contents
3112 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3113 || (ici->polymorphic
3114 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
36b72910
JH
3115 ici->param_index = -1;
3116 else
3117 {
3118 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
68377e53
JH
3119 if (ipa_get_jf_ancestor_offset (jfunc))
3120 ici->outer_type = NULL;
36b72910
JH
3121 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3122 }
3123 }
3124 else
3125 /* Either we can find a destination for this edge now or never. */
3126 ici->param_index = -1;
3e293154 3127 }
e33c6cd6 3128
f8e2a1ed 3129 return res;
3e293154
MJ
3130}
3131
3132/* Recursively traverse subtree of NODE (including node) made of inlined
3133 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3134 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3135 update_jump_functions_after_inlining on all non-inlined edges that lead out
3136 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3137 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3138 created. */
be95e2b9 3139
f8e2a1ed 3140static bool
3e293154
MJ
3141propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3142 struct cgraph_node *node,
9771b263 3143 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
3144{
3145 struct cgraph_edge *e;
f8e2a1ed 3146 bool res;
3e293154 3147
e33c6cd6 3148 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3149
3150 for (e = node->callees; e; e = e->next_callee)
3151 if (!e->inline_failed)
f8e2a1ed 3152 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3153 else
3154 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3155 for (e = node->indirect_calls; e; e = e->next_callee)
3156 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3157
3158 return res;
3e293154
MJ
3159}
3160
4502fe8d
MJ
3161/* Combine two controlled uses counts as done during inlining. */
3162
3163static int
3164combine_controlled_uses_counters (int c, int d)
3165{
3166 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3167 return IPA_UNDESCRIBED_USE;
3168 else
3169 return c + d - 1;
3170}
3171
3172/* Propagate number of controlled users from CS->caleee to the new root of the
3173 tree of inlined nodes. */
3174
3175static void
3176propagate_controlled_uses (struct cgraph_edge *cs)
3177{
3178 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3179 struct cgraph_node *new_root = cs->caller->global.inlined_to
3180 ? cs->caller->global.inlined_to : cs->caller;
3181 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3182 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3183 int count, i;
3184
3185 count = MIN (ipa_get_cs_argument_count (args),
3186 ipa_get_param_count (old_root_info));
3187 for (i = 0; i < count; i++)
3188 {
3189 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3190 struct ipa_cst_ref_desc *rdesc;
3191
3192 if (jf->type == IPA_JF_PASS_THROUGH)
3193 {
3194 int src_idx, c, d;
3195 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3196 c = ipa_get_controlled_uses (new_root_info, src_idx);
3197 d = ipa_get_controlled_uses (old_root_info, i);
3198
3199 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3200 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3201 c = combine_controlled_uses_counters (c, d);
3202 ipa_set_controlled_uses (new_root_info, src_idx, c);
3203 if (c == 0 && new_root_info->ipcp_orig_node)
3204 {
3205 struct cgraph_node *n;
3206 struct ipa_ref *ref;
3207 tree t = new_root_info->known_vals[src_idx];
3208
3209 if (t && TREE_CODE (t) == ADDR_EXPR
3210 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3211 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
d122681a 3212 && (ref = new_root->find_reference (n, NULL, 0)))
4502fe8d
MJ
3213 {
3214 if (dump_file)
3215 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3216 "reference from %s/%i to %s/%i.\n",
fec39fa6 3217 xstrdup (new_root->name ()),
67348ccc 3218 new_root->order,
fec39fa6 3219 xstrdup (n->name ()), n->order);
d122681a 3220 ref->remove_reference ();
4502fe8d
MJ
3221 }
3222 }
3223 }
3224 else if (jf->type == IPA_JF_CONST
3225 && (rdesc = jfunc_rdesc_usable (jf)))
3226 {
3227 int d = ipa_get_controlled_uses (old_root_info, i);
3228 int c = rdesc->refcount;
3229 rdesc->refcount = combine_controlled_uses_counters (c, d);
3230 if (rdesc->refcount == 0)
3231 {
3232 tree cst = ipa_get_jf_constant (jf);
3233 struct cgraph_node *n;
3234 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3235 && TREE_CODE (TREE_OPERAND (cst, 0))
3236 == FUNCTION_DECL);
3237 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3238 if (n)
3239 {
3240 struct cgraph_node *clone;
568cda29 3241 bool ok;
67348ccc 3242 ok = remove_described_reference (n, rdesc);
568cda29 3243 gcc_checking_assert (ok);
4502fe8d
MJ
3244
3245 clone = cs->caller;
3246 while (clone->global.inlined_to
3247 && clone != rdesc->cs->caller
3248 && IPA_NODE_REF (clone)->ipcp_orig_node)
3249 {
3250 struct ipa_ref *ref;
d122681a 3251 ref = clone->find_reference (n, NULL, 0);
4502fe8d
MJ
3252 if (ref)
3253 {
3254 if (dump_file)
3255 fprintf (dump_file, "ipa-prop: Removing "
3256 "cloning-created reference "
3257 "from %s/%i to %s/%i.\n",
fec39fa6 3258 xstrdup (clone->name ()),
67348ccc 3259 clone->order,
fec39fa6 3260 xstrdup (n->name ()),
67348ccc 3261 n->order);
d122681a 3262 ref->remove_reference ();
4502fe8d
MJ
3263 }
3264 clone = clone->callers->caller;
3265 }
3266 }
3267 }
3268 }
3269 }
3270
3271 for (i = ipa_get_param_count (old_root_info);
3272 i < ipa_get_cs_argument_count (args);
3273 i++)
3274 {
3275 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3276
3277 if (jf->type == IPA_JF_CONST)
3278 {
3279 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3280 if (rdesc)
3281 rdesc->refcount = IPA_UNDESCRIBED_USE;
3282 }
3283 else if (jf->type == IPA_JF_PASS_THROUGH)
3284 ipa_set_controlled_uses (new_root_info,
3285 jf->value.pass_through.formal_id,
3286 IPA_UNDESCRIBED_USE);
3287 }
3288}
3289
3e293154
MJ
3290/* Update jump functions and call note functions on inlining the call site CS.
3291 CS is expected to lead to a node already cloned by
3292 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3293 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3294 created. */
be95e2b9 3295
f8e2a1ed 3296bool
3e293154 3297ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 3298 vec<cgraph_edge_p> *new_edges)
3e293154 3299{
5ee53a06 3300 bool changed;
f8e2a1ed
MJ
3301 /* Do nothing if the preparation phase has not been carried out yet
3302 (i.e. during early inlining). */
9771b263 3303 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
3304 return false;
3305 gcc_assert (ipa_edge_args_vector);
3306
4502fe8d 3307 propagate_controlled_uses (cs);
5ee53a06
JH
3308 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3309
5ee53a06 3310 return changed;
518dc859
RL
3311}
3312
771578a0
MJ
3313/* Frees all dynamically allocated structures that the argument info points
3314 to. */
be95e2b9 3315
518dc859 3316void
771578a0 3317ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 3318{
9771b263 3319 vec_free (args->jump_functions);
771578a0 3320 memset (args, 0, sizeof (*args));
518dc859
RL
3321}
3322
771578a0 3323/* Free all ipa_edge structures. */
be95e2b9 3324
518dc859 3325void
771578a0 3326ipa_free_all_edge_args (void)
518dc859 3327{
771578a0
MJ
3328 int i;
3329 struct ipa_edge_args *args;
518dc859 3330
9771b263
DN
3331 if (!ipa_edge_args_vector)
3332 return;
3333
3334 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3335 ipa_free_edge_args_substructures (args);
3336
9771b263 3337 vec_free (ipa_edge_args_vector);
518dc859
RL
3338}
3339
771578a0
MJ
3340/* Frees all dynamically allocated structures that the param info points
3341 to. */
be95e2b9 3342
518dc859 3343void
771578a0 3344ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 3345{
9771b263 3346 info->descriptors.release ();
310bc633
MJ
3347 free (info->lattices);
3348 /* Lattice values and their sources are deallocated with their alocation
3349 pool. */
9771b263 3350 info->known_vals.release ();
771578a0 3351 memset (info, 0, sizeof (*info));
518dc859
RL
3352}
3353
771578a0 3354/* Free all ipa_node_params structures. */
be95e2b9 3355
518dc859 3356void
771578a0 3357ipa_free_all_node_params (void)
518dc859 3358{
771578a0
MJ
3359 int i;
3360 struct ipa_node_params *info;
518dc859 3361
9771b263 3362 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3363 ipa_free_node_params_substructures (info);
3364
9771b263 3365 ipa_node_params_vector.release ();
771578a0
MJ
3366}
3367
2c9561b5
MJ
3368/* Set the aggregate replacements of NODE to be AGGVALS. */
3369
3370void
3371ipa_set_node_agg_value_chain (struct cgraph_node *node,
3372 struct ipa_agg_replacement_value *aggvals)
3373{
9771b263
DN
3374 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3375 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3376
9771b263 3377 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3378}
3379
771578a0 3380/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3381
771578a0 3382static void
5c0466b5 3383ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3384{
568cda29
MJ
3385 struct ipa_edge_args *args;
3386
3387 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3388 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3389 return;
568cda29
MJ
3390
3391 args = IPA_EDGE_REF (cs);
3392 if (args->jump_functions)
3393 {
3394 struct ipa_jump_func *jf;
3395 int i;
3396 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3397 {
3398 struct ipa_cst_ref_desc *rdesc;
3399 try_decrement_rdesc_refcount (jf);
3400 if (jf->type == IPA_JF_CONST
3401 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3402 && rdesc->cs == cs)
3403 rdesc->cs = NULL;
3404 }
568cda29
MJ
3405 }
3406
771578a0 3407 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3408}
3409
771578a0 3410/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3411
771578a0 3412static void
5c0466b5 3413ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3414{
dd6d1ad7 3415 /* During IPA-CP updating we can be called on not-yet analyze clones. */
9771b263 3416 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3417 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3418 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3419 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3420}
3421
8b7773a4 3422/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3423
771578a0
MJ
3424static void
3425ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3426 __attribute__((unused)) void *data)
771578a0
MJ
3427{
3428 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3429 unsigned int i;
771578a0
MJ
3430
3431 ipa_check_create_edge_args ();
3432
3433 old_args = IPA_EDGE_REF (src);
3434 new_args = IPA_EDGE_REF (dst);
3435
9771b263 3436 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3437
9771b263 3438 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3439 {
3440 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3441 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3442
3443 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3444
3445 if (src_jf->type == IPA_JF_CONST)
3446 {
3447 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3448
3449 if (!src_rdesc)
3450 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3451 else if (src->caller == dst->caller)
3452 {
3453 struct ipa_ref *ref;
5e20cdc9 3454 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3455 gcc_checking_assert (n);
d122681a
ML
3456 ref = src->caller->find_reference (n, src->call_stmt,
3457 src->lto_stmt_uid);
568cda29 3458 gcc_checking_assert (ref);
d122681a 3459 dst->caller->clone_reference (ref, ref->stmt);
568cda29
MJ
3460
3461 gcc_checking_assert (ipa_refdesc_pool);
3462 struct ipa_cst_ref_desc *dst_rdesc
3463 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3464 dst_rdesc->cs = dst;
3465 dst_rdesc->refcount = src_rdesc->refcount;
3466 dst_rdesc->next_duplicate = NULL;
3467 dst_jf->value.constant.rdesc = dst_rdesc;
3468 }
4502fe8d
MJ
3469 else if (src_rdesc->cs == src)
3470 {
3471 struct ipa_cst_ref_desc *dst_rdesc;
3472 gcc_checking_assert (ipa_refdesc_pool);
3473 dst_rdesc
3474 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3475 dst_rdesc->cs = dst;
4502fe8d 3476 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3477 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3478 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3479 dst_jf->value.constant.rdesc = dst_rdesc;
3480 }
3481 else
3482 {
3483 struct ipa_cst_ref_desc *dst_rdesc;
3484 /* This can happen during inlining, when a JFUNC can refer to a
3485 reference taken in a function up in the tree of inline clones.
3486 We need to find the duplicate that refers to our tree of
3487 inline clones. */
3488
3489 gcc_assert (dst->caller->global.inlined_to);
3490 for (dst_rdesc = src_rdesc->next_duplicate;
3491 dst_rdesc;
3492 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3493 {
3494 struct cgraph_node *top;
3495 top = dst_rdesc->cs->caller->global.inlined_to
3496 ? dst_rdesc->cs->caller->global.inlined_to
3497 : dst_rdesc->cs->caller;
3498 if (dst->caller->global.inlined_to == top)
3499 break;
3500 }
44a60244 3501 gcc_assert (dst_rdesc);
4502fe8d
MJ
3502 dst_jf->value.constant.rdesc = dst_rdesc;
3503 }
3504 }
3505 }
771578a0
MJ
3506}
3507
3508/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3509
771578a0
MJ
3510static void
3511ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3512 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3513{
3514 struct ipa_node_params *old_info, *new_info;
2c9561b5 3515 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3516
3517 ipa_check_create_node_params ();
3518 old_info = IPA_NODE_REF (src);
3519 new_info = IPA_NODE_REF (dst);
771578a0 3520
9771b263 3521 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3522 new_info->lattices = NULL;
771578a0 3523 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3524
8aab5218 3525 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3526 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3527
3528 old_av = ipa_get_agg_replacements_for_node (src);
3529 if (!old_av)
3530 return;
3531
3532 new_av = NULL;
3533 while (old_av)
3534 {
3535 struct ipa_agg_replacement_value *v;
3536
766090c2 3537 v = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
3538 memcpy (v, old_av, sizeof (*v));
3539 v->next = new_av;
3540 new_av = v;
3541 old_av = old_av->next;
3542 }
3543 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3544}
3545
40982661
JH
3546
3547/* Analyze newly added function into callgraph. */
3548
3549static void
3550ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3551{
0136f8f0
AH
3552 if (cgraph_function_with_gimple_body_p (node))
3553 ipa_analyze_node (node);
40982661
JH
3554}
3555
771578a0 3556/* Register our cgraph hooks if they are not already there. */
be95e2b9 3557
518dc859 3558void
771578a0 3559ipa_register_cgraph_hooks (void)
518dc859 3560{
771578a0
MJ
3561 if (!edge_removal_hook_holder)
3562 edge_removal_hook_holder =
3563 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3564 if (!node_removal_hook_holder)
3565 node_removal_hook_holder =
3566 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3567 if (!edge_duplication_hook_holder)
3568 edge_duplication_hook_holder =
3569 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3570 if (!node_duplication_hook_holder)
3571 node_duplication_hook_holder =
3572 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3573 function_insertion_hook_holder =
3574 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3575}
518dc859 3576
771578a0 3577/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3578
771578a0
MJ
3579static void
3580ipa_unregister_cgraph_hooks (void)
3581{
3582 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3583 edge_removal_hook_holder = NULL;
3584 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3585 node_removal_hook_holder = NULL;
3586 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3587 edge_duplication_hook_holder = NULL;
3588 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3589 node_duplication_hook_holder = NULL;
40982661
JH
3590 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3591 function_insertion_hook_holder = NULL;
771578a0
MJ
3592}
3593
3594/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3595 longer needed after ipa-cp. */
be95e2b9 3596
771578a0 3597void
e33c6cd6 3598ipa_free_all_structures_after_ipa_cp (void)
3e293154 3599{
5ee53a06 3600 if (!optimize)
3e293154
MJ
3601 {
3602 ipa_free_all_edge_args ();
3603 ipa_free_all_node_params ();
310bc633
MJ
3604 free_alloc_pool (ipcp_sources_pool);
3605 free_alloc_pool (ipcp_values_pool);
2c9561b5 3606 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3607 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3608 if (ipa_refdesc_pool)
3609 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3610 }
3611}
3612
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* The pools below are created lazily (presumably only when IPA-CP
     actually ran — confirm against the allocation sites), hence the NULL
     checks before each free.  */
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
3631
/* Print the parameter descriptors of NODE to F, one per line, including
   usage and controlled-uses information.  Does nothing for nodes without
   a definition.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      /* IPA_UNDESCRIBED_USE means the number of controlled uses could not
	 be determined for this parameter.  */
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
dcd416e3 3663
/* Print parameter descriptors of all functions in the callgraph to F by
   delegating each node to ipa_print_node_params.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
3f84bf08
MJ
3676
3677/* Return a heap allocated vector containing formal parameters of FNDECL. */
3678
9771b263 3679vec<tree>
3f84bf08
MJ
3680ipa_get_vector_of_formal_parms (tree fndecl)
3681{
9771b263 3682 vec<tree> args;
3f84bf08
MJ
3683 int count;
3684 tree parm;
3685
0e8853ee 3686 gcc_assert (!flag_wpa);
310bc633 3687 count = count_formal_params (fndecl);
9771b263 3688 args.create (count);
910ad8de 3689 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3690 args.quick_push (parm);
3f84bf08
MJ
3691
3692 return args;
3693}
3694
3695/* Return a heap allocated vector containing types of formal parameters of
3696 function type FNTYPE. */
3697
31519c38
AH
3698vec<tree>
3699ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3700{
9771b263 3701 vec<tree> types;
3f84bf08
MJ
3702 int count = 0;
3703 tree t;
3704
3705 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3706 count++;
3707
9771b263 3708 types.create (count);
3f84bf08 3709 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3710 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3711
3712 return types;
3713}
3714
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      /* A trailing void marks a (...) - less, fixed-argument prototype; it
	 makes the type list one entry longer than the DECL_ARGUMENTS
	 chain.  */
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Rebuild both the DECL_ARGUMENTS chain (via LINK) and the reversed
     TYPE_ARG_TYPES list in one walk over ADJUSTMENTS.  */
  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Keep the original PARM_DECL; just re-link it into the new
	     chain.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  /* IPA_PARM_OP_NEW or a replacement: synthesize a fresh
	     PARM_DECL.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  /* Make sure the register type is at least mode-aligned.  */
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* Re-append the trailing void marker that the loop above never
	     produced.  After nreverse, NEW_ARG_TYPES is the tail.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  tree t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
3880
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.

   STMT is replaced in-place by a freshly built call statement; LHS, block,
   location, static chain, call flags and virtual operands are carried over
   from the original.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  /* References from the old statement are dropped now and re-recorded for
     all newly inserted statements at the end of this function.  */
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  /* Remember the statement just before STMT so that everything inserted
     between PREV_GSI and STMT can be found again below.  */
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* BASE is already a pointer; the adjustment offset becomes the
	       MEM_REF offset directly.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      /* The original access dereferenced memory; remember
			 its alignment for the MEM_REF built below.  */
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      /* Fold the constant offset into the misalignment and reduce
		 the access type's alignment accordingly if needed.  */
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr), NULL);
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      /* For every dropped or transformed argument, emit a debug bind so
	 the old parameter value stays visible to the debugger.  */
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS stores (origin, debug-decl) pairs, hence ix += 2.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  /* Walk backwards from the new call to the statement that originally
     preceded STMT, re-recording references for everything inserted in
     between (including the new call itself).  */
  do
    {
      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
4128
31519c38
AH
4129/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4130 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4131 specifies whether the function should care about type incompatibility the
4132 current and new expressions. If it is false, the function will leave
4133 incompatibility issues to the caller. Return true iff the expression
4134 was modified. */
4135
4136bool
4137ipa_modify_expr (tree *expr, bool convert,
4138 ipa_parm_adjustment_vec adjustments)
4139{
4140 struct ipa_parm_adjustment *cand
4141 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4142 if (!cand)
4143 return false;
4144
4145 tree src;
4146 if (cand->by_ref)
4147 src = build_simple_mem_ref (cand->new_decl);
4148 else
4149 src = cand->new_decl;
4150
4151 if (dump_file && (dump_flags & TDF_DETAILS))
4152 {
4153 fprintf (dump_file, "About to replace expr ");
4154 print_generic_expr (dump_file, *expr, 0);
4155 fprintf (dump_file, " with ");
4156 print_generic_expr (dump_file, src, 0);
4157 fprintf (dump_file, "\n");
4158 }
4159
4160 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4161 {
4162 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4163 *expr = vce;
4164 }
4165 else
4166 *expr = src;
4167 return true;
4168}
4169
4170/* If T is an SSA_NAME, return NULL if it is not a default def or
4171 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4172 the base variable is always returned, regardless if it is a default
4173 def. Return T if it is not an SSA_NAME. */
4174
4175static tree
4176get_ssa_base_param (tree t, bool ignore_default_def)
4177{
4178 if (TREE_CODE (t) == SSA_NAME)
4179 {
4180 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4181 return SSA_NAME_VAR (t);
4182 else
4183 return NULL_TREE;
4184 }
4185 return t;
4186}
4187
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  Note that *EXPR may be moved to point at an
   inner operand of the original expression.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  /* Look through wrappers that only reinterpret part of the value; the
     adjustment applies to the underlying object, but a conversion will
     be needed.  */
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into OFFSET (bits).  */
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  /* Only replacement adjustments are interesting to callers; plain copies
     and removals yield no candidate.  */
  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
4246
3f84bf08
MJ
4247/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4248
4249static bool
4250index_in_adjustments_multiple_times_p (int base_index,
4251 ipa_parm_adjustment_vec adjustments)
4252{
9771b263 4253 int i, len = adjustments.length ();
3f84bf08
MJ
4254 bool one = false;
4255
4256 for (i = 0; i < len; i++)
4257 {
4258 struct ipa_parm_adjustment *adj;
9771b263 4259 adj = &adjustments[i];
3f84bf08
MJ
4260
4261 if (adj->base_index == base_index)
4262 {
4263 if (one)
4264 return true;
4265 else
4266 one = true;
4267 }
4268 }
4269 return false;
4270}
4271
4272
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP keeps the surviving (non-removed) inner adjustments so that OUTER's
     base_index values, which refer to the post-INNER parameter order, can
     be used to index it directly.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  /* Only emit the removal once per original parameter; duplicated
	     base indices would otherwise produce multiple removals.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Compose the offsets: a copy contributes nothing, two real
	 adjustments add up.  */
      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Removals recorded by INNER survive the composition unchanged.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
4353
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* Adjustments that are neither copies nor removals carry an offset
	 into the original parameter.  */
      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
4407
2c9561b5
MJ
4408/* Dump the AV linked list. */
4409
4410void
4411ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4412{
4413 bool comma = false;
4414 fprintf (f, " Aggregate replacements:");
4415 for (; av; av = av->next)
4416 {
4417 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4418 av->index, av->offset);
4419 print_generic_expr (f, av->value, 0);
4420 comma = true;
4421 }
4422 fprintf (f, "\n");
4423}
4424
fb3f88cc
JH
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must
   stay in sync with ipa_read_jump_function below.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* Nothing beyond the type tag itself.  */
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      /* A NOP_EXPR pass-through has no operand but carries two preserved
	 bits; arithmetic pass-throughs carry the operand instead.  */
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* The aggregate part is common to all jump function kinds.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
4492
/* Read in jump function JUMP_FUNC from IB.  Reads must mirror the writes in
   ipa_write_jump_function above.  CS is the edge the jump function belongs
   to and DATA_IN provides the tree-streaming context.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      /* NOP_EXPR pass-throughs were written without an operand but with a
	 bitpack; arithmetic ones were written with the operand.  */
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  /* Read the aggregate part common to all jump function kinds.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
4572
e33c6cd6
MJ
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The order and widths written here
   must be kept in sync with ipa_read_indirect_edge_info below.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->maybe_in_construction, 1);
  bp_pack_value (&bp, ii->maybe_derived_type, 1);
  streamer_write_bitpack (&bp);

  /* Only polymorphic calls carry the OBJ_TYPE_REF related fields.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      stream_write_tree (ob, ii->outer_type, true);
    }
}
4601
e33c6cd6
MJ
4602/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4603 relevant to indirect inlining from IB. */
661e7330
MJ
4604
4605static void
e33c6cd6
MJ
4606ipa_read_indirect_edge_info (struct lto_input_block *ib,
4607 struct data_in *data_in ATTRIBUTE_UNUSED,
4608 struct cgraph_edge *cs)
661e7330 4609{
e33c6cd6 4610 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4611 struct bitpack_d bp;
661e7330 4612
412288f1 4613 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4614 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4615 bp = streamer_read_bitpack (ib);
2465dcc2 4616 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4617 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4618 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4619 ii->by_ref = bp_unpack_value (&bp, 1);
68377e53
JH
4620 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4621 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
b258210c
MJ
4622 if (ii->polymorphic)
4623 {
412288f1 4624 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4625 ii->otr_type = stream_read_tree (ib, data_in);
68377e53 4626 ii->outer_type = stream_read_tree (ib, data_in);
b258210c 4627 }
661e7330
MJ
4628}
4629
fb3f88cc
JH
4630/* Stream out NODE info to OB. */
4631
4632static void
4633ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4634{
4635 int node_ref;
7380e6ef 4636 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4637 struct ipa_node_params *info = IPA_NODE_REF (node);
4638 int j;
4639 struct cgraph_edge *e;
2465dcc2 4640 struct bitpack_d bp;
fb3f88cc 4641
7380e6ef 4642 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4643 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4644 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4645
0e8853ee
JH
4646 streamer_write_uhwi (ob, ipa_get_param_count (info));
4647 for (j = 0; j < ipa_get_param_count (info); j++)
4648 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4649 bp = bitpack_create (ob->main_stream);
8aab5218 4650 gcc_assert (info->analysis_done
661e7330 4651 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4652 gcc_assert (!info->node_enqueued);
4653 gcc_assert (!info->ipcp_orig_node);
4654 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4655 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4656 streamer_write_bitpack (&bp);
4502fe8d
MJ
4657 for (j = 0; j < ipa_get_param_count (info); j++)
4658 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4659 for (e = node->callees; e; e = e->next_callee)
4660 {
4661 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4662
412288f1 4663 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4664 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4665 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4666 }
e33c6cd6 4667 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4668 {
4669 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4670
412288f1 4671 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4672 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4673 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4674 ipa_write_indirect_edge_info (ob, e);
4675 }
fb3f88cc
JH
4676}
4677
61502ca8 4678/* Stream in NODE info from IB. */
fb3f88cc
JH
4679
4680static void
4681ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4682 struct data_in *data_in)
4683{
4684 struct ipa_node_params *info = IPA_NODE_REF (node);
4685 int k;
4686 struct cgraph_edge *e;
2465dcc2 4687 struct bitpack_d bp;
fb3f88cc 4688
0e8853ee 4689 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4690
0e8853ee
JH
4691 for (k = 0; k < ipa_get_param_count (info); k++)
4692 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4693
412288f1 4694 bp = streamer_read_bitpack (ib);
fb3f88cc 4695 if (ipa_get_param_count (info) != 0)
8aab5218 4696 info->analysis_done = true;
fb3f88cc
JH
4697 info->node_enqueued = false;
4698 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4699 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4700 for (k = 0; k < ipa_get_param_count (info); k++)
4701 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4702 for (e = node->callees; e; e = e->next_callee)
4703 {
4704 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4705 int count = streamer_read_uhwi (ib);
fb3f88cc 4706
fb3f88cc
JH
4707 if (!count)
4708 continue;
9771b263 4709 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4710
fb3f88cc 4711 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4712 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4713 data_in);
fb3f88cc 4714 }
e33c6cd6 4715 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4716 {
4717 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4718 int count = streamer_read_uhwi (ib);
c8246dbe 4719
c8246dbe
JH
4720 if (count)
4721 {
9771b263 4722 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4723 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4724 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4725 data_in);
c8246dbe
JH
4726 }
4727 ipa_read_indirect_edge_info (ib, data_in, e);
4728 }
fb3f88cc
JH
4729}
4730
4731/* Write jump functions for nodes in SET. */
4732
4733void
f27c1867 4734ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4735{
4736 struct cgraph_node *node;
93536c97 4737 struct output_block *ob;
fb3f88cc 4738 unsigned int count = 0;
f27c1867
JH
4739 lto_symtab_encoder_iterator lsei;
4740 lto_symtab_encoder_t encoder;
4741
fb3f88cc 4742
9771b263 4743 if (!ipa_node_params_vector.exists ())
93536c97 4744 return;
fb3f88cc 4745
93536c97 4746 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4747 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4748 ob->cgraph_node = NULL;
f27c1867
JH
4749 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4750 lsei_next_function_in_partition (&lsei))
fb3f88cc 4751 {
f27c1867 4752 node = lsei_cgraph_node (lsei);
c47d0034
JH
4753 if (cgraph_function_with_gimple_body_p (node)
4754 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4755 count++;
4756 }
4757
412288f1 4758 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4759
4760 /* Process all of the functions. */
f27c1867
JH
4761 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4762 lsei_next_function_in_partition (&lsei))
fb3f88cc 4763 {
f27c1867 4764 node = lsei_cgraph_node (lsei);
c47d0034
JH
4765 if (cgraph_function_with_gimple_body_p (node)
4766 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4767 ipa_write_node_info (ob, node);
4768 }
412288f1 4769 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4770 produce_asm (ob, NULL);
4771 destroy_output_block (ob);
4772}
4773
4774/* Read section in file FILE_DATA of length LEN with data DATA. */
4775
4776static void
4777ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4778 size_t len)
4779{
4780 const struct lto_function_header *header =
4781 (const struct lto_function_header *) data;
4ad9a9de
EB
4782 const int cfg_offset = sizeof (struct lto_function_header);
4783 const int main_offset = cfg_offset + header->cfg_size;
4784 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4785 struct data_in *data_in;
4786 struct lto_input_block ib_main;
4787 unsigned int i;
4788 unsigned int count;
4789
4790 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4791 header->main_size);
4792
4793 data_in =
4794 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4795 header->string_size, vNULL);
412288f1 4796 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4797
4798 for (i = 0; i < count; i++)
4799 {
4800 unsigned int index;
4801 struct cgraph_node *node;
7380e6ef 4802 lto_symtab_encoder_t encoder;
fb3f88cc 4803
412288f1 4804 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4805 encoder = file_data->symtab_node_encoder;
4806 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4807 gcc_assert (node->definition);
fb3f88cc
JH
4808 ipa_read_node_info (&ib_main, node, data_in);
4809 }
4810 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4811 len);
4812 lto_data_in_delete (data_in);
4813}
4814
4815/* Read ipcp jump functions. */
4816
4817void
4818ipa_prop_read_jump_functions (void)
4819{
4820 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4821 struct lto_file_decl_data *file_data;
4822 unsigned int j = 0;
4823
4824 ipa_check_create_node_params ();
4825 ipa_check_create_edge_args ();
4826 ipa_register_cgraph_hooks ();
4827
4828 while ((file_data = file_data_vec[j++]))
4829 {
4830 size_t len;
4831 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4832
4833 if (data)
4834 ipa_prop_read_section (file_data, data, len);
4835 }
4836}
4837
b8698a0f 4838/* After merging units, we can get mismatch in argument counts.
61502ca8 4839 Also decl merging might've rendered parameter lists obsolete.
fb3f88cc
JH
4840 Also compute called_with_variable_arg info. */
4841
4842void
4843ipa_update_after_lto_read (void)
4844{
05d3aa37
MJ
4845 ipa_check_create_node_params ();
4846 ipa_check_create_edge_args ();
fb3f88cc 4847}
2c9561b5
MJ
4848
4849void
4850write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4851{
4852 int node_ref;
4853 unsigned int count = 0;
4854 lto_symtab_encoder_t encoder;
4855 struct ipa_agg_replacement_value *aggvals, *av;
4856
4857 aggvals = ipa_get_agg_replacements_for_node (node);
4858 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4859 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4860 streamer_write_uhwi (ob, node_ref);
4861
4862 for (av = aggvals; av; av = av->next)
4863 count++;
4864 streamer_write_uhwi (ob, count);
4865
4866 for (av = aggvals; av; av = av->next)
4867 {
7b920a9a
MJ
4868 struct bitpack_d bp;
4869
2c9561b5
MJ
4870 streamer_write_uhwi (ob, av->offset);
4871 streamer_write_uhwi (ob, av->index);
4872 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4873
4874 bp = bitpack_create (ob->main_stream);
4875 bp_pack_value (&bp, av->by_ref, 1);
4876 streamer_write_bitpack (&bp);
2c9561b5
MJ
4877 }
4878}
4879
4880/* Stream in the aggregate value replacement chain for NODE from IB. */
4881
4882static void
4883read_agg_replacement_chain (struct lto_input_block *ib,
4884 struct cgraph_node *node,
4885 struct data_in *data_in)
4886{
4887 struct ipa_agg_replacement_value *aggvals = NULL;
4888 unsigned int count, i;
4889
4890 count = streamer_read_uhwi (ib);
4891 for (i = 0; i <count; i++)
4892 {
4893 struct ipa_agg_replacement_value *av;
7b920a9a 4894 struct bitpack_d bp;
2c9561b5 4895
766090c2 4896 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
4897 av->offset = streamer_read_uhwi (ib);
4898 av->index = streamer_read_uhwi (ib);
4899 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4900 bp = streamer_read_bitpack (ib);
4901 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4902 av->next = aggvals;
4903 aggvals = av;
4904 }
4905 ipa_set_node_agg_value_chain (node, aggvals);
4906}
4907
4908/* Write all aggregate replacement for nodes in set. */
4909
4910void
4911ipa_prop_write_all_agg_replacement (void)
4912{
4913 struct cgraph_node *node;
4914 struct output_block *ob;
4915 unsigned int count = 0;
4916 lto_symtab_encoder_iterator lsei;
4917 lto_symtab_encoder_t encoder;
4918
4919 if (!ipa_node_agg_replacements)
4920 return;
4921
4922 ob = create_output_block (LTO_section_ipcp_transform);
4923 encoder = ob->decl_state->symtab_node_encoder;
4924 ob->cgraph_node = NULL;
4925 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4926 lsei_next_function_in_partition (&lsei))
4927 {
4928 node = lsei_cgraph_node (lsei);
4929 if (cgraph_function_with_gimple_body_p (node)
4930 && ipa_get_agg_replacements_for_node (node) != NULL)
4931 count++;
4932 }
4933
4934 streamer_write_uhwi (ob, count);
4935
4936 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4937 lsei_next_function_in_partition (&lsei))
4938 {
4939 node = lsei_cgraph_node (lsei);
4940 if (cgraph_function_with_gimple_body_p (node)
4941 && ipa_get_agg_replacements_for_node (node) != NULL)
4942 write_agg_replacement_chain (ob, node);
4943 }
4944 streamer_write_char_stream (ob->main_stream, 0);
4945 produce_asm (ob, NULL);
4946 destroy_output_block (ob);
4947}
4948
4949/* Read replacements section in file FILE_DATA of length LEN with data
4950 DATA. */
4951
4952static void
4953read_replacements_section (struct lto_file_decl_data *file_data,
4954 const char *data,
4955 size_t len)
4956{
4957 const struct lto_function_header *header =
4958 (const struct lto_function_header *) data;
4959 const int cfg_offset = sizeof (struct lto_function_header);
4960 const int main_offset = cfg_offset + header->cfg_size;
4961 const int string_offset = main_offset + header->main_size;
4962 struct data_in *data_in;
4963 struct lto_input_block ib_main;
4964 unsigned int i;
4965 unsigned int count;
4966
4967 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4968 header->main_size);
4969
4970 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4971 header->string_size, vNULL);
2c9561b5
MJ
4972 count = streamer_read_uhwi (&ib_main);
4973
4974 for (i = 0; i < count; i++)
4975 {
4976 unsigned int index;
4977 struct cgraph_node *node;
4978 lto_symtab_encoder_t encoder;
4979
4980 index = streamer_read_uhwi (&ib_main);
4981 encoder = file_data->symtab_node_encoder;
4982 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4983 gcc_assert (node->definition);
2c9561b5
MJ
4984 read_agg_replacement_chain (&ib_main, node, data_in);
4985 }
4986 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4987 len);
4988 lto_data_in_delete (data_in);
4989}
4990
4991/* Read IPA-CP aggregate replacements. */
4992
4993void
4994ipa_prop_read_all_agg_replacement (void)
4995{
4996 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4997 struct lto_file_decl_data *file_data;
4998 unsigned int j = 0;
4999
5000 while ((file_data = file_data_vec[j++]))
5001 {
5002 size_t len;
5003 const char *data = lto_get_section_data (file_data,
5004 LTO_section_ipcp_transform,
5005 NULL, &len);
5006 if (data)
5007 read_replacements_section (file_data, data, len);
5008 }
5009}
5010
5011/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5012 NODE. */
5013
5014static void
5015adjust_agg_replacement_values (struct cgraph_node *node,
5016 struct ipa_agg_replacement_value *aggval)
5017{
5018 struct ipa_agg_replacement_value *v;
5019 int i, c = 0, d = 0, *adj;
5020
5021 if (!node->clone.combined_args_to_skip)
5022 return;
5023
5024 for (v = aggval; v; v = v->next)
5025 {
5026 gcc_assert (v->index >= 0);
5027 if (c < v->index)
5028 c = v->index;
5029 }
5030 c++;
5031
5032 adj = XALLOCAVEC (int, c);
5033 for (i = 0; i < c; i++)
5034 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5035 {
5036 adj[i] = -1;
5037 d++;
5038 }
5039 else
5040 adj[i] = i - d;
5041
5042 for (v = aggval; v; v = v->next)
5043 v->index = adj[v->index];
5044}
5045
8aab5218
MJ
5046/* Dominator walker driving the ipcp modification phase. */
5047
5048class ipcp_modif_dom_walker : public dom_walker
5049{
5050public:
5051 ipcp_modif_dom_walker (struct func_body_info *fbi,
5052 vec<ipa_param_descriptor> descs,
5053 struct ipa_agg_replacement_value *av,
5054 bool *sc, bool *cc)
5055 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5056 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5057
5058 virtual void before_dom_children (basic_block);
5059
5060private:
5061 struct func_body_info *m_fbi;
5062 vec<ipa_param_descriptor> m_descriptors;
5063 struct ipa_agg_replacement_value *m_aggval;
5064 bool *m_something_changed, *m_cfg_changed;
5065};
5066
5067void
5068ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5069{
5070 gimple_stmt_iterator gsi;
5071 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5072 {
5073 struct ipa_agg_replacement_value *v;
5074 gimple stmt = gsi_stmt (gsi);
5075 tree rhs, val, t;
5076 HOST_WIDE_INT offset, size;
5077 int index;
5078 bool by_ref, vce;
5079
5080 if (!gimple_assign_load_p (stmt))
5081 continue;
5082 rhs = gimple_assign_rhs1 (stmt);
5083 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5084 continue;
2c9561b5 5085
8aab5218
MJ
5086 vce = false;
5087 t = rhs;
5088 while (handled_component_p (t))
5089 {
5090 /* V_C_E can do things like convert an array of integers to one
5091 bigger integer and similar things we do not handle below. */
5092 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5093 {
5094 vce = true;
5095 break;
5096 }
5097 t = TREE_OPERAND (t, 0);
5098 }
5099 if (vce)
5100 continue;
5101
5102 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5103 &offset, &size, &by_ref))
5104 continue;
5105 for (v = m_aggval; v; v = v->next)
5106 if (v->index == index
5107 && v->offset == offset)
5108 break;
5109 if (!v
5110 || v->by_ref != by_ref
5111 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5112 continue;
5113
5114 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5115 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5116 {
5117 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5118 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5119 else if (TYPE_SIZE (TREE_TYPE (rhs))
5120 == TYPE_SIZE (TREE_TYPE (v->value)))
5121 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5122 else
5123 {
5124 if (dump_file)
5125 {
5126 fprintf (dump_file, " const ");
5127 print_generic_expr (dump_file, v->value, 0);
5128 fprintf (dump_file, " can't be converted to type of ");
5129 print_generic_expr (dump_file, rhs, 0);
5130 fprintf (dump_file, "\n");
5131 }
5132 continue;
5133 }
5134 }
5135 else
5136 val = v->value;
5137
5138 if (dump_file && (dump_flags & TDF_DETAILS))
5139 {
5140 fprintf (dump_file, "Modifying stmt:\n ");
5141 print_gimple_stmt (dump_file, stmt, 0, 0);
5142 }
5143 gimple_assign_set_rhs_from_tree (&gsi, val);
5144 update_stmt (stmt);
5145
5146 if (dump_file && (dump_flags & TDF_DETAILS))
5147 {
5148 fprintf (dump_file, "into:\n ");
5149 print_gimple_stmt (dump_file, stmt, 0, 0);
5150 fprintf (dump_file, "\n");
5151 }
5152
5153 *m_something_changed = true;
5154 if (maybe_clean_eh_stmt (stmt)
5155 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5156 *m_cfg_changed = true;
5157 }
5158
5159}
5160
5161/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5162
5163unsigned int
5164ipcp_transform_function (struct cgraph_node *node)
5165{
84562394 5166 vec<ipa_param_descriptor> descriptors = vNULL;
8aab5218 5167 struct func_body_info fbi;
2c9561b5 5168 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5169 int param_count;
5170 bool cfg_changed = false, something_changed = false;
5171
5172 gcc_checking_assert (cfun);
5173 gcc_checking_assert (current_function_decl);
5174
5175 if (dump_file)
5176 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 5177 node->name (), node->order);
2c9561b5
MJ
5178
5179 aggval = ipa_get_agg_replacements_for_node (node);
5180 if (!aggval)
5181 return 0;
67348ccc 5182 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5183 if (param_count == 0)
5184 return 0;
5185 adjust_agg_replacement_values (node, aggval);
5186 if (dump_file)
5187 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5188
8aab5218
MJ
5189 fbi.node = node;
5190 fbi.info = NULL;
5191 fbi.bb_infos = vNULL;
5192 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5193 fbi.param_count = param_count;
5194 fbi.aa_walked = 0;
2c9561b5 5195
8aab5218
MJ
5196 descriptors.safe_grow_cleared (param_count);
5197 ipa_populate_param_decls (node, descriptors);
5198 calculate_dominance_info (CDI_DOMINATORS);
5199 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5200 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5201
8aab5218
MJ
5202 int i;
5203 struct ipa_bb_info *bi;
5204 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5205 free_ipa_bb_info (bi);
5206 fbi.bb_infos.release ();
5207 free_dominance_info (CDI_DOMINATORS);
9771b263 5208 (*ipa_node_agg_replacements)[node->uid] = NULL;
9771b263 5209 descriptors.release ();
2c9561b5
MJ
5210
5211 if (!something_changed)
5212 return 0;
5213 else if (cfg_changed)
5214 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5215 else
5216 return TODO_update_ssa_only_virtuals;
5217}