[thirdparty/gcc.git] / gcc / ipa-prop.c
518dc859 1/* Interprocedural analyses.
d1e082c2 2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
2fb9a547
AM
24#include "basic-block.h"
25#include "tree-ssa-alias.h"
26#include "internal-fn.h"
27#include "gimple-fold.h"
28#include "tree-eh.h"
29#include "gimple-expr.h"
30#include "is-a.h"
18f429e2 31#include "gimple.h"
d8a2d370
DN
32#include "expr.h"
33#include "stor-layout.h"
34#include "print-tree.h"
45b0be94 35#include "gimplify.h"
5be5c238 36#include "gimple-iterator.h"
18f429e2 37#include "gimplify-me.h"
5be5c238 38#include "gimple-walk.h"
518dc859 39#include "langhooks.h"
518dc859 40#include "target.h"
518dc859 41#include "ipa-prop.h"
442b4905
AM
42#include "bitmap.h"
43#include "gimple-ssa.h"
44#include "tree-cfg.h"
45#include "tree-phinodes.h"
46#include "ssa-iterators.h"
47#include "tree-into-ssa.h"
48#include "tree-dfa.h"
518dc859 49#include "tree-pass.h"
771578a0 50#include "tree-inline.h"
0f378cb5 51#include "ipa-inline.h"
518dc859 52#include "flags.h"
3e293154 53#include "diagnostic.h"
cf835838 54#include "gimple-pretty-print.h"
fb3f88cc 55#include "lto-streamer.h"
f0efc7aa
DN
56#include "data-streamer.h"
57#include "tree-streamer.h"
dfea20f1 58#include "params.h"
450ad0cd 59#include "ipa-utils.h"
771578a0 60
062c604f
MJ
61/* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
63
64struct param_analysis_info
65{
8b7773a4
MJ
66 bool parm_modified, ref_modified, pt_modified;
67 bitmap parm_visited_statements, pt_visited_statements;
062c604f
MJ
68};
69
771578a0 70/* Vector where the parameter infos are actually stored. */
84562394 71vec<ipa_node_params> ipa_node_params_vector;
2c9561b5 72/* Vector of known aggregate values in cloned nodes. */
9771b263 73vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
771578a0 74/* Vector where the edge argument (jump function) infos are actually stored. */
84562394 75vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
771578a0
MJ
76
77/* Holders of ipa cgraph hooks: */
e2c9111c
JH
78static struct cgraph_edge_hook_list *edge_removal_hook_holder;
79static struct cgraph_node_hook_list *node_removal_hook_holder;
80static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
81static struct cgraph_2node_hook_list *node_duplication_hook_holder;
40982661 82static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 83
4502fe8d
MJ
84/* Description of a reference to an IPA constant. */
85struct ipa_cst_ref_desc
86{
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge *cs;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc *next_duplicate;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
92 is out of control. */
93 int refcount;
94};
95
96/* Allocation pool for reference descriptions. */
97
98static alloc_pool ipa_refdesc_pool;
99
5fe8e757
MJ
100/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
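/* Illustrative note (an assumption about typical usage, not taken from this
   file): a function declared with __attribute__ ((optimize ("O0"))) carries a
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is zero, so the
   predicate below returns true and IPA-CP leaves that function alone.  */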
102
103static bool
104ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
105{
67348ccc 106 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
107 struct cl_optimization *os;
108
109 if (!fs_opts)
110 return false;
111 os = TREE_OPTIMIZATION (fs_opts);
112 return !os->x_optimize || !os->x_flag_ipa_cp;
113}
114
be95e2b9
MJ
115/* Return index of the formal parameter whose tree is PTREE, looked up in
116 DESCRIPTORS, or -1 if it is not found. */
117
d044dd17 118static int
84562394 119ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
120{
121 int i, count;
122
9771b263 123 count = descriptors.length ();
518dc859 124 for (i = 0; i < count; i++)
9771b263 125 if (descriptors[i].decl == ptree)
518dc859
RL
126 return i;
127
128 return -1;
129}
130
d044dd17
MJ
131/* Return index of the formal whose tree is PTREE in function which corresponds
132 to INFO. */
133
134int
135ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
136{
137 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
138}
139
140/* Populate the decl fields in parameter DESCRIPTORS that correspond to
141 NODE. */
be95e2b9 142
f8e2a1ed
MJ
143static void
144ipa_populate_param_decls (struct cgraph_node *node,
84562394 145 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
146{
147 tree fndecl;
148 tree fnargs;
149 tree parm;
150 int param_num;
3e293154 151
67348ccc 152 fndecl = node->decl;
0e8853ee 153 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
154 fnargs = DECL_ARGUMENTS (fndecl);
155 param_num = 0;
910ad8de 156 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 157 {
9771b263 158 descriptors[param_num].decl = parm;
0e8853ee 159 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
518dc859
RL
160 param_num++;
161 }
162}
163
3f84bf08
MJ
164/* Return how many formal parameters FNDECL has. */
165
166static inline int
310bc633 167count_formal_params (tree fndecl)
3f84bf08
MJ
168{
169 tree parm;
170 int count = 0;
0e8853ee 171 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 172
910ad8de 173 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
174 count++;
175
176 return count;
177}
178
0e8853ee
JH
179/* Dump the Ith formal parameter of the function corresponding to INFO to FILE.
180 Note there is no setter for the underlying descriptor array as it is built
181 just once using ipa_initialize_node_params. */
182
183void
184ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
185{
186 fprintf (file, "param #%i", i);
187 if (info->descriptors[i].decl)
188 {
189 fprintf (file, " ");
190 print_generic_expr (file, info->descriptors[i].decl, 0);
191 }
192}
193
194/* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
196
197void
198ipa_alloc_node_params (struct cgraph_node *node, int param_count)
199{
200 struct ipa_node_params *info = IPA_NODE_REF (node);
201
202 if (!info->descriptors.exists () && param_count)
203 info->descriptors.safe_grow_cleared (param_count);
204}
205
f8e2a1ed
MJ
206/* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
208 param_decls. */
be95e2b9 209
f8e2a1ed
MJ
210void
211ipa_initialize_node_params (struct cgraph_node *node)
212{
213 struct ipa_node_params *info = IPA_NODE_REF (node);
214
9771b263 215 if (!info->descriptors.exists ())
f8e2a1ed 216 {
67348ccc 217 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 218 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 219 }
518dc859
RL
220}
221
749aa96d
MJ
222/* Print the jump functions associated with call graph edge CS to file F. */
223
224static void
225ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
226{
227 int i, count;
228
229 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
230 for (i = 0; i < count; i++)
231 {
232 struct ipa_jump_func *jump_func;
233 enum jump_func_type type;
234
235 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
236 type = jump_func->type;
237
238 fprintf (f, " param %d: ", i);
239 if (type == IPA_JF_UNKNOWN)
240 fprintf (f, "UNKNOWN\n");
241 else if (type == IPA_JF_KNOWN_TYPE)
242 {
c7573249
MJ
243 fprintf (f, "KNOWN TYPE: base ");
244 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
245 fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
246 jump_func->value.known_type.offset);
247 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
248 fprintf (f, "\n");
749aa96d
MJ
249 }
250 else if (type == IPA_JF_CONST)
251 {
4502fe8d 252 tree val = jump_func->value.constant.value;
749aa96d
MJ
253 fprintf (f, "CONST: ");
254 print_generic_expr (f, val, 0);
255 if (TREE_CODE (val) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
257 {
258 fprintf (f, " -> ");
259 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
260 0);
261 }
262 fprintf (f, "\n");
263 }
749aa96d
MJ
264 else if (type == IPA_JF_PASS_THROUGH)
265 {
266 fprintf (f, "PASS THROUGH: ");
8b7773a4 267 fprintf (f, "%d, op %s",
749aa96d 268 jump_func->value.pass_through.formal_id,
5806f481 269 get_tree_code_name (jump_func->value.pass_through.operation));
749aa96d 270 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
271 {
272 fprintf (f, " ");
273 print_generic_expr (f,
274 jump_func->value.pass_through.operand, 0);
275 }
276 if (jump_func->value.pass_through.agg_preserved)
277 fprintf (f, ", agg_preserved");
b8f6e610
MJ
278 if (jump_func->value.pass_through.type_preserved)
279 fprintf (f, ", type_preserved");
3ea6239f 280 fprintf (f, "\n");
749aa96d
MJ
281 }
282 else if (type == IPA_JF_ANCESTOR)
283 {
284 fprintf (f, "ANCESTOR: ");
285 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
286 jump_func->value.ancestor.formal_id,
287 jump_func->value.ancestor.offset);
288 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
289 if (jump_func->value.ancestor.agg_preserved)
290 fprintf (f, ", agg_preserved");
b8f6e610
MJ
291 if (jump_func->value.ancestor.type_preserved)
292 fprintf (f, ", type_preserved");
3ea6239f 293 fprintf (f, "\n");
749aa96d 294 }
8b7773a4
MJ
295
296 if (jump_func->agg.items)
297 {
298 struct ipa_agg_jf_item *item;
299 int j;
300
301 fprintf (f, " Aggregate passed by %s:\n",
302 jump_func->agg.by_ref ? "reference" : "value");
9771b263 303 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
304 {
305 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
306 item->offset);
307 if (TYPE_P (item->value))
308 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 309 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
310 else
311 {
312 fprintf (f, "cst: ");
313 print_generic_expr (f, item->value, 0);
314 }
315 fprintf (f, "\n");
316 }
317 }
749aa96d
MJ
318 }
319}
320
321
be95e2b9
MJ
322/* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
518dc859 325void
3e293154 326ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 327{
3e293154 328 struct cgraph_edge *cs;
518dc859 329
fec39fa6 330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 331 node->order);
3e293154
MJ
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
749aa96d 337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
338 xstrdup (node->name ()), node->order,
339 xstrdup (cs->callee->name ()),
67348ccc 340 cs->callee->order);
749aa96d
MJ
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
518dc859 343
9de04252 344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 345 {
9de04252 346 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
3e293154 349
9de04252
MJ
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
c13bc3d9 352 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 354 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
355 ii->param_index, ii->offset,
356 ii->by_ref ? "by reference" : "by_value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i",
359 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
360
749aa96d
MJ
361 if (cs->call_stmt)
362 {
9de04252 363 fprintf (f, ", for stmt ");
749aa96d 364 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 365 }
749aa96d 366 else
9de04252 367 fprintf (f, "\n");
749aa96d 368 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
369 }
370}
371
372/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 373
3e293154
MJ
374void
375ipa_print_all_jump_functions (FILE *f)
376{
377 struct cgraph_node *node;
378
ca30a539 379 fprintf (f, "\nJump functions:\n");
65c70e6b 380 FOR_EACH_FUNCTION (node)
3e293154
MJ
381 {
382 ipa_print_node_jump_functions (f, node);
383 }
384}
385
7b872d9e
MJ
386/* Set JFUNC to be a known type jump function. */
387
388static void
389ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
390 tree base_type, tree component_type)
391{
06d65050
JH
392 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
393 && TYPE_BINFO (component_type));
7b872d9e
MJ
394 jfunc->type = IPA_JF_KNOWN_TYPE;
395 jfunc->value.known_type.offset = offset;
396 jfunc->value.known_type.base_type = base_type;
397 jfunc->value.known_type.component_type = component_type;
68377e53 398 gcc_assert (component_type);
7b872d9e
MJ
399}
400
b8f6e610
MJ
401/* Set DST to be a copy of another jump function SRC (to be used by the jump
402 function combination code). The two functions will share their rdesc. */
403
404static void
405ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
406 struct ipa_jump_func *src)
407
408{
409 gcc_checking_assert (src->type == IPA_JF_CONST);
410 dst->type = IPA_JF_CONST;
411 dst->value.constant = src->value.constant;
412}
413
7b872d9e
MJ
414/* Set JFUNC to be a constant jump function. */
415
416static void
4502fe8d
MJ
417ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
418 struct cgraph_edge *cs)
7b872d9e 419{
5368224f
DC
420 constant = unshare_expr (constant);
421 if (constant && EXPR_P (constant))
422 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 423 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
424 jfunc->value.constant.value = unshare_expr_without_location (constant);
425
426 if (TREE_CODE (constant) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
428 {
429 struct ipa_cst_ref_desc *rdesc;
430 if (!ipa_refdesc_pool)
431 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc), 32);
433
434 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
435 rdesc->cs = cs;
436 rdesc->next_duplicate = NULL;
437 rdesc->refcount = 1;
438 jfunc->value.constant.rdesc = rdesc;
439 }
440 else
441 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
442}
443
444/* Set JFUNC to be a simple pass-through jump function. */
445static void
8b7773a4 446ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 447 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
448{
449 jfunc->type = IPA_JF_PASS_THROUGH;
450 jfunc->value.pass_through.operand = NULL_TREE;
451 jfunc->value.pass_through.formal_id = formal_id;
452 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 453 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 454 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
455}
456
457/* Set JFUNC to be an arithmetic pass through jump function. */
458
459static void
460ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
461 tree operand, enum tree_code operation)
462{
463 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 464 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
465 jfunc->value.pass_through.formal_id = formal_id;
466 jfunc->value.pass_through.operation = operation;
8b7773a4 467 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 468 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
469}
470
471/* Set JFUNC to be an ancestor jump function. */
472
473static void
474ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
475 tree type, int formal_id, bool agg_preserved,
476 bool type_preserved)
7b872d9e
MJ
477{
478 jfunc->type = IPA_JF_ANCESTOR;
479 jfunc->value.ancestor.formal_id = formal_id;
480 jfunc->value.ancestor.offset = offset;
481 jfunc->value.ancestor.type = type;
8b7773a4 482 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 483 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
484}
485
e248d83f
MJ
486/* Extract the actual BINFO being described by JFUNC which must be a known type
487 jump function. */
488
489tree
490ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
491{
492 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
493 if (!base_binfo)
494 return NULL_TREE;
495 return get_binfo_at_offset (base_binfo,
496 jfunc->value.known_type.offset,
497 jfunc->value.known_type.component_type);
498}
499
f65cf2b7
MJ
500/* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
502
503struct type_change_info
504{
290ebcb7
MJ
505 /* Offset into the object where there is the virtual method pointer we are
506 looking for. */
507 HOST_WIDE_INT offset;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
509 type change. */
510 tree object;
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type;
f65cf2b7
MJ
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed;
290ebcb7
MJ
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered;
f65cf2b7
MJ
519};
520
521/* Return true if STMT can modify a virtual method table pointer.
522
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
526 pointers and that constructors essentially look in the following way:
527
528 1) The very first thing they do is that they call constructors of ancestor
529 sub-objects that have them.
530
531 2) Then the VMT pointers of this object and all of its ancestors are set to
532 new values corresponding to the type associated with the constructor.
533
534 3) Only afterwards, other stuff such as constructors of member sub-objects
535 and the code written by the user is run. Only this may include calling
536 virtual functions, directly or indirectly.
537
538 There is no other way to call a constructor of an ancestor
539 sub-object.
540
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
544
545 The most important fact to derive from the above is that if, for some
546 statement in the section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop, we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
556 */
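/* A hedged sketch of the lowered constructor shape assumed above; the names
   and offsets are made up purely for illustration:

     A::A (struct A *this)
     {
       B::B (&this->D.2100);            // 1) ancestor constructors run first
       this->_vptr.A = &_ZTV1A + 16;    // 2) VMT pointer stores
       this->x = 0;                     // 3) user code, may call virtuals
     }
*/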
557
558static bool
559stmt_may_be_vtbl_ptr_store (gimple stmt)
560{
561 if (is_gimple_call (stmt))
562 return false;
eb6bcfb0
JH
563 else if (gimple_clobber_p (stmt))
564 return false;
f65cf2b7
MJ
565 else if (is_gimple_assign (stmt))
566 {
567 tree lhs = gimple_assign_lhs (stmt);
568
0004f992
MJ
569 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
570 {
571 if (flag_strict_aliasing
572 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
573 return false;
574
575 if (TREE_CODE (lhs) == COMPONENT_REF
576 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 577 return false;
0004f992
MJ
578 /* In the future we might want to use get_base_ref_and_offset to find
579 if there is a field corresponding to the offset and if so, proceed
580 almost like if it was a component ref. */
581 }
f65cf2b7
MJ
582 }
583 return true;
584}
585
290ebcb7
MJ
586/* If STMT can be proved to be an assignment to the virtual method table
587 pointer of ANALYZED_OBJ and the type associated with the new table
588 identified, return the type. Otherwise return NULL_TREE. */
589
590static tree
591extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
592{
593 HOST_WIDE_INT offset, size, max_size;
594 tree lhs, rhs, base;
595
596 if (!gimple_assign_single_p (stmt))
597 return NULL_TREE;
598
599 lhs = gimple_assign_lhs (stmt);
600 rhs = gimple_assign_rhs1 (stmt);
601 if (TREE_CODE (lhs) != COMPONENT_REF
602 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
603 || TREE_CODE (rhs) != ADDR_EXPR)
604 return NULL_TREE;
605 rhs = get_base_address (TREE_OPERAND (rhs, 0));
606 if (!rhs
607 || TREE_CODE (rhs) != VAR_DECL
608 || !DECL_VIRTUAL_P (rhs))
609 return NULL_TREE;
610
611 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
612 if (offset != tci->offset
613 || size != POINTER_SIZE
614 || max_size != POINTER_SIZE)
615 return NULL_TREE;
616 if (TREE_CODE (base) == MEM_REF)
617 {
618 if (TREE_CODE (tci->object) != MEM_REF
619 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
620 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
621 TREE_OPERAND (base, 1)))
622 return NULL_TREE;
623 }
624 else if (tci->object != base)
625 return NULL_TREE;
626
627 return DECL_CONTEXT (rhs);
628}
629
61502ca8 630/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
631 detect_type_change to check whether a particular statement may modify
632 the virtual table pointer, and if possible also determine the new type of
633 the (sub-)object. It stores its result into DATA, which points to a
634 type_change_info structure. */
635
636static bool
637check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
638{
639 gimple stmt = SSA_NAME_DEF_STMT (vdef);
640 struct type_change_info *tci = (struct type_change_info *) data;
641
642 if (stmt_may_be_vtbl_ptr_store (stmt))
643 {
290ebcb7
MJ
644 tree type;
645 type = extr_type_from_vtbl_ptr_store (stmt, tci);
646 if (tci->type_maybe_changed
647 && type != tci->known_current_type)
648 tci->multiple_types_encountered = true;
649 tci->known_current_type = type;
f65cf2b7
MJ
650 tci->type_maybe_changed = true;
651 return true;
652 }
653 else
654 return false;
655}
656
290ebcb7
MJ
657
658
06d65050
JH
659/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
660 callsite CALL) by looking for assignments to its virtual table pointer. If
661 it is, return true and fill in the jump function JFUNC with relevant type
662 information or set it to unknown. ARG is the object itself (not a pointer
663 to it, unless dereferenced). BASE is the base of the memory access as
664 returned by get_ref_base_and_extent, as is the offset. */
f65cf2b7
MJ
665
666static bool
06d65050
JH
667detect_type_change (tree arg, tree base, tree comp_type, gimple call,
668 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
669{
670 struct type_change_info tci;
671 ao_ref ao;
672
673 gcc_checking_assert (DECL_P (arg)
674 || TREE_CODE (arg) == MEM_REF
675 || handled_component_p (arg));
676 /* Const calls cannot call virtual methods through VMT and so type changes do
677 not matter. */
06d65050
JH
678 if (!flag_devirtualize || !gimple_vuse (call)
679 /* Be sure expected_type is polymorphic. */
680 || !comp_type
681 || TREE_CODE (comp_type) != RECORD_TYPE
682 || !TYPE_BINFO (comp_type)
683 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
f65cf2b7
MJ
684 return false;
685
dd887943 686 ao_ref_init (&ao, arg);
f65cf2b7
MJ
687 ao.base = base;
688 ao.offset = offset;
689 ao.size = POINTER_SIZE;
690 ao.max_size = ao.size;
f65cf2b7 691
290ebcb7
MJ
692 tci.offset = offset;
693 tci.object = get_base_address (arg);
694 tci.known_current_type = NULL_TREE;
695 tci.type_maybe_changed = false;
696 tci.multiple_types_encountered = false;
697
f65cf2b7
MJ
698 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
699 &tci, NULL);
700 if (!tci.type_maybe_changed)
701 return false;
702
290ebcb7
MJ
703 if (!tci.known_current_type
704 || tci.multiple_types_encountered
705 || offset != 0)
706 jfunc->type = IPA_JF_UNKNOWN;
707 else
7b872d9e 708 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 709
f65cf2b7
MJ
710 return true;
711}
712
713/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
714 SSA name (its dereference will become the base and the offset is assumed to
715 be zero). */
716
717static bool
06d65050
JH
718detect_type_change_ssa (tree arg, tree comp_type,
719 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
720{
721 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 722 if (!flag_devirtualize
06d65050 723 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
724 return false;
725
726 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 727 build_int_cst (ptr_type_node, 0));
f65cf2b7 728
06d65050 729 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
730}
731
fdb0e1b4
MJ
732/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
733 boolean variable pointed to by DATA. */
734
735static bool
736mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
737 void *data)
738{
739 bool *b = (bool *) data;
740 *b = true;
741 return true;
742}
743
688010ba 744/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4
MJ
745 a value known not to be modified in this function before reaching the
746 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
747 information about the parameter. */
fdb0e1b4
MJ
748
749static bool
8b7773a4
MJ
750parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
751 gimple stmt, tree parm_load)
fdb0e1b4
MJ
752{
753 bool modified = false;
8b7773a4 754 bitmap *visited_stmts;
fdb0e1b4
MJ
755 ao_ref refd;
756
8b7773a4
MJ
757 if (parm_ainfo && parm_ainfo->parm_modified)
758 return false;
fdb0e1b4
MJ
759
760 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
761 ao_ref_init (&refd, parm_load);
762 /* We can cache visited statements only when parm_ainfo is available and when
763 we are looking at a naked load of the whole parameter. */
764 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
765 visited_stmts = NULL;
766 else
767 visited_stmts = &parm_ainfo->parm_visited_statements;
768 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
769 visited_stmts);
770 if (parm_ainfo && modified)
771 parm_ainfo->parm_modified = true;
772 return !modified;
fdb0e1b4
MJ
773}
774
775/* If STMT is an assignment that loads a value from a parameter declaration,
776 return the index of the parameter in ipa_node_params which has not been
777 modified. Otherwise return -1. */
778
779static int
84562394 780load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
fdb0e1b4
MJ
781 struct param_analysis_info *parms_ainfo,
782 gimple stmt)
783{
784 int index;
785 tree op1;
786
787 if (!gimple_assign_single_p (stmt))
788 return -1;
789
790 op1 = gimple_assign_rhs1 (stmt);
791 if (TREE_CODE (op1) != PARM_DECL)
792 return -1;
793
d044dd17 794 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 795 if (index < 0
8b7773a4
MJ
796 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
797 : NULL, stmt, op1))
fdb0e1b4
MJ
798 return -1;
799
800 return index;
801}
f65cf2b7 802
8b7773a4
MJ
803/* Return true if memory reference REF loads data that are known to be
804 unmodified in this function before reaching statement STMT. PARM_AINFO, if
805 non-NULL, is a pointer to a structure containing temporary information about
806 PARM. */
807
808static bool
809parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
810 gimple stmt, tree ref)
811{
812 bool modified = false;
813 ao_ref refd;
814
815 gcc_checking_assert (gimple_vuse (stmt));
816 if (parm_ainfo && parm_ainfo->ref_modified)
817 return false;
818
819 ao_ref_init (&refd, ref);
820 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
821 NULL);
822 if (parm_ainfo && modified)
823 parm_ainfo->ref_modified = true;
824 return !modified;
825}
826
827/* Return true if the data pointed to by PARM is known to be unmodified in this
828 function before reaching call statement CALL into which it is passed.
829 PARM_AINFO is a pointer to a structure containing temporary information
830 about PARM. */
831
832static bool
833parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
834 gimple call, tree parm)
835{
836 bool modified = false;
837 ao_ref refd;
838
839 /* It's unnecessary to calculate anything about memory contents for a const
840 function because it is not going to use it. But do not cache the result
841 either. Also, no such calculations for non-pointers. */
842 if (!gimple_vuse (call)
843 || !POINTER_TYPE_P (TREE_TYPE (parm)))
844 return false;
845
846 if (parm_ainfo->pt_modified)
847 return false;
848
849 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
850 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
851 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
852 if (modified)
853 parm_ainfo->pt_modified = true;
854 return !modified;
855}
856
857/* Return true if we can prove that OP is a memory reference loading unmodified
858 data from an aggregate passed as a parameter and if the aggregate is passed
859 by reference, that the alias type of the load corresponds to the type of the
860 formal parameter (so that we can rely on this type for TBAA in callers).
861 DESCRIPTORS and PARMS_AINFO describe parameters of the current function (but the
862 latter can be NULL), STMT is the load statement. If function returns true,
863 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
864 within the aggregate and whether it is a load from a value passed by
865 reference respectively. */
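/* Hypothetical example of a load this function recognizes: for a by-reference
   parameter P of type "struct S *", a statement such as

     tmp_5 = p_1(D)->field;

   yields *INDEX_P = index of P, *OFFSET_P = bit offset of FIELD and
   *BY_REF_P = true, provided the pointed-to data cannot be modified before
   STMT.  */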
866
867static bool
84562394 868ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
8b7773a4
MJ
869 struct param_analysis_info *parms_ainfo, gimple stmt,
870 tree op, int *index_p, HOST_WIDE_INT *offset_p,
3ff2ca23 871 HOST_WIDE_INT *size_p, bool *by_ref_p)
8b7773a4
MJ
872{
873 int index;
874 HOST_WIDE_INT size, max_size;
875 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
876
877 if (max_size == -1 || max_size != size || *offset_p < 0)
878 return false;
879
880 if (DECL_P (base))
881 {
d044dd17 882 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
883 if (index >= 0
884 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
885 : NULL, stmt, op))
886 {
887 *index_p = index;
888 *by_ref_p = false;
3ff2ca23
JJ
889 if (size_p)
890 *size_p = size;
8b7773a4
MJ
891 return true;
892 }
893 return false;
894 }
895
896 if (TREE_CODE (base) != MEM_REF
897 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
898 || !integer_zerop (TREE_OPERAND (base, 1)))
899 return false;
900
901 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
902 {
903 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 904 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
905 }
906 else
907 {
908 /* This branch catches situations where a pointer parameter is not a
909 gimple register, for example:
910
911 void hip7(S*) (struct S * p)
912 {
913 void (*<T2e4>) (struct S *) D.1867;
914 struct S * p.1;
915
916 <bb 2>:
917 p.1_1 = p;
918 D.1867_2 = p.1_1->f;
919 D.1867_2 ();
920 gdp = &p;
921 */
922
923 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 924 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
925 }
926
927 if (index >= 0
928 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
929 stmt, op))
930 {
931 *index_p = index;
932 *by_ref_p = true;
3ff2ca23
JJ
933 if (size_p)
934 *size_p = size;
8b7773a4
MJ
935 return true;
936 }
937 return false;
938}
939
940/* Like the previous function, but without the param_analysis_info pointer;
941 intended for users outside of this file. */
942
943bool
944ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
945 tree op, int *index_p, HOST_WIDE_INT *offset_p,
946 bool *by_ref_p)
947{
d044dd17 948 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
3ff2ca23 949 offset_p, NULL, by_ref_p);
8b7773a4
MJ
950}
951
b258210c 952/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
953 of an assignment statement STMT, try to determine whether we are actually
954 handling any of the following cases and construct an appropriate jump
955 function into JFUNC if so:
956
957 1) The passed value is loaded from a formal parameter which is not a gimple
958 register (most probably because it is addressable, the value has to be
959 scalar) and we can guarantee the value has not changed. This case can
960 therefore be described by a simple pass-through jump function. For example:
961
962 foo (int a)
963 {
964 int a.0;
965
966 a.0_2 = a;
967 bar (a.0_2);
968
969 2) The passed value can be described by a simple arithmetic pass-through
970 jump function. E.g.
971
972 foo (int a)
973 {
974 int D.2064;
975
976 D.2064_4 = a.1(D) + 4;
977 bar (D.2064_4);
978
979 This case can also occur in combination of the previous one, e.g.:
980
981 foo (int a, int z)
982 {
983 int a.0;
984 int D.2064;
985
986 a.0_3 = a;
987 D.2064_4 = a.0_3 + 4;
988 foo (D.2064_4);
989
990 3) The passed value is an address of an object within another one (which
991 also passed by reference). Such situations are described by an ancestor
992 jump function and describe situations such as:
993
994 B::foo() (struct B * const this)
995 {
996 struct A * D.1845;
997
998 D.1845_2 = &this_1(D)->D.1748;
999 A::bar (D.1845_2);
1000
1001 INFO is the structure describing individual parameters that is used across
1002 different stages of IPA optimizations. PARMS_AINFO contains the information
1003 that is only needed for intraprocedural analysis. */
685b0d13
MJ
1004
1005static void
b258210c 1006compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 1007 struct param_analysis_info *parms_ainfo,
b258210c 1008 struct ipa_jump_func *jfunc,
06d65050
JH
1009 gimple call, gimple stmt, tree name,
1010 tree param_type)
685b0d13
MJ
1011{
1012 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1013 tree op1, tc_ssa, base, ssa;
685b0d13 1014 int index;
685b0d13 1015
685b0d13 1016 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1017
fdb0e1b4 1018 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1019 {
fdb0e1b4
MJ
1020 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1021 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1022 else
d044dd17 1023 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
1024 SSA_NAME_DEF_STMT (op1));
1025 tc_ssa = op1;
1026 }
1027 else
1028 {
d044dd17 1029 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
1030 tc_ssa = gimple_assign_lhs (stmt);
1031 }
1032
1033 if (index >= 0)
1034 {
1035 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1036
b258210c 1037 if (op2)
685b0d13 1038 {
b258210c
MJ
1039 if (!is_gimple_ip_invariant (op2)
1040 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1041 && !useless_type_conversion_p (TREE_TYPE (name),
1042 TREE_TYPE (op1))))
1043 return;
1044
7b872d9e
MJ
1045 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1046 gimple_assign_rhs_code (stmt));
685b0d13 1047 }
b8f6e610 1048 else if (gimple_assign_single_p (stmt))
8b7773a4
MJ
1049 {
1050 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1051 call, tc_ssa);
06d65050
JH
1052 bool type_p = false;
1053
1054 if (param_type && POINTER_TYPE_P (param_type))
1055 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1056 call, jfunc);
b8f6e610
MJ
1057 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1058 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1059 }
685b0d13
MJ
1060 return;
1061 }
1062
1063 if (TREE_CODE (op1) != ADDR_EXPR)
1064 return;
1065 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1066 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1067 return;
32aa622c
MJ
1068 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1069 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1070 /* If this is a varying address, punt. */
1071 || max_size == -1
1072 || max_size != size)
685b0d13 1073 return;
32aa622c 1074 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
1075 ssa = TREE_OPERAND (base, 0);
1076 if (TREE_CODE (ssa) != SSA_NAME
1077 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1078 || offset < 0)
685b0d13
MJ
1079 return;
1080
b8f6e610 1081 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1082 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1083 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1084 {
06d65050
JH
1085 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1086 call, jfunc, offset);
b8f6e610
MJ
1087 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1088 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1089 parm_ref_data_pass_through_p (&parms_ainfo[index],
1090 call, ssa), type_p);
1091 }
685b0d13
MJ
1092}
1093
40591473
MJ
1094/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1095 it looks like:
1096
1097 iftmp.1_3 = &obj_2(D)->D.1762;
1098
1099 The base of the MEM_REF must be a default definition SSA NAME of a
1101 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1101 whole MEM_REF expression is returned and the offset calculated from any
1102 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1103 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1104
1105static tree
1106get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1107{
1108 HOST_WIDE_INT size, max_size;
1109 tree expr, parm, obj;
1110
1111 if (!gimple_assign_single_p (assign))
1112 return NULL_TREE;
1113 expr = gimple_assign_rhs1 (assign);
1114
1115 if (TREE_CODE (expr) != ADDR_EXPR)
1116 return NULL_TREE;
1117 expr = TREE_OPERAND (expr, 0);
1118 obj = expr;
1119 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1120
1121 if (TREE_CODE (expr) != MEM_REF
1122 /* If this is a varying address, punt. */
1123 || max_size == -1
1124 || max_size != size
1125 || *offset < 0)
1126 return NULL_TREE;
1127 parm = TREE_OPERAND (expr, 0);
1128 if (TREE_CODE (parm) != SSA_NAME
1129 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1130 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1131 return NULL_TREE;
1132
1133 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1134 *obj_p = obj;
1135 return expr;
1136}
1137
685b0d13 1138
b258210c
MJ
1139/* Given that an actual argument is an SSA_NAME that is a result of a phi
1140 statement PHI, try to find out whether NAME is in fact a
1141 multiple-inheritance typecast from a descendant into an ancestor of a formal
1142 parameter and thus can be described by an ancestor jump function and if so,
1143 write the appropriate function into JFUNC.
1144
1145 Essentially we want to match the following pattern:
1146
1147 if (obj_2(D) != 0B)
1148 goto <bb 3>;
1149 else
1150 goto <bb 4>;
1151
1152 <bb 3>:
1153 iftmp.1_3 = &obj_2(D)->D.1762;
1154
1155 <bb 4>:
1156 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1157 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1158 return D.1879_6; */
1159
1160static void
1161compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1162 struct param_analysis_info *parms_ainfo,
b258210c 1163 struct ipa_jump_func *jfunc,
06d65050 1164 gimple call, gimple phi, tree param_type)
b258210c 1165{
40591473 1166 HOST_WIDE_INT offset;
b258210c
MJ
1167 gimple assign, cond;
1168 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1169 tree tmp, parm, expr, obj;
b258210c
MJ
1170 int index, i;
1171
54e348cb 1172 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1173 return;
1174
54e348cb
MJ
1175 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1176 tmp = PHI_ARG_DEF (phi, 0);
1177 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1178 tmp = PHI_ARG_DEF (phi, 1);
1179 else
1180 return;
b258210c
MJ
1181 if (TREE_CODE (tmp) != SSA_NAME
1182 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1183 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1184 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1185 return;
1186
1187 assign = SSA_NAME_DEF_STMT (tmp);
1188 assign_bb = gimple_bb (assign);
40591473 1189 if (!single_pred_p (assign_bb))
b258210c 1190 return;
40591473
MJ
1191 expr = get_ancestor_addr_info (assign, &obj, &offset);
1192 if (!expr)
b258210c
MJ
1193 return;
1194 parm = TREE_OPERAND (expr, 0);
b258210c 1195 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1196 gcc_assert (index >= 0);
b258210c
MJ
1197
1198 cond_bb = single_pred (assign_bb);
1199 cond = last_stmt (cond_bb);
69610617
SB
1200 if (!cond
1201 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1202 || gimple_cond_code (cond) != NE_EXPR
1203 || gimple_cond_lhs (cond) != parm
1204 || !integer_zerop (gimple_cond_rhs (cond)))
1205 return;
1206
b258210c
MJ
1207 phi_bb = gimple_bb (phi);
1208 for (i = 0; i < 2; i++)
1209 {
1210 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1211 if (pred != assign_bb && pred != cond_bb)
1212 return;
1213 }
1214
06d65050
JH
1215 bool type_p = false;
1216 if (param_type && POINTER_TYPE_P (param_type))
1217 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1218 call, jfunc, offset);
b8f6e610 1219 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8b7773a4
MJ
1220 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1221 parm_ref_data_pass_through_p (&parms_ainfo[index],
b8f6e610 1222 call, parm), type_p);
b258210c
MJ
1223}
1224
61502ca8 1225/* Given OP which is passed as an actual argument to a called function,
b258210c 1226 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1227 and if so, create one and store it to JFUNC.
1228 EXPECTED_TYPE represents the type the argument should be in. */
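/* Illustrative example (hypothetical): passing "&a.b", where A is an
   automatic variable and B is of a polymorphic RECORD_TYPE, produces a
   KNOWN_TYPE jump function with base type "struct A", the bit offset of B
   within A, and component type "struct B", unless a type change of A is
   detected before the call.  */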
b258210c
MJ
1229
1230static void
f65cf2b7 1231compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1232 gimple call, tree expected_type)
b258210c 1233{
32aa622c 1234 HOST_WIDE_INT offset, size, max_size;
c7573249 1235 tree base;
b258210c 1236
05842ff5
MJ
1237 if (!flag_devirtualize
1238 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1239 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1240 /* Be sure expected_type is polymorphic. */
1241 || !expected_type
1242 || TREE_CODE (expected_type) != RECORD_TYPE
1243 || !TYPE_BINFO (expected_type)
1244 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1245 return;
1246
1247 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1248 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1249 if (!DECL_P (base)
1250 || max_size == -1
1251 || max_size != size
1252 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1253 || is_global_var (base))
1254 return;
1255
06d65050 1256 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1257 return;
1258
06d65050
JH
1259 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1260 expected_type);
b258210c
MJ
1261}
1262
be95e2b9
MJ
1263/* Inspect the given TYPE and return true iff it has the same structure (the
1264 same number of fields of the same types) as a C++ member pointer. If
1265 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1266 corresponding fields there. */
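/* For illustration (an assumption about how the C++ front end lowers
   pointers to member functions, not verified here), the accepted layout
   looks roughly like

     struct fake_memptr
     {
       void (*pfn) (struct S *);   // field whose pointee is a METHOD_TYPE
       long delta;                 // integral adjustment of the object ptr
     };

   with the first field stored to *METHOD_PTR and the second to *DELTA.  */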
1267
3e293154
MJ
1268static bool
1269type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1270{
1271 tree fld;
1272
1273 if (TREE_CODE (type) != RECORD_TYPE)
1274 return false;
1275
1276 fld = TYPE_FIELDS (type);
1277 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1278 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1280 return false;
1281
1282 if (method_ptr)
1283 *method_ptr = fld;
1284
910ad8de 1285 fld = DECL_CHAIN (fld);
8b7773a4 1286 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1287 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1288 return false;
1289 if (delta)
1290 *delta = fld;
1291
910ad8de 1292 if (DECL_CHAIN (fld))
3e293154
MJ
1293 return false;
1294
1295 return true;
1296}
1297
61502ca8 1298/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1299 return the rhs of its defining statement. Otherwise return RHS as it
1300 is. */
7ec49257
MJ
1301
1302static inline tree
1303get_ssa_def_if_simple_copy (tree rhs)
1304{
1305 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1306 {
1307 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1308
1309 if (gimple_assign_single_p (def_stmt))
1310 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1311 else
1312 break;
7ec49257
MJ
1313 }
1314 return rhs;
1315}
1316
8b7773a4
MJ
1317/* Simple linked list, describing known contents of an aggregate before a
1318 call. */
1319
1320struct ipa_known_agg_contents_list
1321{
1322 /* Offset and size of the described part of the aggregate. */
1323 HOST_WIDE_INT offset, size;
1324 /* Known constant value or NULL if the contents is known to be unknown. */
1325 tree constant;
1326 /* Pointer to the next structure in the list. */
1327 struct ipa_known_agg_contents_list *next;
1328};
3e293154 1329
8b7773a4
MJ
1330/* Traverse statements from CALL backwards, scanning whether an aggregate given
1331 in ARG is filled in with constant values. ARG can either be an aggregate
1332 expression or a pointer to an aggregate. JFUNC is the jump function into
1333 which the constants are subsequently stored. */
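/* Hypothetical caller pattern this backward scan is meant to recognize:

     struct S s;
     s.a = 1;        // becomes a constant item at the bit offset of a
     s.b = 2;        // becomes a constant item at the bit offset of b
     foo (&s);       // CALL; jfunc->agg gets both items, agg.by_ref = true

   Stores that cannot be understood or that only partially overlap a known
   item terminate the scan.  */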
be95e2b9 1334
3e293154 1335static void
8b7773a4
MJ
1336determine_known_aggregate_parts (gimple call, tree arg,
1337 struct ipa_jump_func *jfunc)
3e293154 1338{
8b7773a4
MJ
1339 struct ipa_known_agg_contents_list *list = NULL;
1340 int item_count = 0, const_count = 0;
1341 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1342 gimple_stmt_iterator gsi;
8b7773a4
MJ
1343 tree arg_base;
1344 bool check_ref, by_ref;
1345 ao_ref r;
3e293154 1346
8b7773a4
MJ
1347 /* The function operates in three stages. First, we prepare check_ref, r,
1348 arg_base and arg_offset based on what is actually passed as an actual
1349 argument. */
3e293154 1350
8b7773a4
MJ
1351 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1352 {
1353 by_ref = true;
1354 if (TREE_CODE (arg) == SSA_NAME)
1355 {
1356 tree type_size;
cc269bb6 1357 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
8b7773a4
MJ
1358 return;
1359 check_ref = true;
1360 arg_base = arg;
1361 arg_offset = 0;
1362 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
ae7e9ddd 1363 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1364 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1365 }
1366 else if (TREE_CODE (arg) == ADDR_EXPR)
1367 {
1368 HOST_WIDE_INT arg_max_size;
1369
1370 arg = TREE_OPERAND (arg, 0);
1371 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1372 &arg_max_size);
1373 if (arg_max_size == -1
1374 || arg_max_size != arg_size
1375 || arg_offset < 0)
1376 return;
1377 if (DECL_P (arg_base))
1378 {
1379 tree size;
1380 check_ref = false;
1381 size = build_int_cst (integer_type_node, arg_size);
1382 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1383 }
1384 else
1385 return;
1386 }
1387 else
1388 return;
1389 }
1390 else
1391 {
1392 HOST_WIDE_INT arg_max_size;
1393
1394 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1395
1396 by_ref = false;
1397 check_ref = false;
1398 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1399 &arg_max_size);
1400 if (arg_max_size == -1
1401 || arg_max_size != arg_size
1402 || arg_offset < 0)
1403 return;
1404
1405 ao_ref_init (&r, arg);
1406 }
1407
1408 /* Second stage walks back the BB, looks at individual statements and as long
1409 as it is confident of how the statements affect contents of the
1410 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1411 structures describing it. */
1412 gsi = gsi_for_stmt (call);
726a989a
RB
1413 gsi_prev (&gsi);
1414 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1415 {
8b7773a4 1416 struct ipa_known_agg_contents_list *n, **p;
726a989a 1417 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1418 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1419 tree lhs, rhs, lhs_base;
1420 bool partial_overlap;
3e293154 1421
8b7773a4 1422 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1423 continue;
8b75fc9b 1424 if (!gimple_assign_single_p (stmt))
8b7773a4 1425 break;
3e293154 1426
726a989a
RB
1427 lhs = gimple_assign_lhs (stmt);
1428 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1429 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1430 || TREE_CODE (lhs) == BIT_FIELD_REF
1431 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1432 break;
3e293154 1433
8b7773a4
MJ
1434 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1435 &lhs_max_size);
1436 if (lhs_max_size == -1
1437 || lhs_max_size != lhs_size
1438 || (lhs_offset < arg_offset
1439 && lhs_offset + lhs_size > arg_offset)
1440 || (lhs_offset < arg_offset + arg_size
1441 && lhs_offset + lhs_size > arg_offset + arg_size))
1442 break;
3e293154 1443
8b7773a4 1444 if (check_ref)
518dc859 1445 {
8b7773a4
MJ
1446 if (TREE_CODE (lhs_base) != MEM_REF
1447 || TREE_OPERAND (lhs_base, 0) != arg_base
1448 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1449 break;
3e293154 1450 }
8b7773a4 1451 else if (lhs_base != arg_base)
774b8a55
MJ
1452 {
1453 if (DECL_P (lhs_base))
1454 continue;
1455 else
1456 break;
1457 }
3e293154 1458
8b7773a4
MJ
1459 if (lhs_offset + lhs_size < arg_offset
1460 || lhs_offset >= (arg_offset + arg_size))
1461 continue;
1462
1463 partial_overlap = false;
1464 p = &list;
1465 while (*p && (*p)->offset < lhs_offset)
3e293154 1466 {
8b7773a4 1467 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1468 {
8b7773a4
MJ
1469 partial_overlap = true;
1470 break;
3e293154 1471 }
8b7773a4
MJ
1472 p = &(*p)->next;
1473 }
1474 if (partial_overlap)
1475 break;
1476 if (*p && (*p)->offset < lhs_offset + lhs_size)
1477 {
1478 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1479 /* We already know this value is subsequently overwritten with
1480 something else. */
1481 continue;
3e293154 1482 else
8b7773a4
MJ
1483 /* Otherwise this is a partial overlap which we cannot
1484 represent. */
1485 break;
3e293154 1486 }
3e293154 1487
8b7773a4
MJ
1488 rhs = get_ssa_def_if_simple_copy (rhs);
1489 n = XALLOCA (struct ipa_known_agg_contents_list);
1490 n->size = lhs_size;
1491 n->offset = lhs_offset;
1492 if (is_gimple_ip_invariant (rhs))
1493 {
1494 n->constant = rhs;
1495 const_count++;
1496 }
1497 else
1498 n->constant = NULL_TREE;
1499 n->next = *p;
1500 *p = n;
3e293154 1501
8b7773a4 1502 item_count++;
dfea20f1
MJ
1503 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1504 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1505 break;
1506 }
be95e2b9 1507
8b7773a4
MJ
1508 /* Third stage just goes over the list and creates an appropriate vector of
1509 ipa_agg_jf_item structures out of it, of course only if there are
1510 any known constants to begin with. */
3e293154 1511
8b7773a4 1512 if (const_count)
3e293154 1513 {
8b7773a4 1514 jfunc->agg.by_ref = by_ref;
9771b263 1515 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1516 while (list)
1517 {
1518 if (list->constant)
1519 {
f32682ca
DN
1520 struct ipa_agg_jf_item item;
1521 item.offset = list->offset - arg_offset;
7d2fb524 1522 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1523 item.value = unshare_expr_without_location (list->constant);
9771b263 1524 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1525 }
1526 list = list->next;
1527 }
3e293154
MJ
1528 }
1529}
1530
06d65050
JH
1531static tree
1532ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1533{
1534 int n;
1535 tree type = (e->callee
67348ccc 1536 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1537 : gimple_call_fntype (e->call_stmt));
1538 tree t = TYPE_ARG_TYPES (type);
1539
1540 for (n = 0; n < i; n++)
1541 {
1542 if (!t)
1543 break;
1544 t = TREE_CHAIN (t);
1545 }
1546 if (t)
1547 return TREE_VALUE (t);
1548 if (!e->callee)
1549 return NULL;
67348ccc 1550 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1551 for (n = 0; n < i; n++)
1552 {
1553 if (!t)
1554 return NULL;
1555 t = TREE_CHAIN (t);
1556 }
1557 if (t)
1558 return TREE_TYPE (t);
1559 return NULL;
1560}
1561
3e293154
MJ
1562/* Compute jump function for all arguments of callsite CS and insert the
1563 information in the jump_functions array in the ipa_edge_args corresponding
1564 to this callsite. */
be95e2b9 1565
749aa96d 1566static void
c419671c 1567ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1568 struct cgraph_edge *cs)
3e293154
MJ
1569{
1570 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1571 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1572 gimple call = cs->call_stmt;
8b7773a4 1573 int n, arg_num = gimple_call_num_args (call);
3e293154 1574
606d9a09 1575 if (arg_num == 0 || args->jump_functions)
3e293154 1576 return;
9771b263 1577 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1578
96e24d49
JJ
1579 if (gimple_call_internal_p (call))
1580 return;
5fe8e757
MJ
1581 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1582 return;
1583
8b7773a4
MJ
1584 for (n = 0; n < arg_num; n++)
1585 {
1586 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1587 tree arg = gimple_call_arg (call, n);
06d65050 1588 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1589
8b7773a4 1590 if (is_gimple_ip_invariant (arg))
4502fe8d 1591 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1592 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1593 && TREE_CODE (arg) == PARM_DECL)
1594 {
1595 int index = ipa_get_param_decl_index (info, arg);
1596
1597 gcc_assert (index >= 0);
1598 /* Aggregate passed by value, check for pass-through, otherwise we
1599 will attempt to fill in the aggregate contents later in this
1600 loop. */
1601 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1602 {
b8f6e610 1603 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1604 continue;
1605 }
1606 }
1607 else if (TREE_CODE (arg) == SSA_NAME)
1608 {
1609 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1610 {
1611 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1612 if (index >= 0)
8b7773a4 1613 {
b8f6e610 1614 bool agg_p, type_p;
8b7773a4
MJ
1615 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1616 call, arg);
06d65050
JH
1617 if (param_type && POINTER_TYPE_P (param_type))
1618 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1619 call, jfunc);
1620 else
1621 type_p = false;
b8f6e610 1622 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1623 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1624 type_p);
8b7773a4
MJ
1625 }
1626 }
1627 else
1628 {
1629 gimple stmt = SSA_NAME_DEF_STMT (arg);
1630 if (is_gimple_assign (stmt))
1631 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1632 call, stmt, arg, param_type);
8b7773a4
MJ
1633 else if (gimple_code (stmt) == GIMPLE_PHI)
1634 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1635 call, stmt, param_type);
8b7773a4
MJ
1636 }
1637 }
1638 else
06d65050
JH
1639 compute_known_type_jump_func (arg, jfunc, call,
1640 param_type
1641 && POINTER_TYPE_P (param_type)
1642 ? TREE_TYPE (param_type)
1643 : NULL);
3e293154 1644
8b7773a4
MJ
1645 if ((jfunc->type != IPA_JF_PASS_THROUGH
1646 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1647 && (jfunc->type != IPA_JF_ANCESTOR
1648 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1649 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1650 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1651 determine_known_aggregate_parts (call, arg, jfunc);
1652 }
3e293154
MJ
1653}
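/* For illustration (a hypothetical caller; CALLEE, CALLER and S are made-up
   names, not part of this file): given

     extern void callee (int c, int x, struct S *p);

     void caller (int x, struct S *s)
     {
       callee (7, x, s);
     }

   the function above would typically record an IPA_JF_CONST jump function
   (value 7) for the first argument and IPA_JF_PASS_THROUGH jump functions of
   the caller's own formals for the second and third, with the aggregate and
   type preservation flags depending on what the analysis can prove about the
   call.  This is a sketch of the expected outcome, not a dump taken from the
   pass.  */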
1654
749aa96d
MJ
1655/* Compute jump functions for all edges - both direct and indirect - outgoing
1656 from NODE. Also count the actual arguments in the process. */
1657
062c604f
MJ
1658static void
1659ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1660 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1661{
1662 struct cgraph_edge *cs;
1663
1664 for (cs = node->callees; cs; cs = cs->next_callee)
1665 {
d7da5cc8
MJ
1666 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1667 NULL);
749aa96d
MJ
1668 /* We do not need to bother analyzing calls to unknown
1669 functions unless they may become known during lto/whopr. */
67348ccc 1670 if (!callee->definition && !flag_lto)
749aa96d 1671 continue;
c419671c 1672 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1673 }
1674
1675 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1676 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1677}
1678
8b7773a4
MJ
1679/* If STMT looks like a statement loading a value from a member pointer formal
1680 parameter, return that parameter and store the offset of the field to
1681 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1682 might be clobbered). If USE_DELTA, then we look for a use of the delta
1683 field rather than the pfn. */
be95e2b9 1684
3e293154 1685static tree
8b7773a4
MJ
1686ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1687 HOST_WIDE_INT *offset_p)
3e293154 1688{
8b7773a4
MJ
1689 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1690
1691 if (!gimple_assign_single_p (stmt))
1692 return NULL_TREE;
3e293154 1693
8b7773a4 1694 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1695 if (TREE_CODE (rhs) == COMPONENT_REF)
1696 {
1697 ref_field = TREE_OPERAND (rhs, 1);
1698 rhs = TREE_OPERAND (rhs, 0);
1699 }
1700 else
1701 ref_field = NULL_TREE;
d242d063 1702 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1703 return NULL_TREE;
3e293154 1704 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1705 if (TREE_CODE (rec) != ADDR_EXPR)
1706 return NULL_TREE;
1707 rec = TREE_OPERAND (rec, 0);
3e293154 1708 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1709 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1710 return NULL_TREE;
d242d063 1711 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1712
8b7773a4
MJ
1713 if (use_delta)
1714 fld = delta_field;
1715 else
1716 fld = ptr_field;
1717 if (offset_p)
1718 *offset_p = int_bit_position (fld);
1719
ae788515
EB
1720 if (ref_field)
1721 {
1722 if (integer_nonzerop (ref_offset))
1723 return NULL_TREE;
ae788515
EB
1724 return ref_field == fld ? rec : NULL_TREE;
1725 }
3e293154 1726 else
8b7773a4
MJ
1727 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1728 : NULL_TREE;
3e293154
MJ
1729}
1730
1731/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1732
3e293154
MJ
1733static bool
1734ipa_is_ssa_with_stmt_def (tree t)
1735{
1736 if (TREE_CODE (t) == SSA_NAME
1737 && !SSA_NAME_IS_DEFAULT_DEF (t))
1738 return true;
1739 else
1740 return false;
1741}
1742
40591473
MJ
1743/* Find the indirect call graph edge corresponding to STMT and mark it as a
1744 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1745 indirect call graph edge. */
be95e2b9 1746
40591473
MJ
1747static struct cgraph_edge *
1748ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1749{
e33c6cd6 1750 struct cgraph_edge *cs;
3e293154 1751
5f902d76 1752 cs = cgraph_edge (node, stmt);
b258210c 1753 cs->indirect_info->param_index = param_index;
8b7773a4 1754 cs->indirect_info->agg_contents = 0;
c13bc3d9 1755 cs->indirect_info->member_ptr = 0;
40591473 1756 return cs;
3e293154
MJ
1757}
1758
e33c6cd6 1759/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1760 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1761 intermediate information about each formal parameter. Currently it checks
1762 whether the call calls a pointer that is a formal parameter and if so, the
1763 parameter is marked with the called flag and an indirect call graph edge
1764 describing the call is created. This is very simple for ordinary pointers
1765 represented in SSA but not-so-nice when it comes to member pointers. The
1766 ugly part of this function does nothing more than trying to match the
1767 pattern of such a call. An example of such a pattern is the gimple dump
1768 below, the call is on the last line:
3e293154 1769
ae788515
EB
1770 <bb 2>:
1771 f$__delta_5 = f.__delta;
1772 f$__pfn_24 = f.__pfn;
1773
1774 or
3e293154 1775 <bb 2>:
d242d063
MJ
1776 f$__delta_5 = MEM[(struct *)&f];
1777 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1778
ae788515 1779 and a few lines below:
8aa29647
MJ
1780
1781 <bb 5>
3e293154
MJ
1782 D.2496_3 = (int) f$__pfn_24;
1783 D.2497_4 = D.2496_3 & 1;
1784 if (D.2497_4 != 0)
1785 goto <bb 3>;
1786 else
1787 goto <bb 4>;
1788
8aa29647 1789 <bb 6>:
3e293154
MJ
1790 D.2500_7 = (unsigned int) f$__delta_5;
1791 D.2501_8 = &S + D.2500_7;
1792 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1793 D.2503_10 = *D.2502_9;
1794 D.2504_12 = f$__pfn_24 + -1;
1795 D.2505_13 = (unsigned int) D.2504_12;
1796 D.2506_14 = D.2503_10 + D.2505_13;
1797 D.2507_15 = *D.2506_14;
1798 iftmp.11_16 = (String:: *) D.2507_15;
1799
8aa29647 1800 <bb 7>:
3e293154
MJ
1801 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1802 D.2500_19 = (unsigned int) f$__delta_5;
1803 D.2508_20 = &S + D.2500_19;
1804 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1805
1806 Such patterns are results of simple calls to a member pointer:
1807
1808 int doprinting (int (MyString::* f)(int) const)
1809 {
1810 MyString S ("somestring");
1811
1812 return (S.*f)(4);
1813 }
8b7773a4
MJ
1814
1815 Moreover, the function also looks for called pointers loaded from aggregates
1816 passed by value or reference. */
3e293154
MJ
1817
1818static void
b258210c
MJ
1819ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1820 struct ipa_node_params *info,
c419671c 1821 struct param_analysis_info *parms_ainfo,
b258210c 1822 gimple call, tree target)
3e293154 1823{
726a989a 1824 gimple def;
3e293154 1825 tree n1, n2;
726a989a
RB
1826 gimple d1, d2;
1827 tree rec, rec2, cond;
1828 gimple branch;
3e293154 1829 int index;
3e293154 1830 basic_block bb, virt_bb, join;
8b7773a4
MJ
1831 HOST_WIDE_INT offset;
1832 bool by_ref;
3e293154 1833
3e293154
MJ
1834 if (SSA_NAME_IS_DEFAULT_DEF (target))
1835 {
b258210c 1836 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1837 index = ipa_get_param_decl_index (info, var);
1838 if (index >= 0)
40591473 1839 ipa_note_param_call (node, index, call);
3e293154
MJ
1840 return;
1841 }
1842
8b7773a4
MJ
1843 def = SSA_NAME_DEF_STMT (target);
1844 if (gimple_assign_single_p (def)
d044dd17 1845 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4 1846 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 1847 NULL, &by_ref))
8b7773a4
MJ
1848 {
1849 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1850 if (cs->indirect_info->offset != offset)
1851 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1852 cs->indirect_info->offset = offset;
1853 cs->indirect_info->agg_contents = 1;
1854 cs->indirect_info->by_ref = by_ref;
1855 return;
1856 }
1857
3e293154
MJ
1858 /* Now we need to try to match the complex pattern of calling a member
1859 pointer. */
8b7773a4
MJ
1860 if (gimple_code (def) != GIMPLE_PHI
1861 || gimple_phi_num_args (def) != 2
1862 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1863 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1864 return;
1865
3e293154
MJ
1866 /* First, we need to check whether one of these is a load from a member
1867 pointer that is a parameter to this function. */
1868 n1 = PHI_ARG_DEF (def, 0);
1869 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1870 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1871 return;
1872 d1 = SSA_NAME_DEF_STMT (n1);
1873 d2 = SSA_NAME_DEF_STMT (n2);
1874
8aa29647 1875 join = gimple_bb (def);
8b7773a4 1876 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1877 {
8b7773a4 1878 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1879 return;
1880
8aa29647 1881 bb = EDGE_PRED (join, 0)->src;
726a989a 1882 virt_bb = gimple_bb (d2);
3e293154 1883 }
8b7773a4 1884 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1885 {
8aa29647 1886 bb = EDGE_PRED (join, 1)->src;
726a989a 1887 virt_bb = gimple_bb (d1);
3e293154
MJ
1888 }
1889 else
1890 return;
1891
1892 /* Second, we need to check that the basic blocks are laid out in the way
1893 corresponding to the pattern. */
1894
3e293154
MJ
1895 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1896 || single_pred (virt_bb) != bb
1897 || single_succ (virt_bb) != join)
1898 return;
1899
1900 /* Third, let's see that the branching is done depending on the least
1901 significant bit of the pfn. */
1902
1903 branch = last_stmt (bb);
8aa29647 1904 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1905 return;
1906
12430896
RG
1907 if ((gimple_cond_code (branch) != NE_EXPR
1908 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1909 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1910 return;
3e293154 1911
726a989a 1912 cond = gimple_cond_lhs (branch);
3e293154
MJ
1913 if (!ipa_is_ssa_with_stmt_def (cond))
1914 return;
1915
726a989a 1916 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1917 if (!is_gimple_assign (def)
726a989a
RB
1918 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1919 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1920 return;
726a989a
RB
1921
1922 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1923 if (!ipa_is_ssa_with_stmt_def (cond))
1924 return;
1925
726a989a 1926 def = SSA_NAME_DEF_STMT (cond);
3e293154 1927
8b75fc9b
MJ
1928 if (is_gimple_assign (def)
1929 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1930 {
726a989a 1931 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1932 if (!ipa_is_ssa_with_stmt_def (cond))
1933 return;
726a989a 1934 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1935 }
1936
6f7b8b70
RE
1937 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1938 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1939 == ptrmemfunc_vbit_in_delta),
1940 NULL);
3e293154
MJ
1941 if (rec != rec2)
1942 return;
1943
1944 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1945 if (index >= 0
1946 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1947 {
1948 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1949 if (cs->indirect_info->offset != offset)
1950 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1951 cs->indirect_info->offset = offset;
1952 cs->indirect_info->agg_contents = 1;
c13bc3d9 1953 cs->indirect_info->member_ptr = 1;
8b7773a4 1954 }
3e293154
MJ
1955
1956 return;
1957}
1958
b258210c
MJ
1959/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1962
1963static void
1964ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1965 struct ipa_node_params *info, gimple call,
1966 tree target)
1967{
40591473
MJ
1968 struct cgraph_edge *cs;
1969 struct cgraph_indirect_call_info *ii;
f65cf2b7 1970 struct ipa_jump_func jfunc;
b258210c 1971 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1972 int index;
40591473 1973 HOST_WIDE_INT anc_offset;
b258210c 1974
05842ff5
MJ
1975 if (!flag_devirtualize)
1976 return;
1977
40591473 1978 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1979 return;
1980
40591473
MJ
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1982 {
1983 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1984 return;
b258210c 1985
40591473
MJ
1986 anc_offset = 0;
1987 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1988 gcc_assert (index >= 0);
06d65050
JH
1989 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1990 call, &jfunc))
40591473
MJ
1991 return;
1992 }
1993 else
1994 {
1995 gimple stmt = SSA_NAME_DEF_STMT (obj);
1996 tree expr;
1997
1998 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1999 if (!expr)
2000 return;
2001 index = ipa_get_param_decl_index (info,
2002 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2003 gcc_assert (index >= 0);
06d65050
JH
2004 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2005 call, &jfunc, anc_offset))
40591473
MJ
2006 return;
2007 }
2008
2009 cs = ipa_note_param_call (node, index, call);
2010 ii = cs->indirect_info;
8b7773a4 2011 ii->offset = anc_offset;
ae7e9ddd 2012 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2013 ii->otr_type = obj_type_ref_class (target);
40591473 2014 ii->polymorphic = 1;
b258210c
MJ
2015}
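/* For illustration (a hypothetical caller; A and CALL_FOO are made-up names):
   in

     struct A { virtual int foo (); };

     int call_foo (struct A *a)
     {
       return a->foo ();
     }

   the object operand of the OBJ_TYPE_REF is the default definition of the
   formal A, so, provided no type change of the object is detected, the
   function above records an indirect polymorphic call graph edge with
   param_index pointing at A, a zero offset and the OTR token of FOO.  */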
2016
2017/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2018 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2019 containing intermediate information about each formal parameter. */
b258210c
MJ
2020
2021static void
2022ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2023 struct ipa_node_params *info,
c419671c 2024 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2025{
2026 tree target = gimple_call_fn (call);
2027
25583c4f
RS
2028 if (!target)
2029 return;
b258210c 2030 if (TREE_CODE (target) == SSA_NAME)
c419671c 2031 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2032 else if (virtual_method_call_p (target))
b258210c
MJ
2033 ipa_analyze_virtual_call_uses (node, info, call, target);
2034}
2035
2036
e33c6cd6
MJ
2037/* Analyze the call statement STMT with respect to formal parameters (described
2038 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2039 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2040 intermediate information about each formal parameter. */
be95e2b9 2041
3e293154 2042static void
e33c6cd6 2043ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2044 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2045{
726a989a 2046 if (is_gimple_call (stmt))
c419671c 2047 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2048}
2049
2050/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2051 If OP is a parameter declaration, mark it as used in the info structure
2052 passed in DATA. */
2053
2054static bool
2055visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2056 tree op, void *data)
2057{
2058 struct ipa_node_params *info = (struct ipa_node_params *) data;
2059
2060 op = get_base_address (op);
2061 if (op
2062 && TREE_CODE (op) == PARM_DECL)
2063 {
2064 int index = ipa_get_param_decl_index (info, op);
2065 gcc_assert (index >= 0);
310bc633 2066 ipa_set_param_used (info, index, true);
062c604f
MJ
2067 }
2068
2069 return false;
3e293154
MJ
2070}
2071
2072/* Scan the function body of NODE and inspect the uses of formal parameters.
2073 Store the findings in various structures of the associated ipa_node_params
c419671c 2074 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2075 vector containing intermediate information about each formal parameter. */
be95e2b9 2076
062c604f
MJ
2077static void
2078ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2079 struct param_analysis_info *parms_ainfo)
3e293154 2080{
67348ccc 2081 tree decl = node->decl;
3e293154
MJ
2082 basic_block bb;
2083 struct function *func;
726a989a 2084 gimple_stmt_iterator gsi;
3e293154 2085 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2086 int i;
3e293154 2087
726a989a 2088 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2089 return;
3e293154 2090
5fe8e757
MJ
2091 info->uses_analysis_done = 1;
2092 if (ipa_func_spec_opts_forbid_analysis_p (node))
2093 {
2094 for (i = 0; i < ipa_get_param_count (info); i++)
2095 {
2096 ipa_set_param_used (info, i, true);
2097 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2098 }
2099 return;
2100 }
2101
062c604f
MJ
2102 for (i = 0; i < ipa_get_param_count (info); i++)
2103 {
2104 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2105 int controlled_uses = 0;
2106
062c604f
MJ
2107 /* For SSA regs see if parameter is used. For non-SSA we compute
2108 the flag during modification analysis. */
4502fe8d
MJ
2109 if (is_gimple_reg (parm))
2110 {
67348ccc 2111 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2112 parm);
2113 if (ddef && !has_zero_uses (ddef))
2114 {
2115 imm_use_iterator imm_iter;
2116 use_operand_p use_p;
2117
2118 ipa_set_param_used (info, i, true);
2119 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2120 if (!is_gimple_call (USE_STMT (use_p)))
2121 {
2122 controlled_uses = IPA_UNDESCRIBED_USE;
2123 break;
2124 }
2125 else
2126 controlled_uses++;
2127 }
2128 else
2129 controlled_uses = 0;
2130 }
2131 else
2132 controlled_uses = IPA_UNDESCRIBED_USE;
2133 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2134 }
2135
3e293154
MJ
2136 func = DECL_STRUCT_FUNCTION (decl);
2137 FOR_EACH_BB_FN (bb, func)
2138 {
726a989a 2139 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2140 {
726a989a 2141 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2142
2143 if (is_gimple_debug (stmt))
2144 continue;
2145
c419671c 2146 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2147 walk_stmt_load_store_addr_ops (stmt, info,
2148 visit_ref_for_mod_analysis,
2149 visit_ref_for_mod_analysis,
2150 visit_ref_for_mod_analysis);
518dc859 2151 }
355a7673 2152 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2153 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2154 visit_ref_for_mod_analysis,
2155 visit_ref_for_mod_analysis,
2156 visit_ref_for_mod_analysis);
518dc859 2157 }
3e293154
MJ
2158}
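/* For illustration (a hypothetical function; WRAP and CB are made-up names):
   in

     int wrap (int (*cb) (void))
     {
       return cb () + cb ();
     }

   the default definition of CB is used only by call statements, so the loops
   above mark CB as used and record two controlled uses for it.  If CB also
   appeared in a non-call statement, for instance if it were stored to memory,
   the count would instead become IPA_UNDESCRIBED_USE.  */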
2159
2c9561b5
MJ
2160/* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2161
2162static void
2163free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2164{
2165 int i;
2166
2167 for (i = 0; i < param_count; i++)
2168 {
2169 if (parms_ainfo[i].parm_visited_statements)
2170 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2171 if (parms_ainfo[i].pt_visited_statements)
2172 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2173 }
2174}
2175
dd5a833e
MS
2176/* Initialize the array describing properties of formal parameters
2177 of NODE, analyze their uses and compute jump functions associated
2178 with actual arguments of calls from within NODE. */
062c604f
MJ
2179
2180void
2181ipa_analyze_node (struct cgraph_node *node)
2182{
57dbdc5a 2183 struct ipa_node_params *info;
c419671c 2184 struct param_analysis_info *parms_ainfo;
2c9561b5 2185 int param_count;
062c604f 2186
57dbdc5a
MJ
2187 ipa_check_create_node_params ();
2188 ipa_check_create_edge_args ();
2189 info = IPA_NODE_REF (node);
67348ccc 2190 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2191 ipa_initialize_node_params (node);
2192
2193 param_count = ipa_get_param_count (info);
c419671c
MJ
2194 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2195 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2196
c419671c
MJ
2197 ipa_analyze_params_uses (node, parms_ainfo);
2198 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2199
2c9561b5 2200 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2201 pop_cfun ();
062c604f
MJ
2202}
2203
e248d83f
MJ
2204/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2205 attempt a type-based devirtualization. If successful, return the
2206 target function declaration, otherwise return NULL. */
2207
2208tree
2209ipa_intraprocedural_devirtualization (gimple call)
2210{
2211 tree binfo, token, fndecl;
2212 struct ipa_jump_func jfunc;
2213 tree otr = gimple_call_fn (call);
2214
2215 jfunc.type = IPA_JF_UNKNOWN;
2216 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2217 call, obj_type_ref_class (otr));
e248d83f
MJ
2218 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2219 return NULL_TREE;
2220 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2221 if (!binfo)
2222 return NULL_TREE;
2223 token = OBJ_TYPE_REF_TOKEN (otr);
ae7e9ddd 2224 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
e248d83f 2225 binfo);
450ad0cd
JH
2226#ifdef ENABLE_CHECKING
2227 if (fndecl)
2228 gcc_assert (possible_polymorphic_call_target_p
2229 (otr, cgraph_get_node (fndecl)));
2230#endif
e248d83f
MJ
2231 return fndecl;
2232}
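/* For illustration (a hypothetical situation; B, USE_B and the local B are
   made-up names): in something like

     struct B { virtual int f (); };

     int use_b (void)
     {
       B b;
       B *p = &b;
       return p->f ();
     }

   if the call still appears as an OBJ_TYPE_REF and its object operand refers
   to the local B (for instance as &b), compute_known_type_jump_func can
   produce a KNOWN_TYPE jump function for B, and the BINFO lookup above then
   resolves the call to B::f without any interprocedural propagation.  */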
062c604f 2233
61502ca8 2234/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
2235 is being inlined, knowing that DST is of type ancestor and src of known
2236 type. */
2237
2238static void
2239combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2240 struct ipa_jump_func *dst)
2241{
c7573249
MJ
2242 HOST_WIDE_INT combined_offset;
2243 tree combined_type;
b258210c 2244
b8f6e610
MJ
2245 if (!ipa_get_jf_ancestor_type_preserved (dst))
2246 {
2247 dst->type = IPA_JF_UNKNOWN;
2248 return;
2249 }
2250
7b872d9e
MJ
2251 combined_offset = ipa_get_jf_known_type_offset (src)
2252 + ipa_get_jf_ancestor_offset (dst);
2253 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2254
7b872d9e
MJ
2255 ipa_set_jf_known_type (dst, combined_offset,
2256 ipa_get_jf_known_type_base_type (src),
2257 combined_type);
b258210c
MJ
2258}
2259
be95e2b9 2260/* Update the jump functions associated with call graph edge E when the call
3e293154 2261 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2262 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2263
3e293154
MJ
2264static void
2265update_jump_functions_after_inlining (struct cgraph_edge *cs,
2266 struct cgraph_edge *e)
2267{
2268 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2269 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2270 int count = ipa_get_cs_argument_count (args);
2271 int i;
2272
2273 for (i = 0; i < count; i++)
2274 {
b258210c 2275 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2276
685b0d13
MJ
2277 if (dst->type == IPA_JF_ANCESTOR)
2278 {
b258210c 2279 struct ipa_jump_func *src;
8b7773a4 2280 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2281
b258210c
MJ
2282 /* Variable number of arguments can cause havoc if we try to access
2283 one that does not exist in the inlined edge. So make sure we
2284 don't. */
8b7773a4 2285 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2286 {
2287 dst->type = IPA_JF_UNKNOWN;
2288 continue;
2289 }
2290
8b7773a4
MJ
2291 src = ipa_get_ith_jump_func (top, dst_fid);
2292
2293 if (src->agg.items
2294 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2295 {
2296 struct ipa_agg_jf_item *item;
2297 int j;
2298
2299 /* Currently we do not produce clobber aggregate jump functions,
2300 replace with merging when we do. */
2301 gcc_assert (!dst->agg.items);
2302
9771b263 2303 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2304 dst->agg.by_ref = src->agg.by_ref;
9771b263 2305 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2306 item->offset -= dst->value.ancestor.offset;
2307 }
2308
b258210c
MJ
2309 if (src->type == IPA_JF_KNOWN_TYPE)
2310 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2311 else if (src->type == IPA_JF_PASS_THROUGH
2312 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2313 {
2314 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2315 dst->value.ancestor.agg_preserved &=
2316 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2317 dst->value.ancestor.type_preserved &=
2318 src->value.pass_through.type_preserved;
8b7773a4 2319 }
b258210c
MJ
2320 else if (src->type == IPA_JF_ANCESTOR)
2321 {
2322 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2323 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2324 dst->value.ancestor.agg_preserved &=
2325 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2326 dst->value.ancestor.type_preserved &=
2327 src->value.ancestor.type_preserved;
b258210c
MJ
2328 }
2329 else
2330 dst->type = IPA_JF_UNKNOWN;
2331 }
2332 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2333 {
b258210c
MJ
2334 struct ipa_jump_func *src;
2335 /* We must check range due to calls with variable number of arguments
2336 and we cannot combine jump functions with operations. */
2337 if (dst->value.pass_through.operation == NOP_EXPR
2338 && (dst->value.pass_through.formal_id
2339 < ipa_get_cs_argument_count (top)))
2340 {
8b7773a4
MJ
2341 int dst_fid = dst->value.pass_through.formal_id;
2342 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2343 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2344
b8f6e610
MJ
2345 switch (src->type)
2346 {
2347 case IPA_JF_UNKNOWN:
2348 dst->type = IPA_JF_UNKNOWN;
2349 break;
2350 case IPA_JF_KNOWN_TYPE:
2351 ipa_set_jf_known_type (dst,
2352 ipa_get_jf_known_type_offset (src),
2353 ipa_get_jf_known_type_base_type (src),
2354 ipa_get_jf_known_type_base_type (src));
2355 break;
2356 case IPA_JF_CONST:
2357 ipa_set_jf_cst_copy (dst, src);
2358 break;
2359
2360 case IPA_JF_PASS_THROUGH:
2361 {
2362 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2363 enum tree_code operation;
2364 operation = ipa_get_jf_pass_through_operation (src);
2365
2366 if (operation == NOP_EXPR)
2367 {
2368 bool agg_p, type_p;
2369 agg_p = dst_agg_p
2370 && ipa_get_jf_pass_through_agg_preserved (src);
2371 type_p = ipa_get_jf_pass_through_type_preserved (src)
2372 && ipa_get_jf_pass_through_type_preserved (dst);
2373 ipa_set_jf_simple_pass_through (dst, formal_id,
2374 agg_p, type_p);
2375 }
2376 else
2377 {
2378 tree operand = ipa_get_jf_pass_through_operand (src);
2379 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2380 operation);
2381 }
2382 break;
2383 }
2384 case IPA_JF_ANCESTOR:
2385 {
2386 bool agg_p, type_p;
2387 agg_p = dst_agg_p
2388 && ipa_get_jf_ancestor_agg_preserved (src);
2389 type_p = ipa_get_jf_ancestor_type_preserved (src)
2390 && ipa_get_jf_pass_through_type_preserved (dst);
2391 ipa_set_ancestor_jf (dst,
2392 ipa_get_jf_ancestor_offset (src),
2393 ipa_get_jf_ancestor_type (src),
2394 ipa_get_jf_ancestor_formal_id (src),
2395 agg_p, type_p);
2396 break;
2397 }
2398 default:
2399 gcc_unreachable ();
2400 }
8b7773a4
MJ
2401
2402 if (src->agg.items
b8f6e610 2403 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2404 {
2405 /* Currently we do not produce clobber aggregate jump
2406 functions, replace with merging when we do. */
2407 gcc_assert (!dst->agg.items);
2408
2409 dst->agg.by_ref = src->agg.by_ref;
9771b263 2410 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2411 }
b258210c
MJ
2412 }
2413 else
2414 dst->type = IPA_JF_UNKNOWN;
3e293154 2415 }
b258210c
MJ
2416 }
2417}
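/* A worked example of the combination above (hypothetical functions A, B and
   C): suppose A calls B with the constant 5 as its first argument and B
   passes its first formal unchanged to C.  Before inlining, the A->B edge
   carries an IPA_JF_CONST jump function and the B->C edge an
   IPA_JF_PASS_THROUGH of B's formal 0.  When the A->B call is inlined, the
   pass-through case above copies the constant jump function, so the edge
   leading to C now records that C receives the constant 5.  */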
2418
2419/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2420 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2421
3949c4a7 2422struct cgraph_edge *
81fa35bd 2423ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2424{
2425 struct cgraph_node *callee;
0f378cb5 2426 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2427 bool unreachable = false;
b258210c 2428
ceeffab0
MJ
2429 if (TREE_CODE (target) == ADDR_EXPR)
2430 target = TREE_OPERAND (target, 0);
b258210c 2431 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2432 {
2433 target = canonicalize_constructor_val (target, NULL);
2434 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2435 {
c13bc3d9
MJ
2436 if (ie->indirect_info->member_ptr)
2437 /* Member pointer call that goes through a VMT lookup. */
2438 return NULL;
2439
a0a7b611
JH
2440 if (dump_file)
2441 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2442 " in %s/%i, making it unreachable.\n",
fec39fa6 2443 ie->caller->name (), ie->caller->order);
48b1474e
MJ
2444 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2445 callee = cgraph_get_create_node (target);
2446 unreachable = true;
a0a7b611 2447 }
48b1474e
MJ
2448 else
2449 callee = cgraph_get_node (target);
a0a7b611 2450 }
48b1474e
MJ
2451 else
2452 callee = cgraph_get_node (target);
a0a7b611
JH
2453
2454 /* Because may-edges are not explicitly represented and vtable may be external,
2455 we may create the first reference to the object in the unit. */
2456 if (!callee || callee->global.inlined_to)
2457 {
a0a7b611
JH
2458
2459 /* We had better ensure we can refer to it.
2460 In the case of static functions we are out of luck, since we already
2461 removed its body. In the case of public functions we may or may
2462 not introduce the reference. */
2463 if (!canonicalize_constructor_val (target, NULL)
2464 || !TREE_PUBLIC (target))
2465 {
2466 if (dump_file)
2467 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2468 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
fec39fa6 2469 xstrdup (ie->caller->name ()),
67348ccc 2470 ie->caller->order,
fec39fa6 2471 xstrdup (ie->callee->name ()),
67348ccc 2472 ie->callee->order);
a0a7b611
JH
2473 return NULL;
2474 }
6f99e449 2475 callee = cgraph_get_create_node (target);
a0a7b611 2476 }
1dbee8c9 2477 ipa_check_create_node_params ();
ceeffab0 2478
81fa35bd
MJ
2479 /* We can not make edges to inline clones. It is bug that someone removed
2480 the cgraph node too early. */
17afc0fe
JH
2481 gcc_assert (!callee->global.inlined_to);
2482
48b1474e 2483 if (dump_file && !unreachable)
b258210c
MJ
2484 {
2485 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2486 "(%s/%i -> %s/%i), for stmt ",
b258210c 2487 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
fec39fa6 2488 xstrdup (ie->caller->name ()),
67348ccc 2489 ie->caller->order,
fec39fa6 2490 xstrdup (callee->name ()),
67348ccc 2491 callee->order);
b258210c
MJ
2492 if (ie->call_stmt)
2493 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2494 else
2495 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2496 }
2497 ie = cgraph_make_edge_direct (ie, callee);
2498 es = inline_edge_summary (ie);
2499 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2500 - eni_size_weights.call_cost);
2501 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2502 - eni_time_weights.call_cost);
749aa96d 2503
b258210c 2504 return ie;
3e293154
MJ
2505}
2506
8b7773a4
MJ
2507/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2508 return NULL if there is not any. BY_REF specifies whether the value has to
2509 be passed by reference or by value. */
2510
2511tree
2512ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2513 HOST_WIDE_INT offset, bool by_ref)
2514{
2515 struct ipa_agg_jf_item *item;
2516 int i;
2517
2518 if (by_ref != agg->by_ref)
2519 return NULL;
2520
9771b263 2521 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2522 if (item->offset == offset)
2523 {
2524 /* Currently we do not have clobber values, return NULL for them once
2525 we do. */
2526 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2527 return item->value;
2528 }
8b7773a4
MJ
2529 return NULL;
2530}
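/* For illustration (a hypothetical caller; S, FN, MY_HANDLER and PROCESS are
   made-up names): if a caller does

     struct S s;
     s.fn = my_handler;
     process (&s);

   the jump function built for the argument may carry an aggregate item at the
   bit offset of FN with value MY_HANDLER and agg.by_ref set.  Querying that
   offset with BY_REF true then returns MY_HANDLER, while any other offset or
   a mismatched BY_REF yields NULL.  */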
2531
4502fe8d 2532/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2533 reference description RDESC. Return true if the reference has been
2534 successfully found and removed. */
4502fe8d 2535
568cda29 2536static bool
5e20cdc9 2537remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2538{
2539 struct ipa_ref *to_del;
2540 struct cgraph_edge *origin;
2541
2542 origin = rdesc->cs;
a854f856
MJ
2543 if (!origin)
2544 return false;
67348ccc 2545 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2546 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2547 if (!to_del)
2548 return false;
2549
4502fe8d
MJ
2550 ipa_remove_reference (to_del);
2551 if (dump_file)
2552 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
2553 xstrdup (origin->caller->name ()),
2554 origin->caller->order, xstrdup (symbol->name ()));
568cda29 2555 return true;
4502fe8d
MJ
2556}
2557
2558/* If JFUNC has a reference description with refcount different from
2559 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2560 NULL. JFUNC must be a constant jump function. */
2561
2562static struct ipa_cst_ref_desc *
2563jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2564{
2565 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2566 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2567 return rdesc;
2568 else
2569 return NULL;
2570}
2571
568cda29
MJ
2572/* If the value of constant jump function JFUNC is an address of a function
2573 declaration, return the associated call graph node. Otherwise return
2574 NULL. */
2575
2576static cgraph_node *
2577cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2578{
2579 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2580 tree cst = ipa_get_jf_constant (jfunc);
2581 if (TREE_CODE (cst) != ADDR_EXPR
2582 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2583 return NULL;
2584
2585 return cgraph_get_node (TREE_OPERAND (cst, 0));
2586}
2587
2588
2589/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2590 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2591 the edge specified in the rdesc. Return false if either the symbol or the
2592 reference could not be found, otherwise return true. */
2593
2594static bool
2595try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2596{
2597 struct ipa_cst_ref_desc *rdesc;
2598 if (jfunc->type == IPA_JF_CONST
2599 && (rdesc = jfunc_rdesc_usable (jfunc))
2600 && --rdesc->refcount == 0)
2601 {
5e20cdc9 2602 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2603 if (!symbol)
2604 return false;
2605
2606 return remove_described_reference (symbol, rdesc);
2607 }
2608 return true;
2609}
2610
b258210c
MJ
2611/* Try to find a destination for indirect edge IE that corresponds to a simple
2612 call or a call of a member function pointer and where the destination is a
2613 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2614 determined, return the newly created direct edge, otherwise return NULL.
2615 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2616
b258210c
MJ
2617static struct cgraph_edge *
2618try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2619 struct ipa_jump_func *jfunc,
2620 struct ipa_node_params *new_root_info)
b258210c 2621{
4502fe8d 2622 struct cgraph_edge *cs;
b258210c 2623 tree target;
042ae7d2 2624 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2625
8b7773a4 2626 if (ie->indirect_info->agg_contents)
d250540a
MJ
2627 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2628 ie->indirect_info->offset,
2629 ie->indirect_info->by_ref);
b258210c 2630 else
d250540a
MJ
2631 target = ipa_value_from_jfunc (new_root_info, jfunc);
2632 if (!target)
2633 return NULL;
4502fe8d
MJ
2634 cs = ipa_make_edge_direct_to_target (ie, target);
2635
a12cd2db 2636 if (cs && !agg_contents)
568cda29
MJ
2637 {
2638 bool ok;
2639 gcc_checking_assert (cs->callee
ae6d0907
MJ
2640 && (cs != ie
2641 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2642 || !cgraph_node_for_jfunc (jfunc)
2643 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2644 ok = try_decrement_rdesc_refcount (jfunc);
2645 gcc_checking_assert (ok);
2646 }
4502fe8d
MJ
2647
2648 return cs;
b258210c
MJ
2649}
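/* For illustration (hypothetical functions IMPL, DISPATCH and USE): given

     static int impl (int i) { return i + 1; }

     int dispatch (int (*fn) (int)) { return fn (2); }

     int use (void) { return dispatch (impl); }

   when the call to DISPATCH is inlined into USE (or DISPATCH is cloned by
   IPA-CP for that value), the jump function describing FN evaluates to the
   constant &impl, so the function above can turn the indirect edge inside
   DISPATCH into a direct call to IMPL.  This is a sketch of the intended
   effect, not output of the pass.  */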
2650
d250540a
MJ
2651/* Try to find a destination for indirect edge IE that corresponds to a virtual
2652 call based on a formal parameter which is described by jump function JFUNC
2653 and if it can be determined, make it direct and return the direct edge.
2654 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2655 are relative to. */
b258210c
MJ
2656
2657static struct cgraph_edge *
2658try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2659 struct ipa_jump_func *jfunc,
2660 struct ipa_node_params *new_root_info)
3e293154 2661{
c7573249 2662 tree binfo, target;
b258210c 2663
d250540a
MJ
2664 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2665
da942ca0 2666 if (!binfo)
b258210c 2667 return NULL;
3e293154 2668
da942ca0
JH
2669 if (TREE_CODE (binfo) != TREE_BINFO)
2670 {
c49bdb2e
JH
2671 binfo = gimple_extract_devirt_binfo_from_cst
2672 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2673 if (!binfo)
2674 return NULL;
2675 }
2676
d250540a 2677 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2678 ie->indirect_info->otr_type);
b258210c 2679 if (binfo)
c7573249
MJ
2680 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2681 binfo);
b258210c
MJ
2682 else
2683 return NULL;
2684
2685 if (target)
450ad0cd
JH
2686 {
2687#ifdef ENABLE_CHECKING
2688 gcc_assert (possible_polymorphic_call_target_p
2689 (ie, cgraph_get_node (target)));
2690#endif
2691 return ipa_make_edge_direct_to_target (ie, target);
2692 }
b258210c
MJ
2693 else
2694 return NULL;
3e293154
MJ
2695}
2696
2697/* Update the param called notes associated with NODE when CS is being inlined,
2698 assuming NODE is (potentially indirectly) inlined into CS->callee.
2699 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2700 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2701 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 2702
f8e2a1ed 2703static bool
e33c6cd6
MJ
2704update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2705 struct cgraph_node *node,
9771b263 2706 vec<cgraph_edge_p> *new_edges)
3e293154 2707{
9e97ff61 2708 struct ipa_edge_args *top;
b258210c 2709 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2710 struct ipa_node_params *new_root_info;
f8e2a1ed 2711 bool res = false;
3e293154 2712
e33c6cd6 2713 ipa_check_create_edge_args ();
9e97ff61 2714 top = IPA_EDGE_REF (cs);
d250540a
MJ
2715 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2716 ? cs->caller->global.inlined_to
2717 : cs->caller);
e33c6cd6
MJ
2718
2719 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2720 {
e33c6cd6 2721 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2722 struct ipa_jump_func *jfunc;
8b7773a4 2723 int param_index;
3e293154 2724
e33c6cd6 2725 next_ie = ie->next_callee;
3e293154 2726
5f902d76
JH
2727 if (ici->param_index == -1)
2728 continue;
e33c6cd6 2729
3e293154 2730 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2731 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2732 {
5ee53a06 2733 ici->param_index = -1;
3e293154
MJ
2734 continue;
2735 }
2736
8b7773a4
MJ
2737 param_index = ici->param_index;
2738 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2739
2740 if (!flag_indirect_inlining)
36b72910
JH
2741 new_direct_edge = NULL;
2742 else if (ici->polymorphic)
d250540a
MJ
2743 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2744 new_root_info);
b258210c 2745 else
d250540a
MJ
2746 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2747 new_root_info);
042ae7d2
JH
2748 /* If speculation was removed, then we need to do nothing. */
2749 if (new_direct_edge && new_direct_edge != ie)
2750 {
2751 new_direct_edge->indirect_inlining_edge = 1;
2752 top = IPA_EDGE_REF (cs);
2753 res = true;
2754 }
2755 else if (new_direct_edge)
685b0d13 2756 {
b258210c 2757 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2758 if (new_direct_edge->call_stmt)
2759 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2760 = !gimple_check_call_matching_types (
2761 new_direct_edge->call_stmt,
67348ccc 2762 new_direct_edge->callee->decl, false);
b258210c
MJ
2763 if (new_edges)
2764 {
9771b263 2765 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2766 res = true;
2767 }
042ae7d2 2768 top = IPA_EDGE_REF (cs);
685b0d13 2769 }
36b72910
JH
2770 else if (jfunc->type == IPA_JF_PASS_THROUGH
2771 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2772 {
2773 if (ici->agg_contents
2774 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2775 ici->param_index = -1;
2776 else
2777 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2778 }
2779 else if (jfunc->type == IPA_JF_ANCESTOR)
2780 {
2781 if (ici->agg_contents
2782 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2783 ici->param_index = -1;
2784 else
2785 {
2786 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
68377e53
JH
2787 if (ipa_get_jf_ancestor_offset (jfunc))
2788 ici->outer_type = NULL;
36b72910
JH
2789 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2790 }
2791 }
2792 else
2793 /* Either we can find a destination for this edge now or never. */
2794 ici->param_index = -1;
3e293154 2795 }
e33c6cd6 2796
f8e2a1ed 2797 return res;
3e293154
MJ
2798}
2799
2800/* Recursively traverse subtree of NODE (including node) made of inlined
2801 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2802 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2803 update_jump_functions_after_inlining on all non-inlined edges that lead out
2804 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2805 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2806 created. */
be95e2b9 2807
f8e2a1ed 2808static bool
3e293154
MJ
2809propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2810 struct cgraph_node *node,
9771b263 2811 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2812{
2813 struct cgraph_edge *e;
f8e2a1ed 2814 bool res;
3e293154 2815
e33c6cd6 2816 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2817
2818 for (e = node->callees; e; e = e->next_callee)
2819 if (!e->inline_failed)
f8e2a1ed 2820 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2821 else
2822 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2823 for (e = node->indirect_calls; e; e = e->next_callee)
2824 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2825
2826 return res;
3e293154
MJ
2827}
2828
4502fe8d
MJ
2829/* Combine two controlled uses counts as done during inlining. */
2830
2831static int
2832combine_controlled_uses_counters (int c, int d)
2833{
2834 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2835 return IPA_UNDESCRIBED_USE;
2836 else
2837 return c + d - 1;
2838}
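/* A worked example of the formula above (a reading of its intent): if the new
   root knows of C = 3 controlled uses and the inlined callee described D = 2
   controlled uses of the corresponding parameter, the combined count is
   3 + 2 - 1 = 4; the call statement being inlined accounted for one of the
   caller-side uses and disappears, being replaced by the D uses from the
   callee's body.  If either count is IPA_UNDESCRIBED_USE, the result stays
   IPA_UNDESCRIBED_USE.  */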
2839
2840/* Propagate the number of controlled uses from CS->callee to the new root of the
2841 tree of inlined nodes. */
2842
2843static void
2844propagate_controlled_uses (struct cgraph_edge *cs)
2845{
2846 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2847 struct cgraph_node *new_root = cs->caller->global.inlined_to
2848 ? cs->caller->global.inlined_to : cs->caller;
2849 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2850 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2851 int count, i;
2852
2853 count = MIN (ipa_get_cs_argument_count (args),
2854 ipa_get_param_count (old_root_info));
2855 for (i = 0; i < count; i++)
2856 {
2857 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2858 struct ipa_cst_ref_desc *rdesc;
2859
2860 if (jf->type == IPA_JF_PASS_THROUGH)
2861 {
2862 int src_idx, c, d;
2863 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2864 c = ipa_get_controlled_uses (new_root_info, src_idx);
2865 d = ipa_get_controlled_uses (old_root_info, i);
2866
2867 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2868 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2869 c = combine_controlled_uses_counters (c, d);
2870 ipa_set_controlled_uses (new_root_info, src_idx, c);
2871 if (c == 0 && new_root_info->ipcp_orig_node)
2872 {
2873 struct cgraph_node *n;
2874 struct ipa_ref *ref;
2875 tree t = new_root_info->known_vals[src_idx];
2876
2877 if (t && TREE_CODE (t) == ADDR_EXPR
2878 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2879 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2880 && (ref = ipa_find_reference (new_root,
2881 n, NULL, 0)))
4502fe8d
MJ
2882 {
2883 if (dump_file)
2884 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2885 "reference from %s/%i to %s/%i.\n",
fec39fa6 2886 xstrdup (new_root->name ()),
67348ccc 2887 new_root->order,
fec39fa6 2888 xstrdup (n->name ()), n->order);
4502fe8d
MJ
2889 ipa_remove_reference (ref);
2890 }
2891 }
2892 }
2893 else if (jf->type == IPA_JF_CONST
2894 && (rdesc = jfunc_rdesc_usable (jf)))
2895 {
2896 int d = ipa_get_controlled_uses (old_root_info, i);
2897 int c = rdesc->refcount;
2898 rdesc->refcount = combine_controlled_uses_counters (c, d);
2899 if (rdesc->refcount == 0)
2900 {
2901 tree cst = ipa_get_jf_constant (jf);
2902 struct cgraph_node *n;
2903 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2904 && TREE_CODE (TREE_OPERAND (cst, 0))
2905 == FUNCTION_DECL);
2906 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2907 if (n)
2908 {
2909 struct cgraph_node *clone;
568cda29 2910 bool ok;
67348ccc 2911 ok = remove_described_reference (n, rdesc);
568cda29 2912 gcc_checking_assert (ok);
4502fe8d
MJ
2913
2914 clone = cs->caller;
2915 while (clone->global.inlined_to
2916 && clone != rdesc->cs->caller
2917 && IPA_NODE_REF (clone)->ipcp_orig_node)
2918 {
2919 struct ipa_ref *ref;
67348ccc
DM
2920 ref = ipa_find_reference (clone,
2921 n, NULL, 0);
4502fe8d
MJ
2922 if (ref)
2923 {
2924 if (dump_file)
2925 fprintf (dump_file, "ipa-prop: Removing "
2926 "cloning-created reference "
2927 "from %s/%i to %s/%i.\n",
fec39fa6 2928 xstrdup (clone->name ()),
67348ccc 2929 clone->order,
fec39fa6 2930 xstrdup (n->name ()),
67348ccc 2931 n->order);
4502fe8d
MJ
2932 ipa_remove_reference (ref);
2933 }
2934 clone = clone->callers->caller;
2935 }
2936 }
2937 }
2938 }
2939 }
2940
2941 for (i = ipa_get_param_count (old_root_info);
2942 i < ipa_get_cs_argument_count (args);
2943 i++)
2944 {
2945 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2946
2947 if (jf->type == IPA_JF_CONST)
2948 {
2949 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2950 if (rdesc)
2951 rdesc->refcount = IPA_UNDESCRIBED_USE;
2952 }
2953 else if (jf->type == IPA_JF_PASS_THROUGH)
2954 ipa_set_controlled_uses (new_root_info,
2955 jf->value.pass_through.formal_id,
2956 IPA_UNDESCRIBED_USE);
2957 }
2958}
2959
3e293154
MJ
2960/* Update jump functions and call note functions on inlining the call site CS.
2961 CS is expected to lead to a node already cloned by
2962 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2963 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2964 created. */
be95e2b9 2965
f8e2a1ed 2966bool
3e293154 2967ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2968 vec<cgraph_edge_p> *new_edges)
3e293154 2969{
5ee53a06 2970 bool changed;
f8e2a1ed
MJ
2971 /* Do nothing if the preparation phase has not been carried out yet
2972 (i.e. during early inlining). */
9771b263 2973 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2974 return false;
2975 gcc_assert (ipa_edge_args_vector);
2976
4502fe8d 2977 propagate_controlled_uses (cs);
5ee53a06
JH
2978 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2979
5ee53a06 2980 return changed;
518dc859
RL
2981}
2982
771578a0
MJ
2983/* Frees all dynamically allocated structures that the argument info points
2984 to. */
be95e2b9 2985
518dc859 2986void
771578a0 2987ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2988{
9771b263 2989 vec_free (args->jump_functions);
771578a0 2990 memset (args, 0, sizeof (*args));
518dc859
RL
2991}
2992
771578a0 2993/* Free all ipa_edge_args structures. */
be95e2b9 2994
518dc859 2995void
771578a0 2996ipa_free_all_edge_args (void)
518dc859 2997{
771578a0
MJ
2998 int i;
2999 struct ipa_edge_args *args;
518dc859 3000
9771b263
DN
3001 if (!ipa_edge_args_vector)
3002 return;
3003
3004 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3005 ipa_free_edge_args_substructures (args);
3006
9771b263 3007 vec_free (ipa_edge_args_vector);
518dc859
RL
3008}
3009
771578a0
MJ
3010/* Frees all dynamically allocated structures that the param info points
3011 to. */
be95e2b9 3012
518dc859 3013void
771578a0 3014ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 3015{
9771b263 3016 info->descriptors.release ();
310bc633
MJ
3017 free (info->lattices);
3018 /* Lattice values and their sources are deallocated with their allocation
3019 pool. */
9771b263 3020 info->known_vals.release ();
771578a0 3021 memset (info, 0, sizeof (*info));
518dc859
RL
3022}
3023
771578a0 3024/* Free all ipa_node_params structures. */
be95e2b9 3025
518dc859 3026void
771578a0 3027ipa_free_all_node_params (void)
518dc859 3028{
771578a0
MJ
3029 int i;
3030 struct ipa_node_params *info;
518dc859 3031
9771b263 3032 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3033 ipa_free_node_params_substructures (info);
3034
9771b263 3035 ipa_node_params_vector.release ();
771578a0
MJ
3036}
3037
2c9561b5
MJ
3038/* Set the aggregate replacements of NODE to be AGGVALS. */
3039
3040void
3041ipa_set_node_agg_value_chain (struct cgraph_node *node,
3042 struct ipa_agg_replacement_value *aggvals)
3043{
9771b263
DN
3044 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3045 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3046
9771b263 3047 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3048}
3049
771578a0 3050/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3051
771578a0 3052static void
5c0466b5 3053ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3054{
568cda29
MJ
3055 struct ipa_edge_args *args;
3056
3057 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3058 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3059 return;
568cda29
MJ
3060
3061 args = IPA_EDGE_REF (cs);
3062 if (args->jump_functions)
3063 {
3064 struct ipa_jump_func *jf;
3065 int i;
3066 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3067 {
3068 struct ipa_cst_ref_desc *rdesc;
3069 try_decrement_rdesc_refcount (jf);
3070 if (jf->type == IPA_JF_CONST
3071 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3072 && rdesc->cs == cs)
3073 rdesc->cs = NULL;
3074 }
568cda29
MJ
3075 }
3076
771578a0 3077 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3078}
3079
771578a0 3080/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3081
771578a0 3082static void
5c0466b5 3083ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3084{
dd6d1ad7 3085 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3086 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3087 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3088 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3089 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3090}
3091
8b7773a4 3092/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3093
771578a0
MJ
3094static void
3095ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3096 __attribute__((unused)) void *data)
771578a0
MJ
3097{
3098 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3099 unsigned int i;
771578a0
MJ
3100
3101 ipa_check_create_edge_args ();
3102
3103 old_args = IPA_EDGE_REF (src);
3104 new_args = IPA_EDGE_REF (dst);
3105
9771b263 3106 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3107
9771b263 3108 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3109 {
3110 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3111 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3112
3113 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3114
3115 if (src_jf->type == IPA_JF_CONST)
3116 {
3117 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3118
3119 if (!src_rdesc)
3120 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3121 else if (src->caller == dst->caller)
3122 {
3123 struct ipa_ref *ref;
5e20cdc9 3124 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3125 gcc_checking_assert (n);
67348ccc 3126 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3127 src->call_stmt, src->lto_stmt_uid);
3128 gcc_checking_assert (ref);
67348ccc 3129 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3130
3131 gcc_checking_assert (ipa_refdesc_pool);
3132 struct ipa_cst_ref_desc *dst_rdesc
3133 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3134 dst_rdesc->cs = dst;
3135 dst_rdesc->refcount = src_rdesc->refcount;
3136 dst_rdesc->next_duplicate = NULL;
3137 dst_jf->value.constant.rdesc = dst_rdesc;
3138 }
4502fe8d
MJ
3139 else if (src_rdesc->cs == src)
3140 {
3141 struct ipa_cst_ref_desc *dst_rdesc;
3142 gcc_checking_assert (ipa_refdesc_pool);
3143 dst_rdesc
3144 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3145 dst_rdesc->cs = dst;
4502fe8d 3146 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3147 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3148 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3149 dst_jf->value.constant.rdesc = dst_rdesc;
3150 }
3151 else
3152 {
3153 struct ipa_cst_ref_desc *dst_rdesc;
3154 /* This can happen during inlining, when a JFUNC can refer to a
3155 reference taken in a function up in the tree of inline clones.
3156 We need to find the duplicate that refers to our tree of
3157 inline clones. */
3158
3159 gcc_assert (dst->caller->global.inlined_to);
3160 for (dst_rdesc = src_rdesc->next_duplicate;
3161 dst_rdesc;
3162 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3163 {
3164 struct cgraph_node *top;
3165 top = dst_rdesc->cs->caller->global.inlined_to
3166 ? dst_rdesc->cs->caller->global.inlined_to
3167 : dst_rdesc->cs->caller;
3168 if (dst->caller->global.inlined_to == top)
3169 break;
3170 }
44a60244 3171 gcc_assert (dst_rdesc);
4502fe8d
MJ
3172 dst_jf->value.constant.rdesc = dst_rdesc;
3173 }
3174 }
3175 }
771578a0
MJ
3176}
3177
3178/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3179
771578a0
MJ
3180static void
3181ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3182 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3183{
3184 struct ipa_node_params *old_info, *new_info;
2c9561b5 3185 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3186
3187 ipa_check_create_node_params ();
3188 old_info = IPA_NODE_REF (src);
3189 new_info = IPA_NODE_REF (dst);
771578a0 3190
9771b263 3191 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3192 new_info->lattices = NULL;
771578a0 3193 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3194
3949c4a7
MJ
3195 new_info->uses_analysis_done = old_info->uses_analysis_done;
3196 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3197
3198 old_av = ipa_get_agg_replacements_for_node (src);
3199 if (!old_av)
3200 return;
3201
3202 new_av = NULL;
3203 while (old_av)
3204 {
3205 struct ipa_agg_replacement_value *v;
3206
3207 v = ggc_alloc_ipa_agg_replacement_value ();
3208 memcpy (v, old_av, sizeof (*v));
3209 v->next = new_av;
3210 new_av = v;
3211 old_av = old_av->next;
3212 }
3213 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3214}
3215
40982661
JH
3216
3217/* Analyze newly added function into callgraph. */
3218
3219static void
3220ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3221{
0136f8f0
AH
3222 if (cgraph_function_with_gimple_body_p (node))
3223 ipa_analyze_node (node);
40982661
JH
3224}
3225
771578a0 3226/* Register our cgraph hooks if they are not already there. */
be95e2b9 3227
518dc859 3228void
771578a0 3229ipa_register_cgraph_hooks (void)
518dc859 3230{
771578a0
MJ
3231 if (!edge_removal_hook_holder)
3232 edge_removal_hook_holder =
3233 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3234 if (!node_removal_hook_holder)
3235 node_removal_hook_holder =
3236 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3237 if (!edge_duplication_hook_holder)
3238 edge_duplication_hook_holder =
3239 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3240 if (!node_duplication_hook_holder)
3241 node_duplication_hook_holder =
3242 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3243 function_insertion_hook_holder =
3244 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3245}
518dc859 3246
771578a0 3247/* Unregister our cgraph hooks.  */
be95e2b9 3248
771578a0
MJ
3249static void
3250ipa_unregister_cgraph_hooks (void)
3251{
3252 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3253 edge_removal_hook_holder = NULL;
3254 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3255 node_removal_hook_holder = NULL;
3256 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3257 edge_duplication_hook_holder = NULL;
3258 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3259 node_duplication_hook_holder = NULL;
40982661
JH
3260 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3261 function_insertion_hook_holder = NULL;
771578a0
MJ
3262}
3263
3264/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3265 longer needed after ipa-cp. */
be95e2b9 3266
771578a0 3267void
e33c6cd6 3268ipa_free_all_structures_after_ipa_cp (void)
3e293154 3269{
5ee53a06 3270 if (!optimize)
3e293154
MJ
3271 {
3272 ipa_free_all_edge_args ();
3273 ipa_free_all_node_params ();
310bc633
MJ
3274 free_alloc_pool (ipcp_sources_pool);
3275 free_alloc_pool (ipcp_values_pool);
2c9561b5 3276 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3277 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3278 if (ipa_refdesc_pool)
3279 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3280 }
3281}
3282
3283/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3284 longer needed after indirect inlining. */
be95e2b9 3285
3e293154 3286void
e33c6cd6 3287ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3288{
3289 ipa_free_all_edge_args ();
3290 ipa_free_all_node_params ();
3291 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3292 if (ipcp_sources_pool)
3293 free_alloc_pool (ipcp_sources_pool);
3294 if (ipcp_values_pool)
3295 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3296 if (ipcp_agg_lattice_pool)
3297 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3298 if (ipa_refdesc_pool)
3299 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3300}
3301
dcd416e3 3302/* Print the ipa_node_params parameter descriptors of function
518dc859 3303 NODE to F. */
be95e2b9 3304
518dc859 3305void
2c9561b5 3306ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3307{
3308 int i, count;
3e293154 3309 struct ipa_node_params *info;
518dc859 3310
67348ccc 3311 if (!node->definition)
3e293154
MJ
3312 return;
3313 info = IPA_NODE_REF (node);
9de04252 3314 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3315 node->name (), node->order);
3e293154
MJ
3316 count = ipa_get_param_count (info);
3317 for (i = 0; i < count; i++)
518dc859 3318 {
4502fe8d
MJ
3319 int c;
3320
e067bd43 3321 ipa_dump_param (f, info, i);
339f49ec
JH
3322 if (ipa_is_param_used (info, i))
3323 fprintf (f, " used");
4502fe8d
MJ
3324 c = ipa_get_controlled_uses (info, i);
3325 if (c == IPA_UNDESCRIBED_USE)
3326 fprintf (f, " undescribed_use");
3327 else
3328 fprintf (f, " controlled_uses=%i", c);
3e293154 3329 fprintf (f, "\n");
518dc859
RL
3330 }
3331}
dcd416e3 3332
ca30a539 3333/* Print parameter descriptors of all functions in the
3e293154 3334 callgraph to F. */
be95e2b9 3335
3e293154 3336void
ca30a539 3337ipa_print_all_params (FILE * f)
3e293154
MJ
3338{
3339 struct cgraph_node *node;
3340
ca30a539 3341 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3342 FOR_EACH_FUNCTION (node)
ca30a539 3343 ipa_print_node_params (f, node);
3e293154 3344}
3f84bf08
MJ
3345
 3346/* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3347
9771b263 3348vec<tree>
3f84bf08
MJ
3349ipa_get_vector_of_formal_parms (tree fndecl)
3350{
9771b263 3351 vec<tree> args;
3f84bf08
MJ
3352 int count;
3353 tree parm;
3354
0e8853ee 3355 gcc_assert (!flag_wpa);
310bc633 3356 count = count_formal_params (fndecl);
9771b263 3357 args.create (count);
910ad8de 3358 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3359 args.quick_push (parm);
3f84bf08
MJ
3360
3361 return args;
3362}
3363
 3364/* Return a heap-allocated vector containing the types of formal parameters of
3365 function type FNTYPE. */
3366
31519c38
AH
3367vec<tree>
3368ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3369{
9771b263 3370 vec<tree> types;
3f84bf08
MJ
3371 int count = 0;
3372 tree t;
3373
3374 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3375 count++;
3376
9771b263 3377 types.create (count);
3f84bf08 3378 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3379 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3380
3381 return types;
3382}
3383
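/* Editor's sketch (not part of ipa-prop.c): the helper above collects every
   entry of TYPE_ARG_TYPES, including the trailing void_type_node that
   terminates the argument list of a prototyped, non-variadic function type.
   That trailing entry is why the last_parm_void logic in
   ipa_modify_formal_parameters below allows the type vector to be one element
   longer than the DECL_ARGUMENTS-based one.  A hedged helper (relying on the
   same GCC tree macros used in this file) expressing just the terminator
   check: */

static bool
demo_arg_list_ends_with_void_p (tree fntype)
{
  tree args = TYPE_ARG_TYPES (fntype);
  if (!args)
    return false;   /* No argument list in the type at all.  */
  return TREE_VALUE (tree_last (args)) == void_type_node;
}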
3384/* Modify the function declaration FNDECL and its type according to the plan in
 3385 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
 3386 to reflect the actual parameters being modified, which are determined by the
3387 base_index field. */
3388
3389void
31519c38 3390ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 3391{
31519c38
AH
3392 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3393 tree orig_type = TREE_TYPE (fndecl);
3394 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
3395
 3396 /* The following test is an ugly hack; some functions simply don't have any
 3397 arguments in their type. This is probably a bug, but we cope with it here. */
31519c38
AH
3398 bool care_for_types = (old_arg_types != NULL_TREE);
3399 bool last_parm_void;
3400 vec<tree> otypes;
3f84bf08
MJ
3401 if (care_for_types)
3402 {
3403 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3404 == void_type_node);
31519c38 3405 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 3406 if (last_parm_void)
9771b263 3407 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3408 else
9771b263 3409 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3410 }
3411 else
3412 {
3413 last_parm_void = false;
9771b263 3414 otypes.create (0);
3f84bf08
MJ
3415 }
3416
31519c38
AH
3417 int len = adjustments.length ();
3418 tree *link = &DECL_ARGUMENTS (fndecl);
3419 tree new_arg_types = NULL;
3420 for (int i = 0; i < len; i++)
3f84bf08
MJ
3421 {
3422 struct ipa_parm_adjustment *adj;
3423 gcc_assert (link);
3424
9771b263 3425 adj = &adjustments[i];
31519c38
AH
3426 tree parm;
3427 if (adj->op == IPA_PARM_OP_NEW)
3428 parm = NULL;
3429 else
3430 parm = oparms[adj->base_index];
3f84bf08
MJ
3431 adj->base = parm;
3432
31519c38 3433 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3434 {
3435 if (care_for_types)
9771b263 3436 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3437 new_arg_types);
3438 *link = parm;
910ad8de 3439 link = &DECL_CHAIN (parm);
3f84bf08 3440 }
31519c38 3441 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3442 {
3443 tree new_parm;
3444 tree ptype;
3445
3446 if (adj->by_ref)
3447 ptype = build_pointer_type (adj->type);
3448 else
e69dbe37
MJ
3449 {
3450 ptype = adj->type;
3451 if (is_gimple_reg_type (ptype))
3452 {
3453 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3454 if (TYPE_ALIGN (ptype) < malign)
3455 ptype = build_aligned_type (ptype, malign);
3456 }
3457 }
3f84bf08
MJ
3458
3459 if (care_for_types)
3460 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3461
3462 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3463 ptype);
31519c38
AH
3464 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3465 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
3466 DECL_ARTIFICIAL (new_parm) = 1;
3467 DECL_ARG_TYPE (new_parm) = ptype;
3468 DECL_CONTEXT (new_parm) = fndecl;
3469 TREE_USED (new_parm) = 1;
3470 DECL_IGNORED_P (new_parm) = 1;
3471 layout_decl (new_parm, 0);
3472
31519c38
AH
3473 if (adj->op == IPA_PARM_OP_NEW)
3474 adj->base = NULL;
3475 else
3476 adj->base = parm;
3477 adj->new_decl = new_parm;
3f84bf08
MJ
3478
3479 *link = new_parm;
910ad8de 3480 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3481 }
3482 }
3483
3484 *link = NULL_TREE;
3485
31519c38 3486 tree new_reversed = NULL;
3f84bf08
MJ
3487 if (care_for_types)
3488 {
3489 new_reversed = nreverse (new_arg_types);
3490 if (last_parm_void)
3491 {
3492 if (new_reversed)
3493 TREE_CHAIN (new_arg_types) = void_list_node;
3494 else
3495 new_reversed = void_list_node;
3496 }
3497 }
3498
 3499 /* Use copy_node to preserve as much as possible from the original type
 3500 (debug info, attribute lists etc.).
 3501 The exception is METHOD_TYPEs, which must have a THIS argument;
 3502 when we are asked to remove it, we need to build a new FUNCTION_TYPE
 3503 instead. */
31519c38 3504 tree new_type = NULL;
3f84bf08 3505 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 3506 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 3507 && adjustments[0].base_index == 0))
3f84bf08 3508 {
4eb3f32c 3509 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3510 TYPE_ARG_TYPES (new_type) = new_reversed;
3511 }
3512 else
3513 {
3514 new_type
3515 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3516 new_reversed));
3517 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3518 DECL_VINDEX (fndecl) = NULL_TREE;
3519 }
3520
d402c33d
JH
 3521 /* When the signature changes, we need to clear the builtin info. */
3522 if (DECL_BUILT_IN (fndecl))
3523 {
3524 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3525 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3526 }
3527
3f84bf08
MJ
3528 /* This is a new type, not a copy of an old type. Need to reassociate
3529 variants. We can handle everything except the main variant lazily. */
31519c38 3530 tree t = TYPE_MAIN_VARIANT (orig_type);
3f84bf08
MJ
3531 if (orig_type != t)
3532 {
3533 TYPE_MAIN_VARIANT (new_type) = t;
3534 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3535 TYPE_NEXT_VARIANT (t) = new_type;
3536 }
3537 else
3538 {
3539 TYPE_MAIN_VARIANT (new_type) = new_type;
3540 TYPE_NEXT_VARIANT (new_type) = NULL;
3541 }
3542
3543 TREE_TYPE (fndecl) = new_type;
9b389a5e 3544 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3545 otypes.release ();
3546 oparms.release ();
3f84bf08
MJ
3547}
3548
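/* Editor's sketch (not part of ipa-prop.c): callers such as IPA-SRA are
   expected to build the ADJUSTMENTS vector before calling
   ipa_modify_formal_parameters above.  A hypothetical, minimal recipe
   (assuming the ipa-prop.h declarations available in this file) that keeps
   the first formal parameter unchanged and removes the second one; only
   base_index and op matter for these two operations: */

static ipa_parm_adjustment_vec
demo_keep_first_drop_second (void)
{
  ipa_parm_adjustment_vec adjustments;
  struct ipa_parm_adjustment keep, drop;

  memset (&keep, 0, sizeof (keep));
  keep.base_index = 0;
  keep.op = IPA_PARM_OP_COPY;      /* Pass the original argument through.  */

  memset (&drop, 0, sizeof (drop));
  drop.base_index = 1;
  drop.op = IPA_PARM_OP_REMOVE;    /* Drop the parameter entirely.  */

  adjustments.create (2);
  adjustments.quick_push (keep);
  adjustments.quick_push (drop);
  return adjustments;
}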
 3549/* Modify the actual arguments of the call statement STMT as indicated in ADJUSTMENTS.
3550 If this is a directly recursive call, CS must be NULL. Otherwise it must
3551 contain the corresponding call graph edge. */
3552
3553void
3554ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3555 ipa_parm_adjustment_vec adjustments)
3556{
82338059 3557 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3558 vec<tree> vargs;
3559 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3560 gimple new_stmt;
82338059 3561 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3562 tree callee_decl;
3563 int i, len;
3564
9771b263
DN
3565 len = adjustments.length ();
3566 vargs.create (len);
67348ccc
DM
3567 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3568 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3569
3570 gsi = gsi_for_stmt (stmt);
82338059
MJ
3571 prev_gsi = gsi;
3572 gsi_prev (&prev_gsi);
3f84bf08
MJ
3573 for (i = 0; i < len; i++)
3574 {
3575 struct ipa_parm_adjustment *adj;
3576
9771b263 3577 adj = &adjustments[i];
3f84bf08 3578
31519c38 3579 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3580 {
3581 tree arg = gimple_call_arg (stmt, adj->base_index);
3582
9771b263 3583 vargs.quick_push (arg);
3f84bf08 3584 }
31519c38 3585 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 3586 {
fffe1e40
MJ
3587 tree expr, base, off;
3588 location_t loc;
f43245d1 3589 unsigned int deref_align = 0;
c1ed6a01 3590 bool deref_base = false;
fffe1e40
MJ
3591
 3592 /* We create a new parameter out of the value of the old one; we can
 3593 do the following kinds of transformations:
3594
3595 - A scalar passed by reference is converted to a scalar passed by
3596 value. (adj->by_ref is false and the type of the original
3597 actual argument is a pointer to a scalar).
3598
3599 - A part of an aggregate is passed instead of the whole aggregate.
3600 The part can be passed either by value or by reference, this is
3601 determined by value of adj->by_ref. Moreover, the code below
3602 handles both situations when the original aggregate is passed by
3603 value (its type is not a pointer) and when it is passed by
3604 reference (it is a pointer to an aggregate).
3605
3606 When the new argument is passed by reference (adj->by_ref is true)
3607 it must be a part of an aggregate and therefore we form it by
3608 simply taking the address of a reference inside the original
3609 aggregate. */
3610
3611 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3612 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3613 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3614 : EXPR_LOCATION (base);
fffe1e40 3615
82d49829
MJ
3616 if (TREE_CODE (base) != ADDR_EXPR
3617 && POINTER_TYPE_P (TREE_TYPE (base)))
3618 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3619 adj->offset / BITS_PER_UNIT);
3f84bf08 3620 else
3f84bf08 3621 {
fffe1e40
MJ
3622 HOST_WIDE_INT base_offset;
3623 tree prev_base;
c1ed6a01 3624 bool addrof;
fffe1e40
MJ
3625
3626 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3627 {
3628 base = TREE_OPERAND (base, 0);
3629 addrof = true;
3630 }
3631 else
3632 addrof = false;
fffe1e40
MJ
3633 prev_base = base;
3634 base = get_addr_base_and_unit_offset (base, &base_offset);
3635 /* Aggregate arguments can have non-invariant addresses. */
3636 if (!base)
3637 {
3638 base = build_fold_addr_expr (prev_base);
82d49829 3639 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3640 adj->offset / BITS_PER_UNIT);
3641 }
3642 else if (TREE_CODE (base) == MEM_REF)
3643 {
c1ed6a01
MJ
3644 if (!addrof)
3645 {
3646 deref_base = true;
3647 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3648 }
82d49829 3649 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3650 base_offset
3651 + adj->offset / BITS_PER_UNIT);
3652 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3653 off);
fffe1e40
MJ
3654 base = TREE_OPERAND (base, 0);
3655 }
3656 else
3657 {
82d49829 3658 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3659 base_offset
3660 + adj->offset / BITS_PER_UNIT);
3661 base = build_fold_addr_expr (base);
3662 }
3f84bf08 3663 }
fffe1e40 3664
3a5a825a
RG
3665 if (!adj->by_ref)
3666 {
3667 tree type = adj->type;
3668 unsigned int align;
3669 unsigned HOST_WIDE_INT misalign;
644ffefd 3670
c1ed6a01
MJ
3671 if (deref_base)
3672 {
3673 align = deref_align;
3674 misalign = 0;
3675 }
3676 else
3677 {
3678 get_pointer_alignment_1 (base, &align, &misalign);
3679 if (TYPE_ALIGN (type) > align)
3680 align = TYPE_ALIGN (type);
3681 }
27bcd47c
LC
3682 misalign += (tree_to_double_int (off)
3683 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3684 * BITS_PER_UNIT);
3685 misalign = misalign & (align - 1);
3686 if (misalign != 0)
3687 align = (misalign & -misalign);
3688 if (align < TYPE_ALIGN (type))
3689 type = build_aligned_type (type, align);
3690 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3691 }
3692 else
3693 {
3694 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3695 expr = build_fold_addr_expr (expr);
3696 }
fffe1e40 3697
3f84bf08
MJ
3698 expr = force_gimple_operand_gsi (&gsi, expr,
3699 adj->by_ref
3700 || is_gimple_reg_type (adj->type),
3701 NULL, true, GSI_SAME_STMT);
9771b263 3702 vargs.quick_push (expr);
3f84bf08 3703 }
31519c38 3704 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
3705 {
3706 unsigned int ix;
3707 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3708 gimple def_temp;
3709
3710 arg = gimple_call_arg (stmt, adj->base_index);
3711 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3712 {
3713 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3714 continue;
3715 arg = fold_convert_loc (gimple_location (stmt),
3716 TREE_TYPE (origin), arg);
3717 }
3718 if (debug_args == NULL)
3719 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3720 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3721 if (ddecl == origin)
3722 {
9771b263 3723 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3724 break;
3725 }
3726 if (ddecl == NULL)
3727 {
3728 ddecl = make_node (DEBUG_EXPR_DECL);
3729 DECL_ARTIFICIAL (ddecl) = 1;
3730 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3731 DECL_MODE (ddecl) = DECL_MODE (origin);
3732
9771b263
DN
3733 vec_safe_push (*debug_args, origin);
3734 vec_safe_push (*debug_args, ddecl);
ddb555ed 3735 }
9771b263 3736 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3737 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3738 }
3f84bf08
MJ
3739 }
3740
3741 if (dump_file && (dump_flags & TDF_DETAILS))
3742 {
3743 fprintf (dump_file, "replacing stmt:");
3744 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3745 }
3746
3f84bf08 3747 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3748 vargs.release ();
3f84bf08
MJ
3749 if (gimple_call_lhs (stmt))
3750 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3751
3752 gimple_set_block (new_stmt, gimple_block (stmt));
3753 if (gimple_has_location (stmt))
3754 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3755 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3756 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3757
3758 if (dump_file && (dump_flags & TDF_DETAILS))
3759 {
3760 fprintf (dump_file, "with stmt:");
3761 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3762 fprintf (dump_file, "\n");
3763 }
3764 gsi_replace (&gsi, new_stmt, true);
3765 if (cs)
3766 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3767 do
3768 {
3769 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3770 gsi_prev (&gsi);
3771 }
3772 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3773 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3774
3f84bf08
MJ
3775 update_ssa (TODO_update_ssa);
3776 free_dominance_info (CDI_DOMINATORS);
3777}
3778
31519c38
AH
3779/* If the expression *EXPR should be replaced by a reduction of a parameter, do
3780 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
 3781 specifies whether the function should care about type incompatibility between the
3782 current and new expressions. If it is false, the function will leave
3783 incompatibility issues to the caller. Return true iff the expression
3784 was modified. */
3785
3786bool
3787ipa_modify_expr (tree *expr, bool convert,
3788 ipa_parm_adjustment_vec adjustments)
3789{
3790 struct ipa_parm_adjustment *cand
3791 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3792 if (!cand)
3793 return false;
3794
3795 tree src;
3796 if (cand->by_ref)
3797 src = build_simple_mem_ref (cand->new_decl);
3798 else
3799 src = cand->new_decl;
3800
3801 if (dump_file && (dump_flags & TDF_DETAILS))
3802 {
3803 fprintf (dump_file, "About to replace expr ");
3804 print_generic_expr (dump_file, *expr, 0);
3805 fprintf (dump_file, " with ");
3806 print_generic_expr (dump_file, src, 0);
3807 fprintf (dump_file, "\n");
3808 }
3809
3810 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3811 {
3812 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3813 *expr = vce;
3814 }
3815 else
3816 *expr = src;
3817 return true;
3818}
3819
3820/* If T is an SSA_NAME, return NULL if it is not a default def or
3821 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
 3822 the base variable is always returned, regardless of whether it is a default
3823 def. Return T if it is not an SSA_NAME. */
3824
3825static tree
3826get_ssa_base_param (tree t, bool ignore_default_def)
3827{
3828 if (TREE_CODE (t) == SSA_NAME)
3829 {
3830 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3831 return SSA_NAME_VAR (t);
3832 else
3833 return NULL_TREE;
3834 }
3835 return t;
3836}
3837
3838/* Given an expression, return an adjustment entry specifying the
3839 transformation to be done on EXPR. If no suitable adjustment entry
3840 was found, returns NULL.
3841
3842 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
 3843 default def; otherwise bail on them.
3844
3845 If CONVERT is non-NULL, this function will set *CONVERT if the
3846 expression provided is a component reference. ADJUSTMENTS is the
3847 adjustments vector. */
3848
3849ipa_parm_adjustment *
3850ipa_get_adjustment_candidate (tree **expr, bool *convert,
3851 ipa_parm_adjustment_vec adjustments,
3852 bool ignore_default_def)
3853{
3854 if (TREE_CODE (**expr) == BIT_FIELD_REF
3855 || TREE_CODE (**expr) == IMAGPART_EXPR
3856 || TREE_CODE (**expr) == REALPART_EXPR)
3857 {
3858 *expr = &TREE_OPERAND (**expr, 0);
3859 if (convert)
3860 *convert = true;
3861 }
3862
3863 HOST_WIDE_INT offset, size, max_size;
3864 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3865 if (!base || size == -1 || max_size == -1)
3866 return NULL;
3867
3868 if (TREE_CODE (base) == MEM_REF)
3869 {
3870 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3871 base = TREE_OPERAND (base, 0);
3872 }
3873
3874 base = get_ssa_base_param (base, ignore_default_def);
3875 if (!base || TREE_CODE (base) != PARM_DECL)
3876 return NULL;
3877
3878 struct ipa_parm_adjustment *cand = NULL;
3879 unsigned int len = adjustments.length ();
3880 for (unsigned i = 0; i < len; i++)
3881 {
3882 struct ipa_parm_adjustment *adj = &adjustments[i];
3883
3884 if (adj->base == base
3885 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3886 {
3887 cand = adj;
3888 break;
3889 }
3890 }
3891
3892 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3893 return NULL;
3894 return cand;
3895}
3896
3f84bf08
MJ
3897/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3898
3899static bool
3900index_in_adjustments_multiple_times_p (int base_index,
3901 ipa_parm_adjustment_vec adjustments)
3902{
9771b263 3903 int i, len = adjustments.length ();
3f84bf08
MJ
3904 bool one = false;
3905
3906 for (i = 0; i < len; i++)
3907 {
3908 struct ipa_parm_adjustment *adj;
9771b263 3909 adj = &adjustments[i];
3f84bf08
MJ
3910
3911 if (adj->base_index == base_index)
3912 {
3913 if (one)
3914 return true;
3915 else
3916 one = true;
3917 }
3918 }
3919 return false;
3920}
3921
3922
3923/* Return adjustments that should have the same effect on function parameters
3924 and call arguments as if they were first changed according to adjustments in
3925 INNER and then by adjustments in OUTER. */
3926
3927ipa_parm_adjustment_vec
3928ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3929 ipa_parm_adjustment_vec outer)
3930{
9771b263
DN
3931 int i, outlen = outer.length ();
3932 int inlen = inner.length ();
3f84bf08
MJ
3933 int removals = 0;
3934 ipa_parm_adjustment_vec adjustments, tmp;
3935
9771b263 3936 tmp.create (inlen);
3f84bf08
MJ
3937 for (i = 0; i < inlen; i++)
3938 {
3939 struct ipa_parm_adjustment *n;
9771b263 3940 n = &inner[i];
3f84bf08 3941
31519c38 3942 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3943 removals++;
3944 else
31519c38
AH
3945 {
 3946 /* FIXME: Handling of new arguments is not implemented yet. */
3947 gcc_assert (n->op != IPA_PARM_OP_NEW);
3948 tmp.quick_push (*n);
3949 }
3f84bf08
MJ
3950 }
3951
9771b263 3952 adjustments.create (outlen + removals);
3f84bf08
MJ
3953 for (i = 0; i < outlen; i++)
3954 {
f32682ca 3955 struct ipa_parm_adjustment r;
9771b263
DN
3956 struct ipa_parm_adjustment *out = &outer[i];
3957 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3958
f32682ca 3959 memset (&r, 0, sizeof (r));
31519c38
AH
3960 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
3961 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3962 {
3963 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3964 {
31519c38 3965 r.op = IPA_PARM_OP_REMOVE;
9771b263 3966 adjustments.quick_push (r);
3f84bf08
MJ
3967 }
3968 continue;
3969 }
31519c38
AH
3970 else
3971 {
 3972 /* FIXME: Handling of new arguments is not implemented yet. */
3973 gcc_assert (out->op != IPA_PARM_OP_NEW);
3974 }
3f84bf08 3975
f32682ca
DN
3976 r.base_index = in->base_index;
3977 r.type = out->type;
3f84bf08
MJ
3978
3979 /* FIXME: Create nonlocal value too. */
3980
31519c38
AH
3981 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
3982 r.op = IPA_PARM_OP_COPY;
3983 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 3984 r.offset = out->offset;
31519c38 3985 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 3986 r.offset = in->offset;
3f84bf08 3987 else
f32682ca 3988 r.offset = in->offset + out->offset;
9771b263 3989 adjustments.quick_push (r);
3f84bf08
MJ
3990 }
3991
3992 for (i = 0; i < inlen; i++)
3993 {
9771b263 3994 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 3995
31519c38 3996 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 3997 adjustments.quick_push (*n);
3f84bf08
MJ
3998 }
3999
9771b263 4000 tmp.release ();
3f84bf08
MJ
4001 return adjustments;
4002}
4003
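/* Editor's note (illustrative, not part of ipa-prop.c): as a worked example of
   the offset arithmetic above, suppose INNER replaced original parameter P by
   the piece at bit offset 64 inside it, and OUTER then selects the piece at
   bit offset 32 inside that intermediate parameter.  Neither adjustment is a
   copy or a removal, so the combined entry keeps P's base_index, takes OUTER's
   type and describes the piece at bit offset 64 + 32 = 96 of P, which is what
   applying INNER and then OUTER in sequence would have produced.  */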
 4004/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
 4005 way, assuming they are meant to be applied to FNDECL. */
4006
4007void
4008ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4009 tree fndecl)
4010{
9771b263 4011 int i, len = adjustments.length ();
3f84bf08 4012 bool first = true;
9771b263 4013 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
4014
4015 fprintf (file, "IPA param adjustments: ");
4016 for (i = 0; i < len; i++)
4017 {
4018 struct ipa_parm_adjustment *adj;
9771b263 4019 adj = &adjustments[i];
3f84bf08
MJ
4020
4021 if (!first)
4022 fprintf (file, " ");
4023 else
4024 first = false;
4025
4026 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 4027 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
4028 if (adj->base)
4029 {
4030 fprintf (file, ", base: ");
4031 print_generic_expr (file, adj->base, 0);
4032 }
31519c38 4033 if (adj->new_decl)
3f84bf08 4034 {
31519c38
AH
4035 fprintf (file, ", new_decl: ");
4036 print_generic_expr (file, adj->new_decl, 0);
3f84bf08
MJ
4037 }
4038 if (adj->new_ssa_base)
4039 {
4040 fprintf (file, ", new_ssa_base: ");
4041 print_generic_expr (file, adj->new_ssa_base, 0);
4042 }
4043
31519c38 4044 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08 4045 fprintf (file, ", copy_param");
31519c38 4046 else if (adj->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4047 fprintf (file, ", remove_param");
4048 else
4049 fprintf (file, ", offset %li", (long) adj->offset);
4050 if (adj->by_ref)
4051 fprintf (file, ", by_ref");
4052 print_node_brief (file, ", type: ", adj->type, 0);
4053 fprintf (file, "\n");
4054 }
9771b263 4055 parms.release ();
3f84bf08
MJ
4056}
4057
2c9561b5
MJ
4058/* Dump the AV linked list. */
4059
4060void
4061ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4062{
4063 bool comma = false;
4064 fprintf (f, " Aggregate replacements:");
4065 for (; av; av = av->next)
4066 {
4067 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4068 av->index, av->offset);
4069 print_generic_expr (f, av->value, 0);
4070 comma = true;
4071 }
4072 fprintf (f, "\n");
4073}
4074
fb3f88cc
JH
4075/* Stream out jump function JUMP_FUNC to OB. */
4076
4077static void
4078ipa_write_jump_function (struct output_block *ob,
4079 struct ipa_jump_func *jump_func)
4080{
8b7773a4
MJ
4081 struct ipa_agg_jf_item *item;
4082 struct bitpack_d bp;
4083 int i, count;
fb3f88cc 4084
8b7773a4 4085 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
4086 switch (jump_func->type)
4087 {
4088 case IPA_JF_UNKNOWN:
4089 break;
b258210c 4090 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
4091 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4092 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4093 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 4094 break;
fb3f88cc 4095 case IPA_JF_CONST:
5368224f 4096 gcc_assert (
4502fe8d
MJ
4097 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4098 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
4099 break;
4100 case IPA_JF_PASS_THROUGH:
412288f1 4101 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
4102 if (jump_func->value.pass_through.operation == NOP_EXPR)
4103 {
4104 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4105 bp = bitpack_create (ob->main_stream);
4106 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 4107 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
4108 streamer_write_bitpack (&bp);
4109 }
4110 else
4111 {
4112 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4113 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4114 }
fb3f88cc
JH
4115 break;
4116 case IPA_JF_ANCESTOR:
412288f1 4117 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 4118 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 4119 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
4120 bp = bitpack_create (ob->main_stream);
4121 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 4122 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 4123 streamer_write_bitpack (&bp);
fb3f88cc 4124 break;
8b7773a4
MJ
4125 }
4126
9771b263 4127 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
4128 streamer_write_uhwi (ob, count);
4129 if (count)
4130 {
4131 bp = bitpack_create (ob->main_stream);
4132 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4133 streamer_write_bitpack (&bp);
4134 }
4135
9771b263 4136 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
4137 {
4138 streamer_write_uhwi (ob, item->offset);
4139 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
4140 }
4141}
4142
4143/* Read in jump function JUMP_FUNC from IB. */
4144
4145static void
4146ipa_read_jump_function (struct lto_input_block *ib,
4147 struct ipa_jump_func *jump_func,
4502fe8d 4148 struct cgraph_edge *cs,
fb3f88cc
JH
4149 struct data_in *data_in)
4150{
4a53743e
MJ
4151 enum jump_func_type jftype;
4152 enum tree_code operation;
8b7773a4 4153 int i, count;
fb3f88cc 4154
4a53743e
MJ
4155 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4156 switch (jftype)
fb3f88cc
JH
4157 {
4158 case IPA_JF_UNKNOWN:
4a53743e 4159 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4160 break;
b258210c 4161 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4162 {
4163 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4164 tree base_type = stream_read_tree (ib, data_in);
4165 tree component_type = stream_read_tree (ib, data_in);
4166
4167 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4168 break;
4169 }
fb3f88cc 4170 case IPA_JF_CONST:
4502fe8d 4171 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4172 break;
4173 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4174 operation = (enum tree_code) streamer_read_uhwi (ib);
4175 if (operation == NOP_EXPR)
4176 {
4177 int formal_id = streamer_read_uhwi (ib);
4178 struct bitpack_d bp = streamer_read_bitpack (ib);
4179 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4180 bool type_preserved = bp_unpack_value (&bp, 1);
4181 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4182 type_preserved);
4a53743e
MJ
4183 }
4184 else
4185 {
4186 tree operand = stream_read_tree (ib, data_in);
4187 int formal_id = streamer_read_uhwi (ib);
4188 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4189 operation);
4190 }
fb3f88cc
JH
4191 break;
4192 case IPA_JF_ANCESTOR:
4a53743e
MJ
4193 {
4194 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4195 tree type = stream_read_tree (ib, data_in);
4196 int formal_id = streamer_read_uhwi (ib);
4197 struct bitpack_d bp = streamer_read_bitpack (ib);
4198 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4199 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4200
b8f6e610
MJ
4201 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4202 type_preserved);
4a53743e
MJ
4203 break;
4204 }
8b7773a4
MJ
4205 }
4206
4207 count = streamer_read_uhwi (ib);
9771b263 4208 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4209 if (count)
4210 {
4a53743e 4211 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4212 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4213 }
4214 for (i = 0; i < count; i++)
4215 {
f32682ca
DN
4216 struct ipa_agg_jf_item item;
4217 item.offset = streamer_read_uhwi (ib);
4218 item.value = stream_read_tree (ib, data_in);
9771b263 4219 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4220 }
4221}
4222
e33c6cd6
MJ
 4223/* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
 4224 that are relevant to indirect inlining. */
661e7330
MJ
4225
4226static void
e33c6cd6
MJ
4227ipa_write_indirect_edge_info (struct output_block *ob,
4228 struct cgraph_edge *cs)
661e7330 4229{
e33c6cd6 4230 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4231 struct bitpack_d bp;
e33c6cd6 4232
412288f1 4233 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4234 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4235 bp = bitpack_create (ob->main_stream);
4236 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4237 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4238 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4239 bp_pack_value (&bp, ii->by_ref, 1);
68377e53
JH
4240 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4241 bp_pack_value (&bp, ii->maybe_derived_type, 1);
412288f1 4242 streamer_write_bitpack (&bp);
b258210c
MJ
4243
4244 if (ii->polymorphic)
4245 {
412288f1 4246 streamer_write_hwi (ob, ii->otr_token);
b9393656 4247 stream_write_tree (ob, ii->otr_type, true);
68377e53 4248 stream_write_tree (ob, ii->outer_type, true);
b258210c 4249 }
661e7330
MJ
4250}
4251
e33c6cd6
MJ
 4252/* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
 4253 that are relevant to indirect inlining. */
661e7330
MJ
4254
4255static void
e33c6cd6
MJ
4256ipa_read_indirect_edge_info (struct lto_input_block *ib,
4257 struct data_in *data_in ATTRIBUTE_UNUSED,
4258 struct cgraph_edge *cs)
661e7330 4259{
e33c6cd6 4260 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4261 struct bitpack_d bp;
661e7330 4262
412288f1 4263 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4264 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4265 bp = streamer_read_bitpack (ib);
2465dcc2 4266 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4267 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4268 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4269 ii->by_ref = bp_unpack_value (&bp, 1);
68377e53
JH
4270 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4271 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
b258210c
MJ
4272 if (ii->polymorphic)
4273 {
412288f1 4274 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4275 ii->otr_type = stream_read_tree (ib, data_in);
68377e53 4276 ii->outer_type = stream_read_tree (ib, data_in);
b258210c 4277 }
661e7330
MJ
4278}
4279
fb3f88cc
JH
4280/* Stream out NODE info to OB. */
4281
4282static void
4283ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4284{
4285 int node_ref;
7380e6ef 4286 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4287 struct ipa_node_params *info = IPA_NODE_REF (node);
4288 int j;
4289 struct cgraph_edge *e;
2465dcc2 4290 struct bitpack_d bp;
fb3f88cc 4291
7380e6ef 4292 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4293 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4294 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4295
0e8853ee
JH
4296 streamer_write_uhwi (ob, ipa_get_param_count (info));
4297 for (j = 0; j < ipa_get_param_count (info); j++)
4298 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4299 bp = bitpack_create (ob->main_stream);
062c604f 4300 gcc_assert (info->uses_analysis_done
661e7330 4301 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4302 gcc_assert (!info->node_enqueued);
4303 gcc_assert (!info->ipcp_orig_node);
4304 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4305 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4306 streamer_write_bitpack (&bp);
4502fe8d
MJ
4307 for (j = 0; j < ipa_get_param_count (info); j++)
4308 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4309 for (e = node->callees; e; e = e->next_callee)
4310 {
4311 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4312
412288f1 4313 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4314 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4315 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4316 }
e33c6cd6 4317 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4318 {
4319 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4320
412288f1 4321 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4322 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4323 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4324 ipa_write_indirect_edge_info (ob, e);
4325 }
fb3f88cc
JH
4326}
4327
61502ca8 4328/* Stream in NODE info from IB. */
fb3f88cc
JH
4329
4330static void
4331ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4332 struct data_in *data_in)
4333{
4334 struct ipa_node_params *info = IPA_NODE_REF (node);
4335 int k;
4336 struct cgraph_edge *e;
2465dcc2 4337 struct bitpack_d bp;
fb3f88cc 4338
0e8853ee 4339 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4340
0e8853ee
JH
4341 for (k = 0; k < ipa_get_param_count (info); k++)
4342 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4343
412288f1 4344 bp = streamer_read_bitpack (ib);
fb3f88cc 4345 if (ipa_get_param_count (info) != 0)
062c604f 4346 info->uses_analysis_done = true;
fb3f88cc
JH
4347 info->node_enqueued = false;
4348 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4349 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4350 for (k = 0; k < ipa_get_param_count (info); k++)
4351 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4352 for (e = node->callees; e; e = e->next_callee)
4353 {
4354 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4355 int count = streamer_read_uhwi (ib);
fb3f88cc 4356
fb3f88cc
JH
4357 if (!count)
4358 continue;
9771b263 4359 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4360
fb3f88cc 4361 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4362 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4363 data_in);
fb3f88cc 4364 }
e33c6cd6 4365 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4366 {
4367 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4368 int count = streamer_read_uhwi (ib);
c8246dbe 4369
c8246dbe
JH
4370 if (count)
4371 {
9771b263 4372 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4373 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4374 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4375 data_in);
c8246dbe
JH
4376 }
4377 ipa_read_indirect_edge_info (ib, data_in, e);
4378 }
fb3f88cc
JH
4379}
4380
 4381/* Write jump functions for all functions in the current partition. */
4382
4383void
f27c1867 4384ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4385{
4386 struct cgraph_node *node;
93536c97 4387 struct output_block *ob;
fb3f88cc 4388 unsigned int count = 0;
f27c1867
JH
4389 lto_symtab_encoder_iterator lsei;
4390 lto_symtab_encoder_t encoder;
4391
fb3f88cc 4392
9771b263 4393 if (!ipa_node_params_vector.exists ())
93536c97 4394 return;
fb3f88cc 4395
93536c97 4396 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4397 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4398 ob->cgraph_node = NULL;
f27c1867
JH
4399 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4400 lsei_next_function_in_partition (&lsei))
fb3f88cc 4401 {
f27c1867 4402 node = lsei_cgraph_node (lsei);
c47d0034
JH
4403 if (cgraph_function_with_gimple_body_p (node)
4404 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4405 count++;
4406 }
4407
412288f1 4408 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4409
4410 /* Process all of the functions. */
f27c1867
JH
4411 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4412 lsei_next_function_in_partition (&lsei))
fb3f88cc 4413 {
f27c1867 4414 node = lsei_cgraph_node (lsei);
c47d0034
JH
4415 if (cgraph_function_with_gimple_body_p (node)
4416 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4417 ipa_write_node_info (ob, node);
4418 }
412288f1 4419 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4420 produce_asm (ob, NULL);
4421 destroy_output_block (ob);
4422}
4423
4424/* Read section in file FILE_DATA of length LEN with data DATA. */
4425
4426static void
4427ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4428 size_t len)
4429{
4430 const struct lto_function_header *header =
4431 (const struct lto_function_header *) data;
4ad9a9de
EB
4432 const int cfg_offset = sizeof (struct lto_function_header);
4433 const int main_offset = cfg_offset + header->cfg_size;
4434 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4435 struct data_in *data_in;
4436 struct lto_input_block ib_main;
4437 unsigned int i;
4438 unsigned int count;
4439
4440 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4441 header->main_size);
4442
4443 data_in =
4444 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4445 header->string_size, vNULL);
412288f1 4446 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4447
4448 for (i = 0; i < count; i++)
4449 {
4450 unsigned int index;
4451 struct cgraph_node *node;
7380e6ef 4452 lto_symtab_encoder_t encoder;
fb3f88cc 4453
412288f1 4454 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4455 encoder = file_data->symtab_node_encoder;
4456 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4457 gcc_assert (node->definition);
fb3f88cc
JH
4458 ipa_read_node_info (&ib_main, node, data_in);
4459 }
4460 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4461 len);
4462 lto_data_in_delete (data_in);
4463}
4464
4465/* Read ipcp jump functions. */
4466
4467void
4468ipa_prop_read_jump_functions (void)
4469{
4470 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4471 struct lto_file_decl_data *file_data;
4472 unsigned int j = 0;
4473
4474 ipa_check_create_node_params ();
4475 ipa_check_create_edge_args ();
4476 ipa_register_cgraph_hooks ();
4477
4478 while ((file_data = file_data_vec[j++]))
4479 {
4480 size_t len;
4481 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4482
4483 if (data)
4484 ipa_prop_read_section (file_data, data, len);
4485 }
4486}
4487
b8698a0f 4488/* After merging units, we can get a mismatch in argument counts.
61502ca8 4489 Decl merging might also have rendered parameter lists obsolete.
fb3f88cc
JH
4490 Also compute called_with_variable_arg info. */
4491
4492void
4493ipa_update_after_lto_read (void)
4494{
05d3aa37
MJ
4495 ipa_check_create_node_params ();
4496 ipa_check_create_edge_args ();
fb3f88cc 4497}
2c9561b5
MJ
4498
4499void
4500write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4501{
4502 int node_ref;
4503 unsigned int count = 0;
4504 lto_symtab_encoder_t encoder;
4505 struct ipa_agg_replacement_value *aggvals, *av;
4506
4507 aggvals = ipa_get_agg_replacements_for_node (node);
4508 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4509 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4510 streamer_write_uhwi (ob, node_ref);
4511
4512 for (av = aggvals; av; av = av->next)
4513 count++;
4514 streamer_write_uhwi (ob, count);
4515
4516 for (av = aggvals; av; av = av->next)
4517 {
7b920a9a
MJ
4518 struct bitpack_d bp;
4519
2c9561b5
MJ
4520 streamer_write_uhwi (ob, av->offset);
4521 streamer_write_uhwi (ob, av->index);
4522 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4523
4524 bp = bitpack_create (ob->main_stream);
4525 bp_pack_value (&bp, av->by_ref, 1);
4526 streamer_write_bitpack (&bp);
2c9561b5
MJ
4527 }
4528}
4529
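/* Editor's note (illustrative, not part of ipa-prop.c): after the node
   reference and the record count, each aggregate replacement is streamed
   above as offset (uhwi), index (uhwi), value (tree) followed by a one-bit
   bitpack holding by_ref.  read_agg_replacement_chain below consumes the
   records in exactly this order, so the two routines must be kept in sync.  */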
4530/* Stream in the aggregate value replacement chain for NODE from IB. */
4531
4532static void
4533read_agg_replacement_chain (struct lto_input_block *ib,
4534 struct cgraph_node *node,
4535 struct data_in *data_in)
4536{
4537 struct ipa_agg_replacement_value *aggvals = NULL;
4538 unsigned int count, i;
4539
4540 count = streamer_read_uhwi (ib);
 4541 for (i = 0; i < count; i++)
4542 {
4543 struct ipa_agg_replacement_value *av;
7b920a9a 4544 struct bitpack_d bp;
2c9561b5
MJ
4545
4546 av = ggc_alloc_ipa_agg_replacement_value ();
4547 av->offset = streamer_read_uhwi (ib);
4548 av->index = streamer_read_uhwi (ib);
4549 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4550 bp = streamer_read_bitpack (ib);
4551 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4552 av->next = aggvals;
4553 aggvals = av;
4554 }
4555 ipa_set_node_agg_value_chain (node, aggvals);
4556}
4557
 4558/* Write all aggregate replacements for functions in the current partition. */
4559
4560void
4561ipa_prop_write_all_agg_replacement (void)
4562{
4563 struct cgraph_node *node;
4564 struct output_block *ob;
4565 unsigned int count = 0;
4566 lto_symtab_encoder_iterator lsei;
4567 lto_symtab_encoder_t encoder;
4568
4569 if (!ipa_node_agg_replacements)
4570 return;
4571
4572 ob = create_output_block (LTO_section_ipcp_transform);
4573 encoder = ob->decl_state->symtab_node_encoder;
4574 ob->cgraph_node = NULL;
4575 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4576 lsei_next_function_in_partition (&lsei))
4577 {
4578 node = lsei_cgraph_node (lsei);
4579 if (cgraph_function_with_gimple_body_p (node)
4580 && ipa_get_agg_replacements_for_node (node) != NULL)
4581 count++;
4582 }
4583
4584 streamer_write_uhwi (ob, count);
4585
4586 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4587 lsei_next_function_in_partition (&lsei))
4588 {
4589 node = lsei_cgraph_node (lsei);
4590 if (cgraph_function_with_gimple_body_p (node)
4591 && ipa_get_agg_replacements_for_node (node) != NULL)
4592 write_agg_replacement_chain (ob, node);
4593 }
4594 streamer_write_char_stream (ob->main_stream, 0);
4595 produce_asm (ob, NULL);
4596 destroy_output_block (ob);
4597}
4598
4599/* Read replacements section in file FILE_DATA of length LEN with data
4600 DATA. */
4601
4602static void
4603read_replacements_section (struct lto_file_decl_data *file_data,
4604 const char *data,
4605 size_t len)
4606{
4607 const struct lto_function_header *header =
4608 (const struct lto_function_header *) data;
4609 const int cfg_offset = sizeof (struct lto_function_header);
4610 const int main_offset = cfg_offset + header->cfg_size;
4611 const int string_offset = main_offset + header->main_size;
4612 struct data_in *data_in;
4613 struct lto_input_block ib_main;
4614 unsigned int i;
4615 unsigned int count;
4616
4617 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4618 header->main_size);
4619
4620 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4621 header->string_size, vNULL);
2c9561b5
MJ
4622 count = streamer_read_uhwi (&ib_main);
4623
4624 for (i = 0; i < count; i++)
4625 {
4626 unsigned int index;
4627 struct cgraph_node *node;
4628 lto_symtab_encoder_t encoder;
4629
4630 index = streamer_read_uhwi (&ib_main);
4631 encoder = file_data->symtab_node_encoder;
4632 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4633 gcc_assert (node->definition);
2c9561b5
MJ
4634 read_agg_replacement_chain (&ib_main, node, data_in);
4635 }
 4636 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4637 len);
4638 lto_data_in_delete (data_in);
4639}
4640
4641/* Read IPA-CP aggregate replacements. */
4642
4643void
4644ipa_prop_read_all_agg_replacement (void)
4645{
4646 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4647 struct lto_file_decl_data *file_data;
4648 unsigned int j = 0;
4649
4650 while ((file_data = file_data_vec[j++]))
4651 {
4652 size_t len;
4653 const char *data = lto_get_section_data (file_data,
4654 LTO_section_ipcp_transform,
4655 NULL, &len);
4656 if (data)
4657 read_replacements_section (file_data, data, len);
4658 }
4659}
4660
4661/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4662 NODE. */
4663
4664static void
4665adjust_agg_replacement_values (struct cgraph_node *node,
4666 struct ipa_agg_replacement_value *aggval)
4667{
4668 struct ipa_agg_replacement_value *v;
4669 int i, c = 0, d = 0, *adj;
4670
4671 if (!node->clone.combined_args_to_skip)
4672 return;
4673
4674 for (v = aggval; v; v = v->next)
4675 {
4676 gcc_assert (v->index >= 0);
4677 if (c < v->index)
4678 c = v->index;
4679 }
4680 c++;
4681
4682 adj = XALLOCAVEC (int, c);
4683 for (i = 0; i < c; i++)
4684 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4685 {
4686 adj[i] = -1;
4687 d++;
4688 }
4689 else
4690 adj[i] = i - d;
4691
4692 for (v = aggval; v; v = v->next)
4693 v->index = adj[v->index];
4694}
4695
4696
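/* Editor's sketch (not part of ipa-prop.c): the remapping above translates
   indices into the original parameter list into indices into the clone's
   reduced list; e.g. if parameters 1 and 3 out of five were skipped, the map
   is 0->0, 1->-1, 2->1, 3->-1, 4->2.  A stand-alone version using a plain
   bool array in place of the bitmap: */

#include <stdbool.h>

static void
demo_build_index_map (const bool *skipped, int count, int *map)
{
  int removed = 0;
  for (int i = 0; i < count; i++)
    if (skipped[i])
      {
        map[i] = -1;    /* The parameter no longer exists in the clone.  */
        removed++;
      }
    else
      map[i] = i - removed;
}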
4697/* Function body transformation phase. */
4698
4699unsigned int
4700ipcp_transform_function (struct cgraph_node *node)
4701{
84562394 4702 vec<ipa_param_descriptor> descriptors = vNULL;
2c9561b5
MJ
4703 struct param_analysis_info *parms_ainfo;
4704 struct ipa_agg_replacement_value *aggval;
4705 gimple_stmt_iterator gsi;
4706 basic_block bb;
4707 int param_count;
4708 bool cfg_changed = false, something_changed = false;
4709
4710 gcc_checking_assert (cfun);
4711 gcc_checking_assert (current_function_decl);
4712
4713 if (dump_file)
4714 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 4715 node->name (), node->order);
2c9561b5
MJ
4716
4717 aggval = ipa_get_agg_replacements_for_node (node);
4718 if (!aggval)
4719 return 0;
67348ccc 4720 param_count = count_formal_params (node->decl);
2c9561b5
MJ
4721 if (param_count == 0)
4722 return 0;
4723 adjust_agg_replacement_values (node, aggval);
4724 if (dump_file)
4725 ipa_dump_agg_replacement_values (dump_file, aggval);
4726 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4727 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4728 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4729 ipa_populate_param_decls (node, descriptors);
4730
11cd3bed 4731 FOR_EACH_BB_FN (bb, cfun)
2c9561b5
MJ
4732 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4733 {
4734 struct ipa_agg_replacement_value *v;
4735 gimple stmt = gsi_stmt (gsi);
4736 tree rhs, val, t;
3ff2ca23 4737 HOST_WIDE_INT offset, size;
2c9561b5
MJ
4738 int index;
4739 bool by_ref, vce;
4740
4741 if (!gimple_assign_load_p (stmt))
4742 continue;
4743 rhs = gimple_assign_rhs1 (stmt);
4744 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4745 continue;
4746
4747 vce = false;
4748 t = rhs;
4749 while (handled_component_p (t))
4750 {
 4751 /* V_C_E can do things like convert an array of integers to one
 4752 bigger integer, and similar things that we do not handle below. */
 4753 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4754 {
4755 vce = true;
4756 break;
4757 }
4758 t = TREE_OPERAND (t, 0);
4759 }
4760 if (vce)
4761 continue;
4762
4763 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3ff2ca23 4764 rhs, &index, &offset, &size, &by_ref))
2c9561b5
MJ
4765 continue;
4766 for (v = aggval; v; v = v->next)
4767 if (v->index == index
4768 && v->offset == offset)
4769 break;
3ff2ca23
JJ
4770 if (!v
4771 || v->by_ref != by_ref
9439e9a1 4772 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
2c9561b5
MJ
4773 continue;
4774
4775 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4776 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4777 {
4778 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4779 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4780 else if (TYPE_SIZE (TREE_TYPE (rhs))
4781 == TYPE_SIZE (TREE_TYPE (v->value)))
4782 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4783 else
4784 {
4785 if (dump_file)
4786 {
4787 fprintf (dump_file, " const ");
4788 print_generic_expr (dump_file, v->value, 0);
4789 fprintf (dump_file, " can't be converted to type of ");
4790 print_generic_expr (dump_file, rhs, 0);
4791 fprintf (dump_file, "\n");
4792 }
4793 continue;
4794 }
4795 }
4796 else
4797 val = v->value;
4798
4799 if (dump_file && (dump_flags & TDF_DETAILS))
4800 {
4801 fprintf (dump_file, "Modifying stmt:\n ");
4802 print_gimple_stmt (dump_file, stmt, 0, 0);
4803 }
4804 gimple_assign_set_rhs_from_tree (&gsi, val);
4805 update_stmt (stmt);
4806
4807 if (dump_file && (dump_flags & TDF_DETAILS))
4808 {
4809 fprintf (dump_file, "into:\n ");
4810 print_gimple_stmt (dump_file, stmt, 0, 0);
4811 fprintf (dump_file, "\n");
4812 }
4813
4814 something_changed = true;
4815 if (maybe_clean_eh_stmt (stmt)
4816 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4817 cfg_changed = true;
4818 }
4819
9771b263 4820 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4821 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4822 descriptors.release ();
2c9561b5
MJ
4823
4824 if (!something_changed)
4825 return 0;
4826 else if (cfg_changed)
4827 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4828 else
4829 return TODO_update_ssa_only_virtuals;
4830}