gcc/ipa-prop.c
518dc859 1/* Interprocedural analyses.
d1e082c2 2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
18f429e2 24#include "gimple.h"
45b0be94 25#include "gimplify.h"
5be5c238 26#include "gimple-iterator.h"
18f429e2 27#include "gimplify-me.h"
5be5c238 28#include "gimple-walk.h"
518dc859
RL
29#include "langhooks.h"
30#include "ggc.h"
31#include "target.h"
518dc859 32#include "ipa-prop.h"
442b4905
AM
33#include "bitmap.h"
34#include "gimple-ssa.h"
35#include "tree-cfg.h"
36#include "tree-phinodes.h"
37#include "ssa-iterators.h"
38#include "tree-into-ssa.h"
39#include "tree-dfa.h"
518dc859 40#include "tree-pass.h"
771578a0 41#include "tree-inline.h"
0f378cb5 42#include "ipa-inline.h"
518dc859 43#include "flags.h"
3e293154 44#include "diagnostic.h"
cf835838 45#include "gimple-pretty-print.h"
fb3f88cc 46#include "lto-streamer.h"
f0efc7aa
DN
47#include "data-streamer.h"
48#include "tree-streamer.h"
dfea20f1 49#include "params.h"
450ad0cd 50#include "ipa-utils.h"
771578a0 51
062c604f
MJ
52/* Intermediate information about a parameter that is only useful during the
53 run of ipa_analyze_node and is not kept afterwards. */
54
55struct param_analysis_info
56{
8b7773a4
MJ
57 bool parm_modified, ref_modified, pt_modified;
58 bitmap parm_visited_statements, pt_visited_statements;
062c604f
MJ
59};
60
771578a0 61/* Vector where the parameter infos are actually stored. */
9771b263 62vec<ipa_node_params_t> ipa_node_params_vector;
2c9561b5 63/* Vector of known aggregate values in cloned nodes. */
9771b263 64vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
771578a0 65/* Vector where the edge argument infos are actually stored. */
9771b263 66vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
771578a0
MJ
67
68/* Holders of ipa cgraph hooks: */
e2c9111c
JH
69static struct cgraph_edge_hook_list *edge_removal_hook_holder;
70static struct cgraph_node_hook_list *node_removal_hook_holder;
71static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
72static struct cgraph_2node_hook_list *node_duplication_hook_holder;
40982661 73static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 74
4502fe8d
MJ
75/* Description of a reference to an IPA constant. */
76struct ipa_cst_ref_desc
77{
78 /* Edge that corresponds to the statement which took the reference. */
79 struct cgraph_edge *cs;
80 /* Linked list of duplicates created when call graph edges are cloned. */
81 struct ipa_cst_ref_desc *next_duplicate;
82 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
 83 is out of control. */
84 int refcount;
85};
86
87/* Allocation pool for reference descriptions. */
88
89static alloc_pool ipa_refdesc_pool;
90
5fe8e757
MJ
91/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
92 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
93
94static bool
95ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
96{
67348ccc 97 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
98 struct cl_optimization *os;
99
100 if (!fs_opts)
101 return false;
102 os = TREE_OPTIMIZATION (fs_opts);
103 return !os->x_optimize || !os->x_flag_ipa_cp;
104}
105
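/* As an illustrative example (not part of the original sources): a function
   declared as

     __attribute__ ((optimize (0)))
     int identity (int x) { return x; }

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is zero,
   so the predicate above returns true and the function is left alone by
   IPA-CP. The same happens for optimize ("no-ipa-cp"). */
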
be95e2b9
MJ
106/* Return index of the formal parameter whose tree is PTREE among the
107 parameters described by DESCRIPTORS, or -1 if there is no such parameter. */
108
d044dd17 109static int
9771b263 110ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
518dc859
RL
111{
112 int i, count;
113
9771b263 114 count = descriptors.length ();
518dc859 115 for (i = 0; i < count; i++)
9771b263 116 if (descriptors[i].decl == ptree)
518dc859
RL
117 return i;
118
119 return -1;
120}
121
d044dd17
MJ
122/* Return index of the formal whose tree is PTREE in function which corresponds
123 to INFO. */
124
125int
126ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
127{
128 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
129}
130
131/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
132 NODE. */
be95e2b9 133
f8e2a1ed
MJ
134static void
135ipa_populate_param_decls (struct cgraph_node *node,
9771b263 136 vec<ipa_param_descriptor_t> &descriptors)
518dc859
RL
137{
138 tree fndecl;
139 tree fnargs;
140 tree parm;
141 int param_num;
3e293154 142
67348ccc 143 fndecl = node->decl;
0e8853ee 144 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
145 fnargs = DECL_ARGUMENTS (fndecl);
146 param_num = 0;
910ad8de 147 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 148 {
9771b263 149 descriptors[param_num].decl = parm;
0e8853ee 150 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
518dc859
RL
151 param_num++;
152 }
153}
154
3f84bf08
MJ
155/* Return how many formal parameters FNDECL has. */
156
157static inline int
310bc633 158count_formal_params (tree fndecl)
3f84bf08
MJ
159{
160 tree parm;
161 int count = 0;
0e8853ee 162 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 163
910ad8de 164 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
165 count++;
166
167 return count;
168}
169
0e8853ee
JH
170/* Dump a human readable representation of the Ith formal parameter of the
171 function corresponding to INFO into FILE. */
173
174void
175ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
176{
177 fprintf (file, "param #%i", i);
178 if (info->descriptors[i].decl)
179 {
180 fprintf (file, " ");
181 print_generic_expr (file, info->descriptors[i].decl, 0);
182 }
183}
184
185/* Initialize the ipa_node_params structure associated with NODE
186 to hold PARAM_COUNT parameters. */
187
188void
189ipa_alloc_node_params (struct cgraph_node *node, int param_count)
190{
191 struct ipa_node_params *info = IPA_NODE_REF (node);
192
193 if (!info->descriptors.exists () && param_count)
194 info->descriptors.safe_grow_cleared (param_count);
195}
196
f8e2a1ed
MJ
197/* Initialize the ipa_node_params structure associated with NODE by counting
198 the function parameters, creating the descriptors and populating their
199 param_decls. */
be95e2b9 200
f8e2a1ed
MJ
201void
202ipa_initialize_node_params (struct cgraph_node *node)
203{
204 struct ipa_node_params *info = IPA_NODE_REF (node);
205
9771b263 206 if (!info->descriptors.exists ())
f8e2a1ed 207 {
67348ccc 208 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 209 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 210 }
518dc859
RL
211}
212
749aa96d
MJ
213/* Print the jump functions associated with call graph edge CS to file F. */
214
215static void
216ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
217{
218 int i, count;
219
220 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
221 for (i = 0; i < count; i++)
222 {
223 struct ipa_jump_func *jump_func;
224 enum jump_func_type type;
225
226 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
227 type = jump_func->type;
228
229 fprintf (f, " param %d: ", i);
230 if (type == IPA_JF_UNKNOWN)
231 fprintf (f, "UNKNOWN\n");
232 else if (type == IPA_JF_KNOWN_TYPE)
233 {
c7573249
MJ
234 fprintf (f, "KNOWN TYPE: base ");
235 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
236 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
237 jump_func->value.known_type.offset);
238 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
239 fprintf (f, "\n");
749aa96d
MJ
240 }
241 else if (type == IPA_JF_CONST)
242 {
4502fe8d 243 tree val = jump_func->value.constant.value;
749aa96d
MJ
244 fprintf (f, "CONST: ");
245 print_generic_expr (f, val, 0);
246 if (TREE_CODE (val) == ADDR_EXPR
247 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
248 {
249 fprintf (f, " -> ");
250 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
251 0);
252 }
253 fprintf (f, "\n");
254 }
749aa96d
MJ
255 else if (type == IPA_JF_PASS_THROUGH)
256 {
257 fprintf (f, "PASS THROUGH: ");
8b7773a4 258 fprintf (f, "%d, op %s",
749aa96d 259 jump_func->value.pass_through.formal_id,
5806f481 260 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 261 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
262 {
263 fprintf (f, " ");
264 print_generic_expr (f,
265 jump_func->value.pass_through.operand, 0);
266 }
267 if (jump_func->value.pass_through.agg_preserved)
268 fprintf (f, ", agg_preserved");
b8f6e610
MJ
269 if (jump_func->value.pass_through.type_preserved)
270 fprintf (f, ", type_preserved");
3ea6239f 271 fprintf (f, "\n");
749aa96d
MJ
272 }
273 else if (type == IPA_JF_ANCESTOR)
274 {
275 fprintf (f, "ANCESTOR: ");
276 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
277 jump_func->value.ancestor.formal_id,
278 jump_func->value.ancestor.offset);
279 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
280 if (jump_func->value.ancestor.agg_preserved)
281 fprintf (f, ", agg_preserved");
b8f6e610
MJ
282 if (jump_func->value.ancestor.type_preserved)
283 fprintf (f, ", type_preserved");
3ea6239f 284 fprintf (f, "\n");
749aa96d 285 }
8b7773a4
MJ
286
287 if (jump_func->agg.items)
288 {
289 struct ipa_agg_jf_item *item;
290 int j;
291
292 fprintf (f, " Aggregate passed by %s:\n",
293 jump_func->agg.by_ref ? "reference" : "value");
9771b263 294 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
295 {
296 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
297 item->offset);
298 if (TYPE_P (item->value))
299 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
300 tree_low_cst (TYPE_SIZE (item->value), 1));
301 else
302 {
303 fprintf (f, "cst: ");
304 print_generic_expr (f, item->value, 0);
305 }
306 fprintf (f, "\n");
307 }
308 }
749aa96d
MJ
309 }
310}
311
312
be95e2b9
MJ
313/* Print the jump functions of all arguments on all call graph edges going from
314 NODE to file F. */
315
518dc859 316void
3e293154 317ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 318{
3e293154 319 struct cgraph_edge *cs;
518dc859 320
fec39fa6 321 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 322 node->order);
3e293154
MJ
323 for (cs = node->callees; cs; cs = cs->next_callee)
324 {
325 if (!ipa_edge_args_info_available_for_edge_p (cs))
326 continue;
327
749aa96d 328 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
329 xstrdup (node->name ()), node->order,
330 xstrdup (cs->callee->name ()),
67348ccc 331 cs->callee->order);
749aa96d
MJ
332 ipa_print_node_jump_functions_for_edge (f, cs);
333 }
518dc859 334
9de04252 335 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 336 {
9de04252 337 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
338 if (!ipa_edge_args_info_available_for_edge_p (cs))
339 continue;
3e293154 340
9de04252
MJ
341 ii = cs->indirect_info;
342 if (ii->agg_contents)
c13bc3d9 343 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 344 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 345 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
346 ii->param_index, ii->offset,
347 ii->by_ref ? "by reference" : "by_value");
348 else
349 fprintf (f, " indirect %s callsite, calling param %i",
350 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
351
749aa96d
MJ
352 if (cs->call_stmt)
353 {
9de04252 354 fprintf (f, ", for stmt ");
749aa96d 355 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 356 }
749aa96d 357 else
9de04252 358 fprintf (f, "\n");
749aa96d 359 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
360 }
361}
362
363/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 364
3e293154
MJ
365void
366ipa_print_all_jump_functions (FILE *f)
367{
368 struct cgraph_node *node;
369
ca30a539 370 fprintf (f, "\nJump functions:\n");
65c70e6b 371 FOR_EACH_FUNCTION (node)
3e293154
MJ
372 {
373 ipa_print_node_jump_functions (f, node);
374 }
375}
376
7b872d9e
MJ
377/* Set JFUNC to be a known type jump function. */
378
379static void
380ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
381 tree base_type, tree component_type)
382{
06d65050
JH
383 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
384 && TYPE_BINFO (component_type));
7b872d9e
MJ
385 jfunc->type = IPA_JF_KNOWN_TYPE;
386 jfunc->value.known_type.offset = offset,
387 jfunc->value.known_type.base_type = base_type;
388 jfunc->value.known_type.component_type = component_type;
389}
390
b8f6e610
MJ
391/* Set JFUNC to be a copy of another jump function (to be used by jump function
392 combination code). The two functions will share their rdesc. */
393
394static void
395ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
396 struct ipa_jump_func *src)
397
398{
399 gcc_checking_assert (src->type == IPA_JF_CONST);
400 dst->type = IPA_JF_CONST;
401 dst->value.constant = src->value.constant;
402}
403
7b872d9e
MJ
404/* Set JFUNC to be a constant jump function. */
405
406static void
4502fe8d
MJ
407ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
408 struct cgraph_edge *cs)
7b872d9e 409{
5368224f
DC
410 constant = unshare_expr (constant);
411 if (constant && EXPR_P (constant))
412 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 413 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
414 jfunc->value.constant.value = unshare_expr_without_location (constant);
415
416 if (TREE_CODE (constant) == ADDR_EXPR
417 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
418 {
419 struct ipa_cst_ref_desc *rdesc;
420 if (!ipa_refdesc_pool)
421 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
422 sizeof (struct ipa_cst_ref_desc), 32);
423
424 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
425 rdesc->cs = cs;
426 rdesc->next_duplicate = NULL;
427 rdesc->refcount = 1;
428 jfunc->value.constant.rdesc = rdesc;
429 }
430 else
431 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
432}
433
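/* A sketch of the situation the reference descriptions above are meant for
   (illustrative names only): in

     extern void handler (void);
     void foo (void) { register_callback (&handler); }

   the argument is an ADDR_EXPR of a FUNCTION_DECL, so besides recording the
   constant itself, ipa_set_jf_constant also allocates an ipa_cst_ref_desc,
   which lets later passes keep track of how many described uses of the
   reference to handler remain. */
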
434/* Set JFUNC to be a simple pass-through jump function. */
435static void
8b7773a4 436ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 437 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
438{
439 jfunc->type = IPA_JF_PASS_THROUGH;
440 jfunc->value.pass_through.operand = NULL_TREE;
441 jfunc->value.pass_through.formal_id = formal_id;
442 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 443 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 444 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
445}
446
447/* Set JFUNC to be an arithmetic pass through jump function. */
448
449static void
450ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
451 tree operand, enum tree_code operation)
452{
453 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 454 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
455 jfunc->value.pass_through.formal_id = formal_id;
456 jfunc->value.pass_through.operation = operation;
8b7773a4 457 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 458 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
459}
460
461/* Set JFUNC to be an ancestor jump function. */
462
463static void
464ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
465 tree type, int formal_id, bool agg_preserved,
466 bool type_preserved)
7b872d9e
MJ
467{
468 jfunc->type = IPA_JF_ANCESTOR;
469 jfunc->value.ancestor.formal_id = formal_id;
470 jfunc->value.ancestor.offset = offset;
471 jfunc->value.ancestor.type = type;
8b7773a4 472 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 473 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
474}
475
e248d83f
MJ
476/* Extract the actual BINFO being described by JFUNC which must be a known type
477 jump function. */
478
479tree
480ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
481{
482 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
483 if (!base_binfo)
484 return NULL_TREE;
485 return get_binfo_at_offset (base_binfo,
486 jfunc->value.known_type.offset,
487 jfunc->value.known_type.component_type);
488}
489
f65cf2b7
MJ
490/* Structure to be passed in between detect_type_change and
491 check_stmt_for_type_change. */
492
493struct type_change_info
494{
290ebcb7
MJ
495 /* Offset into the object where there is the virtual method pointer we are
496 looking for. */
497 HOST_WIDE_INT offset;
498 /* The declaration or SSA_NAME pointer of the base that we are checking for
499 type change. */
500 tree object;
501 /* If we actually can tell the type that the object has changed to, it is
502 stored in this field. Otherwise it remains NULL_TREE. */
503 tree known_current_type;
f65cf2b7
MJ
504 /* Set to true if dynamic type change has been detected. */
505 bool type_maybe_changed;
290ebcb7
MJ
506 /* Set to true if multiple types have been encountered. known_current_type
507 must be disregarded in that case. */
508 bool multiple_types_encountered;
f65cf2b7
MJ
509};
510
511/* Return true if STMT can modify a virtual method table pointer.
512
513 This function makes special assumptions about both constructors and
514 destructors which are all the functions that are allowed to alter the VMT
515 pointers. It assumes that destructors begin with assignment into all VMT
516 pointers and that constructors essentially work in the following way:
517
518 1) The very first thing they do is that they call constructors of ancestor
519 sub-objects that have them.
520
521 2) Then VMT pointers of this and all its ancestors are set to new values
522 corresponding to the type corresponding to the constructor.
523
524 3) Only afterwards, other stuff such as constructors of member sub-objects
525 and the code written by the user is run. Only this may include calling
526 virtual functions, directly or indirectly.
527
528 There is no way to call a constructor of an ancestor sub-object in any
529 other way.
530
531 This means that we do not have to care whether constructors get the correct
532 type information because they will always change it (in fact, if we define
533 the type to be given by the VMT pointer, it is undefined).
534
535 The most important fact to derive from the above is that if, for some
536 statement in the section 3, we try to detect whether the dynamic type has
537 changed, we can safely ignore all calls as we examine the function body
538 backwards until we reach statements in section 2 because these calls cannot
539 be ancestor constructors or destructors (if the input is not bogus) and so
540 do not change the dynamic type (this holds true only for automatically
541 allocated objects but at the moment we devirtualize only these). We then
542 must detect that statements in section 2 change the dynamic type and can try
543 to derive the new type. That is enough and we can stop, we will never see
544 the calls into constructors of sub-objects in this code. Therefore we can
545 safely ignore all call statements that we traverse.
546 */
547
548static bool
549stmt_may_be_vtbl_ptr_store (gimple stmt)
550{
551 if (is_gimple_call (stmt))
552 return false;
553 else if (is_gimple_assign (stmt))
554 {
555 tree lhs = gimple_assign_lhs (stmt);
556
0004f992
MJ
557 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
558 {
559 if (flag_strict_aliasing
560 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
561 return false;
562
563 if (TREE_CODE (lhs) == COMPONENT_REF
564 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 565 return false;
0004f992
MJ
566 /* In the future we might want to use get_base_ref_and_offset to find
567 if there is a field corresponding to the offset and if so, proceed
568 almost like if it was a component ref. */
569 }
f65cf2b7
MJ
570 }
571 return true;
572}
573
290ebcb7
MJ
574/* If STMT can be proved to be an assignment to the virtual method table
575 pointer of the object described by TCI and the type associated with the new
576 table identified, return that type. Otherwise return NULL_TREE. */
577
578static tree
579extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
580{
581 HOST_WIDE_INT offset, size, max_size;
582 tree lhs, rhs, base;
583
584 if (!gimple_assign_single_p (stmt))
585 return NULL_TREE;
586
587 lhs = gimple_assign_lhs (stmt);
588 rhs = gimple_assign_rhs1 (stmt);
589 if (TREE_CODE (lhs) != COMPONENT_REF
590 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
591 || TREE_CODE (rhs) != ADDR_EXPR)
592 return NULL_TREE;
593 rhs = get_base_address (TREE_OPERAND (rhs, 0));
594 if (!rhs
595 || TREE_CODE (rhs) != VAR_DECL
596 || !DECL_VIRTUAL_P (rhs))
597 return NULL_TREE;
598
599 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
600 if (offset != tci->offset
601 || size != POINTER_SIZE
602 || max_size != POINTER_SIZE)
603 return NULL_TREE;
604 if (TREE_CODE (base) == MEM_REF)
605 {
606 if (TREE_CODE (tci->object) != MEM_REF
607 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
608 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
609 TREE_OPERAND (base, 1)))
610 return NULL_TREE;
611 }
612 else if (tci->object != base)
613 return NULL_TREE;
614
615 return DECL_CONTEXT (rhs);
616}
617
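/* The store the function above looks for is the virtual table pointer
   assignment emitted in constructors, which in GIMPLE dumps appears roughly
   (the exact dump syntax varies between releases) as

     this_2(D)->_vptr.A = &_ZTV1A[2];

   i.e. a COMPONENT_REF of a DECL_VIRTUAL_P field on the left hand side and
   the address of a DECL_VIRTUAL_P VAR_DECL (the virtual table) on the right,
   from whose DECL_CONTEXT the new dynamic type is then read. */
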
61502ca8 618/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
619 detect_type_change to check whether a particular statement may modify
620 the virtual table pointer, and if possible also determine the new type of
621 the (sub-)object. It stores its result into DATA, which points to a
622 type_change_info structure. */
623
624static bool
625check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
626{
627 gimple stmt = SSA_NAME_DEF_STMT (vdef);
628 struct type_change_info *tci = (struct type_change_info *) data;
629
630 if (stmt_may_be_vtbl_ptr_store (stmt))
631 {
290ebcb7
MJ
632 tree type;
633 type = extr_type_from_vtbl_ptr_store (stmt, tci);
634 if (tci->type_maybe_changed
635 && type != tci->known_current_type)
636 tci->multiple_types_encountered = true;
637 tci->known_current_type = type;
f65cf2b7
MJ
638 tci->type_maybe_changed = true;
639 return true;
640 }
641 else
642 return false;
643}
644
290ebcb7
MJ
645
646
06d65050
JH
647/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
648 callsite CALL) by looking for assignments to its virtual table pointer. If
649 it has, return true and fill in the jump function JFUNC with relevant type
650 information or set it to unknown. ARG is the object itself (not a pointer
651 to it, unless dereferenced). BASE is the base of the memory access as
652 returned by get_ref_base_and_extent, as is the offset. */
f65cf2b7
MJ
653
654static bool
06d65050
JH
655detect_type_change (tree arg, tree base, tree comp_type, gimple call,
656 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
657{
658 struct type_change_info tci;
659 ao_ref ao;
660
661 gcc_checking_assert (DECL_P (arg)
662 || TREE_CODE (arg) == MEM_REF
663 || handled_component_p (arg));
664 /* Const calls cannot call virtual methods through VMT and so type changes do
665 not matter. */
06d65050
JH
666 if (!flag_devirtualize || !gimple_vuse (call)
667 /* Be sure expected_type is polymorphic. */
668 || !comp_type
669 || TREE_CODE (comp_type) != RECORD_TYPE
670 || !TYPE_BINFO (comp_type)
671 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
f65cf2b7
MJ
672 return false;
673
dd887943 674 ao_ref_init (&ao, arg);
f65cf2b7
MJ
675 ao.base = base;
676 ao.offset = offset;
677 ao.size = POINTER_SIZE;
678 ao.max_size = ao.size;
f65cf2b7 679
290ebcb7
MJ
680 tci.offset = offset;
681 tci.object = get_base_address (arg);
682 tci.known_current_type = NULL_TREE;
683 tci.type_maybe_changed = false;
684 tci.multiple_types_encountered = false;
685
f65cf2b7
MJ
686 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
687 &tci, NULL);
688 if (!tci.type_maybe_changed)
689 return false;
690
290ebcb7
MJ
691 if (!tci.known_current_type
692 || tci.multiple_types_encountered
693 || offset != 0)
694 jfunc->type = IPA_JF_UNKNOWN;
695 else
7b872d9e 696 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 697
f65cf2b7
MJ
698 return true;
699}
700
701/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
702 SSA name (its dereference will become the base and the offset is assumed to
703 be zero). */
704
705static bool
06d65050
JH
706detect_type_change_ssa (tree arg, tree comp_type,
707 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
708{
709 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 710 if (!flag_devirtualize
06d65050 711 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
712 return false;
713
714 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 715 build_int_cst (ptr_type_node, 0));
f65cf2b7 716
06d65050 717 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
718}
719
fdb0e1b4
MJ
720/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
721 boolean variable pointed to by DATA. */
722
723static bool
724mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
725 void *data)
726{
727 bool *b = (bool *) data;
728 *b = true;
729 return true;
730}
731
688010ba 732/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4
MJ
733 a value known not to be modified in this function before reaching the
734 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
735 information about the parameter. */
fdb0e1b4
MJ
736
737static bool
8b7773a4
MJ
738parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
739 gimple stmt, tree parm_load)
fdb0e1b4
MJ
740{
741 bool modified = false;
8b7773a4 742 bitmap *visited_stmts;
fdb0e1b4
MJ
743 ao_ref refd;
744
8b7773a4
MJ
745 if (parm_ainfo && parm_ainfo->parm_modified)
746 return false;
fdb0e1b4
MJ
747
748 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
749 ao_ref_init (&refd, parm_load);
750 /* We can cache visited statements only when parm_ainfo is available and when
751 we are looking at a naked load of the whole parameter. */
752 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
753 visited_stmts = NULL;
754 else
755 visited_stmts = &parm_ainfo->parm_visited_statements;
756 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
757 visited_stmts);
758 if (parm_ainfo && modified)
759 parm_ainfo->parm_modified = true;
760 return !modified;
fdb0e1b4
MJ
761}
762
763/* If STMT is an assignment that loads a value from a parameter declaration,
764 return the index of the parameter in ipa_node_params which has not been
765 modified. Otherwise return -1. */
766
767static int
9771b263 768load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
fdb0e1b4
MJ
769 struct param_analysis_info *parms_ainfo,
770 gimple stmt)
771{
772 int index;
773 tree op1;
774
775 if (!gimple_assign_single_p (stmt))
776 return -1;
777
778 op1 = gimple_assign_rhs1 (stmt);
779 if (TREE_CODE (op1) != PARM_DECL)
780 return -1;
781
d044dd17 782 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 783 if (index < 0
8b7773a4
MJ
784 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
785 : NULL, stmt, op1))
fdb0e1b4
MJ
786 return -1;
787
788 return index;
789}
f65cf2b7 790
8b7773a4
MJ
791/* Return true if memory reference REF loads data that are known to be
792 unmodified in this function before reaching statement STMT. PARM_AINFO, if
793 non-NULL, is a pointer to a structure containing temporary information about
794 PARM. */
795
796static bool
797parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
798 gimple stmt, tree ref)
799{
800 bool modified = false;
801 ao_ref refd;
802
803 gcc_checking_assert (gimple_vuse (stmt));
804 if (parm_ainfo && parm_ainfo->ref_modified)
805 return false;
806
807 ao_ref_init (&refd, ref);
808 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
809 NULL);
810 if (parm_ainfo && modified)
811 parm_ainfo->ref_modified = true;
812 return !modified;
813}
814
815/* Return true if the data pointed to by PARM is known to be unmodified in this
816 function before reaching call statement CALL into which it is passed.
817 PARM_AINFO is a pointer to a structure containing temporary information
818 about PARM. */
819
820static bool
821parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
822 gimple call, tree parm)
823{
824 bool modified = false;
825 ao_ref refd;
826
827 /* It's unnecessary to calculate anything about memory contents for a const
828 function because it is not going to use it. But do not cache the result
829 either. Also, no such calculations for non-pointers. */
830 if (!gimple_vuse (call)
831 || !POINTER_TYPE_P (TREE_TYPE (parm)))
832 return false;
833
834 if (parm_ainfo->pt_modified)
835 return false;
836
837 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
838 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
839 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
840 if (modified)
841 parm_ainfo->pt_modified = true;
842 return !modified;
843}
844
845/* Return true if we can prove that OP is a memory reference loading unmodified
846 data from an aggregate passed as a parameter and if the aggregate is passed
847 by reference, that the alias type of the load corresponds to the type of the
848 formal parameter (so that we can rely on this type for TBAA in callers).
849 INFO and PARMS_AINFO describe parameters of the current function (but the
850 latter can be NULL), STMT is the load statement. If function returns true,
851 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
852 within the aggregate and whether it is a load from a value passed by
853 reference respectively. */
854
855static bool
9771b263 856ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
8b7773a4
MJ
857 struct param_analysis_info *parms_ainfo, gimple stmt,
858 tree op, int *index_p, HOST_WIDE_INT *offset_p,
3ff2ca23 859 HOST_WIDE_INT *size_p, bool *by_ref_p)
8b7773a4
MJ
860{
861 int index;
862 HOST_WIDE_INT size, max_size;
863 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
864
865 if (max_size == -1 || max_size != size || *offset_p < 0)
866 return false;
867
868 if (DECL_P (base))
869 {
d044dd17 870 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
871 if (index >= 0
872 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
873 : NULL, stmt, op))
874 {
875 *index_p = index;
876 *by_ref_p = false;
3ff2ca23
JJ
877 if (size_p)
878 *size_p = size;
8b7773a4
MJ
879 return true;
880 }
881 return false;
882 }
883
884 if (TREE_CODE (base) != MEM_REF
885 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
886 || !integer_zerop (TREE_OPERAND (base, 1)))
887 return false;
888
889 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
890 {
891 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 892 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
893 }
894 else
895 {
896 /* This branch catches situations where a pointer parameter is not a
897 gimple register, for example:
898
899 void hip7(S*) (struct S * p)
900 {
901 void (*<T2e4>) (struct S *) D.1867;
902 struct S * p.1;
903
904 <bb 2>:
905 p.1_1 = p;
906 D.1867_2 = p.1_1->f;
907 D.1867_2 ();
908 gdp = &p;
909 */
910
911 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 912 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
913 }
914
915 if (index >= 0
916 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
917 stmt, op))
918 {
919 *index_p = index;
920 *by_ref_p = true;
3ff2ca23
JJ
921 if (size_p)
922 *size_p = size;
8b7773a4
MJ
923 return true;
924 }
925 return false;
926}
927
928/* Just like the previous function, just without the param_analysis_info
929 pointer, for users outside of this file. */
930
931bool
932ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
933 tree op, int *index_p, HOST_WIDE_INT *offset_p,
934 bool *by_ref_p)
935{
d044dd17 936 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
3ff2ca23 937 offset_p, NULL, by_ref_p);
8b7773a4
MJ
938}
939
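/* An illustrative use of the two functions above (type and field names made
   up): for

     int peek (struct S *p) { return p->count; }

   the load of p->count is based on a MEM_REF of the default definition of the
   pointer parameter p, so if nothing can clobber *p before the load,
   ipa_load_from_parm_agg returns true with *INDEX_P set to the index of p,
   *OFFSET_P to the bit offset of the count field and *BY_REF_P to true. */
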
b258210c 940/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
941 of an assignment statement STMT, try to determine whether we are actually
942 handling any of the following cases and construct an appropriate jump
943 function into JFUNC if so:
944
945 1) The passed value is loaded from a formal parameter which is not a gimple
946 register (most probably because it is addressable, the value has to be
947 scalar) and we can guarantee the value has not changed. This case can
948 therefore be described by a simple pass-through jump function. For example:
949
950 foo (int a)
951 {
952 int a.0;
953
954 a.0_2 = a;
955 bar (a.0_2);
956
957 2) The passed value can be described by a simple arithmetic pass-through
958 jump function. E.g.
959
960 foo (int a)
961 {
962 int D.2064;
963
964 D.2064_4 = a.1(D) + 4;
965 bar (D.2064_4);
966
967 This case can also occur in combination of the previous one, e.g.:
968
969 foo (int a, int z)
970 {
971 int a.0;
972 int D.2064;
973
974 a.0_3 = a;
975 D.2064_4 = a.0_3 + 4;
976 foo (D.2064_4);
977
978 3) The passed value is an address of an object within another one (which
979 also passed by reference). Such situations are described by an ancestor
980 jump function and describe situations such as:
981
982 B::foo() (struct B * const this)
983 {
984 struct A * D.1845;
985
986 D.1845_2 = &this_1(D)->D.1748;
987 A::bar (D.1845_2);
988
989 INFO is the structure describing individual parameters which is used across
990 different stages of IPA optimizations. PARMS_AINFO contains the information
991 that is only needed for intraprocedural analysis. */
685b0d13
MJ
992
993static void
b258210c 994compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 995 struct param_analysis_info *parms_ainfo,
b258210c 996 struct ipa_jump_func *jfunc,
06d65050
JH
997 gimple call, gimple stmt, tree name,
998 tree param_type)
685b0d13
MJ
999{
1000 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1001 tree op1, tc_ssa, base, ssa;
685b0d13 1002 int index;
685b0d13 1003
685b0d13 1004 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1005
fdb0e1b4 1006 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1007 {
fdb0e1b4
MJ
1008 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1009 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1010 else
d044dd17 1011 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
1012 SSA_NAME_DEF_STMT (op1));
1013 tc_ssa = op1;
1014 }
1015 else
1016 {
d044dd17 1017 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
1018 tc_ssa = gimple_assign_lhs (stmt);
1019 }
1020
1021 if (index >= 0)
1022 {
1023 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1024
b258210c 1025 if (op2)
685b0d13 1026 {
b258210c
MJ
1027 if (!is_gimple_ip_invariant (op2)
1028 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1029 && !useless_type_conversion_p (TREE_TYPE (name),
1030 TREE_TYPE (op1))))
1031 return;
1032
7b872d9e
MJ
1033 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1034 gimple_assign_rhs_code (stmt));
685b0d13 1035 }
b8f6e610 1036 else if (gimple_assign_single_p (stmt))
8b7773a4
MJ
1037 {
1038 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1039 call, tc_ssa);
06d65050
JH
1040 bool type_p = false;
1041
1042 if (param_type && POINTER_TYPE_P (param_type))
1043 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1044 call, jfunc);
b8f6e610
MJ
1045 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1046 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1047 }
685b0d13
MJ
1048 return;
1049 }
1050
1051 if (TREE_CODE (op1) != ADDR_EXPR)
1052 return;
1053 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1054 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1055 return;
32aa622c
MJ
1056 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1057 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1058 /* If this is a varying address, punt. */
1059 || max_size == -1
1060 || max_size != size)
685b0d13 1061 return;
32aa622c 1062 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
1063 ssa = TREE_OPERAND (base, 0);
1064 if (TREE_CODE (ssa) != SSA_NAME
1065 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1066 || offset < 0)
685b0d13
MJ
1067 return;
1068
b8f6e610 1069 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1070 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1071 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1072 {
06d65050
JH
1073 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1074 call, jfunc, offset);
b8f6e610
MJ
1075 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1076 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1077 parm_ref_data_pass_through_p (&parms_ainfo[index],
1078 call, ssa), type_p);
1079 }
685b0d13
MJ
1080}
1081
40591473
MJ
1082/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1083 it looks like:
1084
1085 iftmp.1_3 = &obj_2(D)->D.1762;
1086
1087 The base of the MEM_REF must be a default definition SSA NAME of a
1088 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1089 whole MEM_REF expression is returned and the offset calculated from any
1090 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1091 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1092
1093static tree
1094get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1095{
1096 HOST_WIDE_INT size, max_size;
1097 tree expr, parm, obj;
1098
1099 if (!gimple_assign_single_p (assign))
1100 return NULL_TREE;
1101 expr = gimple_assign_rhs1 (assign);
1102
1103 if (TREE_CODE (expr) != ADDR_EXPR)
1104 return NULL_TREE;
1105 expr = TREE_OPERAND (expr, 0);
1106 obj = expr;
1107 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1108
1109 if (TREE_CODE (expr) != MEM_REF
1110 /* If this is a varying address, punt. */
1111 || max_size == -1
1112 || max_size != size
1113 || *offset < 0)
1114 return NULL_TREE;
1115 parm = TREE_OPERAND (expr, 0);
1116 if (TREE_CODE (parm) != SSA_NAME
1117 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1118 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1119 return NULL_TREE;
1120
1121 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1122 *obj_p = obj;
1123 return expr;
1124}
1125
685b0d13 1126
b258210c
MJ
1127/* Given that an actual argument is an SSA_NAME that is a result of a phi
1128 statement PHI, try to find out whether NAME is in fact a
1129 multiple-inheritance typecast from a descendant into an ancestor of a formal
1130 parameter and thus can be described by an ancestor jump function and if so,
1131 write the appropriate function into JFUNC.
1132
1133 Essentially we want to match the following pattern:
1134
1135 if (obj_2(D) != 0B)
1136 goto <bb 3>;
1137 else
1138 goto <bb 4>;
1139
1140 <bb 3>:
1141 iftmp.1_3 = &obj_2(D)->D.1762;
1142
1143 <bb 4>:
1144 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1145 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1146 return D.1879_6; */
1147
1148static void
1149compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1150 struct param_analysis_info *parms_ainfo,
b258210c 1151 struct ipa_jump_func *jfunc,
06d65050 1152 gimple call, gimple phi, tree param_type)
b258210c 1153{
40591473 1154 HOST_WIDE_INT offset;
b258210c
MJ
1155 gimple assign, cond;
1156 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1157 tree tmp, parm, expr, obj;
b258210c
MJ
1158 int index, i;
1159
54e348cb 1160 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1161 return;
1162
54e348cb
MJ
1163 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1164 tmp = PHI_ARG_DEF (phi, 0);
1165 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1166 tmp = PHI_ARG_DEF (phi, 1);
1167 else
1168 return;
b258210c
MJ
1169 if (TREE_CODE (tmp) != SSA_NAME
1170 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1171 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1172 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1173 return;
1174
1175 assign = SSA_NAME_DEF_STMT (tmp);
1176 assign_bb = gimple_bb (assign);
40591473 1177 if (!single_pred_p (assign_bb))
b258210c 1178 return;
40591473
MJ
1179 expr = get_ancestor_addr_info (assign, &obj, &offset);
1180 if (!expr)
b258210c
MJ
1181 return;
1182 parm = TREE_OPERAND (expr, 0);
b258210c 1183 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1184 gcc_assert (index >= 0);
b258210c
MJ
1185
1186 cond_bb = single_pred (assign_bb);
1187 cond = last_stmt (cond_bb);
69610617
SB
1188 if (!cond
1189 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1190 || gimple_cond_code (cond) != NE_EXPR
1191 || gimple_cond_lhs (cond) != parm
1192 || !integer_zerop (gimple_cond_rhs (cond)))
1193 return;
1194
b258210c
MJ
1195 phi_bb = gimple_bb (phi);
1196 for (i = 0; i < 2; i++)
1197 {
1198 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1199 if (pred != assign_bb && pred != cond_bb)
1200 return;
1201 }
1202
06d65050
JH
1203 bool type_p = false;
1204 if (param_type && POINTER_TYPE_P (param_type))
1205 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1206 call, jfunc, offset);
b8f6e610 1207 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8b7773a4
MJ
1208 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1209 parm_ref_data_pass_through_p (&parms_ainfo[index],
b8f6e610 1210 call, parm), type_p);
b258210c
MJ
1211}
1212
61502ca8 1213/* Given OP which is passed as an actual argument to a called function,
b258210c 1214 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1215 and if so, create one and store it to JFUNC.
1216 EXPECTED_TYPE represents the type the argument should have. */
b258210c
MJ
1217
1218static void
f65cf2b7 1219compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1220 gimple call, tree expected_type)
b258210c 1221{
32aa622c 1222 HOST_WIDE_INT offset, size, max_size;
c7573249 1223 tree base;
b258210c 1224
05842ff5
MJ
1225 if (!flag_devirtualize
1226 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1227 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1228 /* Be sure expected_type is polymorphic. */
1229 || !expected_type
1230 || TREE_CODE (expected_type) != RECORD_TYPE
1231 || !TYPE_BINFO (expected_type)
1232 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1233 return;
1234
1235 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1236 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1237 if (!DECL_P (base)
1238 || max_size == -1
1239 || max_size != size
1240 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1241 || is_global_var (base))
1242 return;
1243
06d65050 1244 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1245 return;
1246
06d65050
JH
1247 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1248 expected_type);
b258210c
MJ
1249}
1250
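/* For illustration (not from the original sources): given

     struct A { virtual void f (); };
     void consume (struct A *);
     void produce (void) { struct A a; consume (&a); }

   the argument &a is an ADDR_EXPR of an automatic variable of a polymorphic
   type, so unless detect_type_change finds a possible dynamic type change
   before the call, the argument gets a KNOWN_TYPE jump function with base and
   component type A and zero offset. */
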
be95e2b9
MJ
1251/* Inspect the given TYPE and return true iff it has the same structure (the
1252 same number of fields of the same types) as a C++ member pointer. If
1253 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1254 corresponding fields there. */
1255
3e293154
MJ
1256static bool
1257type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1258{
1259 tree fld;
1260
1261 if (TREE_CODE (type) != RECORD_TYPE)
1262 return false;
1263
1264 fld = TYPE_FIELDS (type);
1265 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4
MJ
1266 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1267 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1268 return false;
1269
1270 if (method_ptr)
1271 *method_ptr = fld;
1272
910ad8de 1273 fld = DECL_CHAIN (fld);
8b7773a4
MJ
1274 if (!fld || INTEGRAL_TYPE_P (fld)
1275 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1276 return false;
1277 if (delta)
1278 *delta = fld;
1279
910ad8de 1280 if (DECL_CHAIN (fld))
3e293154
MJ
1281 return false;
1282
1283 return true;
1284}
1285
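/* What the predicate above matches, sketched under the assumption of the
   usual Itanium C++ ABI lowering: a pointer to member function such as

     struct S;
     typedef int (S::*memfn) (int);

   is represented by the front end as a RECORD_TYPE roughly of the shape

     struct
     {
       int (*__pfn) (struct S *, int);
       long __delta;
     };

   that is, a field holding a pointer to a method (or, in the ABI, possibly a
   vtable offset) followed by an integral delta field, which is exactly the
   two-field layout checked for above. */
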
61502ca8 1286/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1287 return the rhs of its defining statement. Otherwise return RHS as it
1288 is. */
7ec49257
MJ
1289
1290static inline tree
1291get_ssa_def_if_simple_copy (tree rhs)
1292{
1293 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1294 {
1295 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1296
1297 if (gimple_assign_single_p (def_stmt))
1298 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1299 else
1300 break;
7ec49257
MJ
1301 }
1302 return rhs;
1303}
1304
8b7773a4
MJ
1305/* Simple linked list, describing known contents of an aggregate before the
1306 call. */
1307
1308struct ipa_known_agg_contents_list
1309{
1310 /* Offset and size of the described part of the aggregate. */
1311 HOST_WIDE_INT offset, size;
1312 /* Known constant value or NULL if the contents is known to be unknown. */
1313 tree constant;
1314 /* Pointer to the next structure in the list. */
1315 struct ipa_known_agg_contents_list *next;
1316};
3e293154 1317
8b7773a4
MJ
1318/* Traverse statements from CALL backwards, scanning whether an aggregate given
1319 in ARG is filled in with constant values. ARG can either be an aggregate
1320 expression or a pointer to an aggregate. JFUNC is the jump function into
1321 which the constants are subsequently stored. */
be95e2b9 1322
3e293154 1323static void
8b7773a4
MJ
1324determine_known_aggregate_parts (gimple call, tree arg,
1325 struct ipa_jump_func *jfunc)
3e293154 1326{
8b7773a4
MJ
1327 struct ipa_known_agg_contents_list *list = NULL;
1328 int item_count = 0, const_count = 0;
1329 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1330 gimple_stmt_iterator gsi;
8b7773a4
MJ
1331 tree arg_base;
1332 bool check_ref, by_ref;
1333 ao_ref r;
3e293154 1334
8b7773a4
MJ
1335 /* The function operates in three stages. First, we prepare check_ref, r,
1336 arg_base and arg_offset based on what is actually passed as an actual
1337 argument. */
3e293154 1338
8b7773a4
MJ
1339 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1340 {
1341 by_ref = true;
1342 if (TREE_CODE (arg) == SSA_NAME)
1343 {
1344 tree type_size;
1345 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1346 return;
1347 check_ref = true;
1348 arg_base = arg;
1349 arg_offset = 0;
1350 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1351 arg_size = tree_low_cst (type_size, 1);
1352 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1353 }
1354 else if (TREE_CODE (arg) == ADDR_EXPR)
1355 {
1356 HOST_WIDE_INT arg_max_size;
1357
1358 arg = TREE_OPERAND (arg, 0);
1359 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1360 &arg_max_size);
1361 if (arg_max_size == -1
1362 || arg_max_size != arg_size
1363 || arg_offset < 0)
1364 return;
1365 if (DECL_P (arg_base))
1366 {
1367 tree size;
1368 check_ref = false;
1369 size = build_int_cst (integer_type_node, arg_size);
1370 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1371 }
1372 else
1373 return;
1374 }
1375 else
1376 return;
1377 }
1378 else
1379 {
1380 HOST_WIDE_INT arg_max_size;
1381
1382 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1383
1384 by_ref = false;
1385 check_ref = false;
1386 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1387 &arg_max_size);
1388 if (arg_max_size == -1
1389 || arg_max_size != arg_size
1390 || arg_offset < 0)
1391 return;
1392
1393 ao_ref_init (&r, arg);
1394 }
1395
1396 /* Second stage walks back the BB, looks at individual statements and as long
1397 as it is confident of how the statements affect contents of the
1398 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1399 structures describing it. */
1400 gsi = gsi_for_stmt (call);
726a989a
RB
1401 gsi_prev (&gsi);
1402 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1403 {
8b7773a4 1404 struct ipa_known_agg_contents_list *n, **p;
726a989a 1405 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1406 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1407 tree lhs, rhs, lhs_base;
1408 bool partial_overlap;
3e293154 1409
8b7773a4 1410 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1411 continue;
8b75fc9b 1412 if (!gimple_assign_single_p (stmt))
8b7773a4 1413 break;
3e293154 1414
726a989a
RB
1415 lhs = gimple_assign_lhs (stmt);
1416 rhs = gimple_assign_rhs1 (stmt);
7d2fb524
MJ
1417 if (!is_gimple_reg_type (rhs)
1418 || TREE_CODE (lhs) == BIT_FIELD_REF
1419 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1420 break;
3e293154 1421
8b7773a4
MJ
1422 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1423 &lhs_max_size);
1424 if (lhs_max_size == -1
1425 || lhs_max_size != lhs_size
1426 || (lhs_offset < arg_offset
1427 && lhs_offset + lhs_size > arg_offset)
1428 || (lhs_offset < arg_offset + arg_size
1429 && lhs_offset + lhs_size > arg_offset + arg_size))
1430 break;
3e293154 1431
8b7773a4 1432 if (check_ref)
518dc859 1433 {
8b7773a4
MJ
1434 if (TREE_CODE (lhs_base) != MEM_REF
1435 || TREE_OPERAND (lhs_base, 0) != arg_base
1436 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1437 break;
3e293154 1438 }
8b7773a4 1439 else if (lhs_base != arg_base)
774b8a55
MJ
1440 {
1441 if (DECL_P (lhs_base))
1442 continue;
1443 else
1444 break;
1445 }
3e293154 1446
8b7773a4
MJ
1447 if (lhs_offset + lhs_size < arg_offset
1448 || lhs_offset >= (arg_offset + arg_size))
1449 continue;
1450
1451 partial_overlap = false;
1452 p = &list;
1453 while (*p && (*p)->offset < lhs_offset)
3e293154 1454 {
8b7773a4 1455 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1456 {
8b7773a4
MJ
1457 partial_overlap = true;
1458 break;
3e293154 1459 }
8b7773a4
MJ
1460 p = &(*p)->next;
1461 }
1462 if (partial_overlap)
1463 break;
1464 if (*p && (*p)->offset < lhs_offset + lhs_size)
1465 {
1466 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1467 /* We already know this value is subsequently overwritten with
1468 something else. */
1469 continue;
3e293154 1470 else
8b7773a4
MJ
1471 /* Otherwise this is a partial overlap which we cannot
1472 represent. */
1473 break;
3e293154 1474 }
3e293154 1475
8b7773a4
MJ
1476 rhs = get_ssa_def_if_simple_copy (rhs);
1477 n = XALLOCA (struct ipa_known_agg_contents_list);
1478 n->size = lhs_size;
1479 n->offset = lhs_offset;
1480 if (is_gimple_ip_invariant (rhs))
1481 {
1482 n->constant = rhs;
1483 const_count++;
1484 }
1485 else
1486 n->constant = NULL_TREE;
1487 n->next = *p;
1488 *p = n;
3e293154 1489
8b7773a4 1490 item_count++;
dfea20f1
MJ
1491 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1492 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1493 break;
1494 }
be95e2b9 1495
8b7773a4
MJ
1496 /* Third stage just goes over the list and creates an appropriate vector of
1497 ipa_agg_jf_item structures out of it, of course only if there are
1498 any known constants to begin with. */
3e293154 1499
8b7773a4 1500 if (const_count)
3e293154 1501 {
8b7773a4 1502 jfunc->agg.by_ref = by_ref;
9771b263 1503 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1504 while (list)
1505 {
1506 if (list->constant)
1507 {
f32682ca
DN
1508 struct ipa_agg_jf_item item;
1509 item.offset = list->offset - arg_offset;
7d2fb524 1510 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1511 item.value = unshare_expr_without_location (list->constant);
9771b263 1512 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1513 }
1514 list = list->next;
1515 }
3e293154
MJ
1516 }
1517}
1518
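/* A sketch of the situation the walk above is designed to catch (illustrative
   names only):

     struct S s;
     s.a = 1;
     s.b = 2;
     process (&s);

   Walking backwards from the call finds the two constant stores, so the
   aggregate part of the jump function for the argument records the values 1
   and 2 at the bit offsets of the fields a and b, with by_ref set because the
   aggregate is passed by reference here. */
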
06d65050
JH
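/* Return the type of the Ith formal parameter of the function called by edge
   E, as far as it can be determined from the callee declaration or the type
   of the call statement, or NULL_TREE if it cannot. */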
1519static tree
1520ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1521{
1522 int n;
1523 tree type = (e->callee
67348ccc 1524 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1525 : gimple_call_fntype (e->call_stmt));
1526 tree t = TYPE_ARG_TYPES (type);
1527
1528 for (n = 0; n < i; n++)
1529 {
1530 if (!t)
1531 break;
1532 t = TREE_CHAIN (t);
1533 }
1534 if (t)
1535 return TREE_VALUE (t);
1536 if (!e->callee)
1537 return NULL;
67348ccc 1538 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1539 for (n = 0; n < i; n++)
1540 {
1541 if (!t)
1542 return NULL;
1543 t = TREE_CHAIN (t);
1544 }
1545 if (t)
1546 return TREE_TYPE (t);
1547 return NULL;
1548}
1549
3e293154
MJ
1550/* Compute jump function for all arguments of callsite CS and insert the
1551 information in the jump_functions array in the ipa_edge_args corresponding
1552 to this callsite. */
be95e2b9 1553
749aa96d 1554static void
c419671c 1555ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1556 struct cgraph_edge *cs)
3e293154
MJ
1557{
1558 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1559 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1560 gimple call = cs->call_stmt;
8b7773a4 1561 int n, arg_num = gimple_call_num_args (call);
3e293154 1562
606d9a09 1563 if (arg_num == 0 || args->jump_functions)
3e293154 1564 return;
9771b263 1565 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1566
96e24d49
JJ
1567 if (gimple_call_internal_p (call))
1568 return;
5fe8e757
MJ
1569 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1570 return;
1571
8b7773a4
MJ
1572 for (n = 0; n < arg_num; n++)
1573 {
1574 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1575 tree arg = gimple_call_arg (call, n);
06d65050 1576 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1577
8b7773a4 1578 if (is_gimple_ip_invariant (arg))
4502fe8d 1579 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1580 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1581 && TREE_CODE (arg) == PARM_DECL)
1582 {
1583 int index = ipa_get_param_decl_index (info, arg);
1584
1585 gcc_assert (index >=0);
1586 /* Aggregate passed by value, check for pass-through, otherwise we
1587 will attempt to fill in aggregate contents later in this
1588 for cycle. */
1589 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1590 {
b8f6e610 1591 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1592 continue;
1593 }
1594 }
1595 else if (TREE_CODE (arg) == SSA_NAME)
1596 {
1597 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1598 {
1599 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1600 if (index >= 0)
8b7773a4 1601 {
b8f6e610 1602 bool agg_p, type_p;
8b7773a4
MJ
1603 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1604 call, arg);
06d65050
JH
1605 if (param_type && POINTER_TYPE_P (param_type))
1606 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1607 call, jfunc);
1608 else
1609 type_p = false;
b8f6e610 1610 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1611 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1612 type_p);
8b7773a4
MJ
1613 }
1614 }
1615 else
1616 {
1617 gimple stmt = SSA_NAME_DEF_STMT (arg);
1618 if (is_gimple_assign (stmt))
1619 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1620 call, stmt, arg, param_type);
8b7773a4
MJ
1621 else if (gimple_code (stmt) == GIMPLE_PHI)
1622 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1623 call, stmt, param_type);
8b7773a4
MJ
1624 }
1625 }
1626 else
06d65050
JH
1627 compute_known_type_jump_func (arg, jfunc, call,
1628 param_type
1629 && POINTER_TYPE_P (param_type)
1630 ? TREE_TYPE (param_type)
1631 : NULL);
3e293154 1632
8b7773a4
MJ
1633 if ((jfunc->type != IPA_JF_PASS_THROUGH
1634 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1635 && (jfunc->type != IPA_JF_ANCESTOR
1636 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1637 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1638 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1639 determine_known_aggregate_parts (call, arg, jfunc);
1640 }
3e293154
MJ
1641}
1642
749aa96d
MJ
1643/* Compute jump functions for all edges - both direct and indirect - outgoing
1644 from NODE. Also count the actual arguments in the process. */
1645
062c604f
MJ
1646static void
1647ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1648 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1649{
1650 struct cgraph_edge *cs;
1651
1652 for (cs = node->callees; cs; cs = cs->next_callee)
1653 {
d7da5cc8
MJ
1654 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1655 NULL);
749aa96d
MJ
1656 /* We do not need to bother analyzing calls to unknown
1657 functions unless they may become known during lto/whopr. */
67348ccc 1658 if (!callee->definition && !flag_lto)
749aa96d 1659 continue;
c419671c 1660 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1661 }
1662
1663 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1664 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1665}
1666
8b7773a4
MJ
1667/* If STMT looks like a statement loading a value from a member pointer formal
1668 parameter, return that parameter and store the offset of the field to
1669 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1670 might be clobbered). If USE_DELTA, then we look for a use of the delta
1671 field rather than the pfn. */
be95e2b9 1672
3e293154 1673static tree
8b7773a4
MJ
1674ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1675 HOST_WIDE_INT *offset_p)
3e293154 1676{
8b7773a4
MJ
1677 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1678
1679 if (!gimple_assign_single_p (stmt))
1680 return NULL_TREE;
3e293154 1681
8b7773a4 1682 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1683 if (TREE_CODE (rhs) == COMPONENT_REF)
1684 {
1685 ref_field = TREE_OPERAND (rhs, 1);
1686 rhs = TREE_OPERAND (rhs, 0);
1687 }
1688 else
1689 ref_field = NULL_TREE;
d242d063 1690 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1691 return NULL_TREE;
3e293154 1692 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1693 if (TREE_CODE (rec) != ADDR_EXPR)
1694 return NULL_TREE;
1695 rec = TREE_OPERAND (rec, 0);
3e293154 1696 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1697 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1698 return NULL_TREE;
d242d063 1699 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1700
8b7773a4
MJ
1701 if (use_delta)
1702 fld = delta_field;
1703 else
1704 fld = ptr_field;
1705 if (offset_p)
1706 *offset_p = int_bit_position (fld);
1707
ae788515
EB
1708 if (ref_field)
1709 {
1710 if (integer_nonzerop (ref_offset))
1711 return NULL_TREE;
ae788515
EB
1712 return ref_field == fld ? rec : NULL_TREE;
1713 }
3e293154 1714 else
8b7773a4
MJ
1715 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1716 : NULL_TREE;
3e293154
MJ
1717}
1718
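/* For illustration (not part of the original sources): with a member-pointer
   parameter f, a statement this function recognizes as a load of the pfn
   field typically looks like

     f$__pfn_24 = MEM[(struct *)&f + 4B];

   see also the larger example in the comment before
   ipa_analyze_indirect_call_uses below.  */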
1719/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1720
3e293154
MJ
1721static bool
1722ipa_is_ssa_with_stmt_def (tree t)
1723{
1724 if (TREE_CODE (t) == SSA_NAME
1725 && !SSA_NAME_IS_DEFAULT_DEF (t))
1726 return true;
1727 else
1728 return false;
1729}
1730
40591473
MJ
1731/* Find the indirect call graph edge corresponding to STMT and mark it as a
1732 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1733 indirect call graph edge. */
be95e2b9 1734
40591473
MJ
1735static struct cgraph_edge *
1736ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1737{
e33c6cd6 1738 struct cgraph_edge *cs;
3e293154 1739
5f902d76 1740 cs = cgraph_edge (node, stmt);
b258210c 1741 cs->indirect_info->param_index = param_index;
8b7773a4 1742 cs->indirect_info->offset = 0;
40591473 1743 cs->indirect_info->polymorphic = 0;
8b7773a4 1744 cs->indirect_info->agg_contents = 0;
c13bc3d9 1745 cs->indirect_info->member_ptr = 0;
40591473 1746 return cs;
3e293154
MJ
1747}
1748
e33c6cd6 1749/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1750 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1751 intermediate information about each formal parameter. Currently it checks
1752 whether the call calls a pointer that is a formal parameter and if so, the
1753 parameter is marked with the called flag and an indirect call graph edge
1754 describing the call is created. This is very simple for ordinary pointers
1755 represented in SSA but not-so-nice when it comes to member pointers. The
1756 ugly part of this function does nothing more than trying to match the
1757 pattern of such a call. An example of such a pattern is the gimple dump
1758 below, the call is on the last line:
3e293154 1759
ae788515
EB
1760 <bb 2>:
1761 f$__delta_5 = f.__delta;
1762 f$__pfn_24 = f.__pfn;
1763
1764 or
3e293154 1765 <bb 2>:
d242d063
MJ
1766 f$__delta_5 = MEM[(struct *)&f];
1767 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1768
ae788515 1769 and a few lines below:
8aa29647
MJ
1770
1771 <bb 5>
3e293154
MJ
1772 D.2496_3 = (int) f$__pfn_24;
1773 D.2497_4 = D.2496_3 & 1;
1774 if (D.2497_4 != 0)
1775 goto <bb 3>;
1776 else
1777 goto <bb 4>;
1778
8aa29647 1779 <bb 6>:
3e293154
MJ
1780 D.2500_7 = (unsigned int) f$__delta_5;
1781 D.2501_8 = &S + D.2500_7;
1782 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1783 D.2503_10 = *D.2502_9;
1784 D.2504_12 = f$__pfn_24 + -1;
1785 D.2505_13 = (unsigned int) D.2504_12;
1786 D.2506_14 = D.2503_10 + D.2505_13;
1787 D.2507_15 = *D.2506_14;
1788 iftmp.11_16 = (String:: *) D.2507_15;
1789
8aa29647 1790 <bb 7>:
3e293154
MJ
1791 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1792 D.2500_19 = (unsigned int) f$__delta_5;
1793 D.2508_20 = &S + D.2500_19;
1794 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1795
1796 Such patterns are results of simple calls to a member pointer:
1797
1798 int doprinting (int (MyString::* f)(int) const)
1799 {
1800 MyString S ("somestring");
1801
1802 return (S.*f)(4);
1803 }
8b7773a4
MJ
1804
1805 Moreover, the function also looks for called pointers loaded from aggregates
1806 passed by value or reference. */
3e293154
MJ
1807
1808static void
b258210c
MJ
1809ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1810 struct ipa_node_params *info,
c419671c 1811 struct param_analysis_info *parms_ainfo,
b258210c 1812 gimple call, tree target)
3e293154 1813{
726a989a 1814 gimple def;
3e293154 1815 tree n1, n2;
726a989a
RB
1816 gimple d1, d2;
1817 tree rec, rec2, cond;
1818 gimple branch;
3e293154 1819 int index;
3e293154 1820 basic_block bb, virt_bb, join;
8b7773a4
MJ
1821 HOST_WIDE_INT offset;
1822 bool by_ref;
3e293154 1823
3e293154
MJ
1824 if (SSA_NAME_IS_DEFAULT_DEF (target))
1825 {
b258210c 1826 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1827 index = ipa_get_param_decl_index (info, var);
1828 if (index >= 0)
40591473 1829 ipa_note_param_call (node, index, call);
3e293154
MJ
1830 return;
1831 }
1832
8b7773a4
MJ
1833 def = SSA_NAME_DEF_STMT (target);
1834 if (gimple_assign_single_p (def)
d044dd17 1835 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4 1836 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 1837 NULL, &by_ref))
8b7773a4
MJ
1838 {
1839 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1840 cs->indirect_info->offset = offset;
1841 cs->indirect_info->agg_contents = 1;
1842 cs->indirect_info->by_ref = by_ref;
1843 return;
1844 }
1845
3e293154
MJ
1846 /* Now we need to try to match the complex pattern of calling a member
1847 pointer. */
8b7773a4
MJ
1848 if (gimple_code (def) != GIMPLE_PHI
1849 || gimple_phi_num_args (def) != 2
1850 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1851 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1852 return;
1853
3e293154
MJ
1854 /* First, we need to check whether one of these is a load from a member
1855 pointer that is a parameter to this function. */
1856 n1 = PHI_ARG_DEF (def, 0);
1857 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1858 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1859 return;
1860 d1 = SSA_NAME_DEF_STMT (n1);
1861 d2 = SSA_NAME_DEF_STMT (n2);
1862
8aa29647 1863 join = gimple_bb (def);
8b7773a4 1864 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1865 {
8b7773a4 1866 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1867 return;
1868
8aa29647 1869 bb = EDGE_PRED (join, 0)->src;
726a989a 1870 virt_bb = gimple_bb (d2);
3e293154 1871 }
8b7773a4 1872 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1873 {
8aa29647 1874 bb = EDGE_PRED (join, 1)->src;
726a989a 1875 virt_bb = gimple_bb (d1);
3e293154
MJ
1876 }
1877 else
1878 return;
1879
1880 /* Second, we need to check that the basic blocks are laid out in the way
1881 corresponding to the pattern. */
1882
3e293154
MJ
1883 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1884 || single_pred (virt_bb) != bb
1885 || single_succ (virt_bb) != join)
1886 return;
1887
1888 /* Third, let's see that the branching is done depending on the least
1889 significant bit of the pfn. */
1890
1891 branch = last_stmt (bb);
8aa29647 1892 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1893 return;
1894
12430896
RG
1895 if ((gimple_cond_code (branch) != NE_EXPR
1896 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1897 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1898 return;
3e293154 1899
726a989a 1900 cond = gimple_cond_lhs (branch);
3e293154
MJ
1901 if (!ipa_is_ssa_with_stmt_def (cond))
1902 return;
1903
726a989a 1904 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1905 if (!is_gimple_assign (def)
726a989a
RB
1906 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1907 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1908 return;
726a989a
RB
1909
1910 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1911 if (!ipa_is_ssa_with_stmt_def (cond))
1912 return;
1913
726a989a 1914 def = SSA_NAME_DEF_STMT (cond);
3e293154 1915
8b75fc9b
MJ
1916 if (is_gimple_assign (def)
1917 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1918 {
726a989a 1919 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1920 if (!ipa_is_ssa_with_stmt_def (cond))
1921 return;
726a989a 1922 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1923 }
1924
6f7b8b70
RE
1925 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1926 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1927 == ptrmemfunc_vbit_in_delta),
1928 NULL);
3e293154
MJ
1929 if (rec != rec2)
1930 return;
1931
1932 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1933 if (index >= 0
1934 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1935 {
1936 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1937 cs->indirect_info->offset = offset;
1938 cs->indirect_info->agg_contents = 1;
c13bc3d9 1939 cs->indirect_info->member_ptr = 1;
8b7773a4 1940 }
3e293154
MJ
1941
1942 return;
1943}
1944
b258210c
MJ
1945/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1946 object referenced in the expression is a formal parameter of the caller
1947 (described by INFO), create a call note for the statement. */
1948
1949static void
1950ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1951 struct ipa_node_params *info, gimple call,
1952 tree target)
1953{
40591473
MJ
1954 struct cgraph_edge *cs;
1955 struct cgraph_indirect_call_info *ii;
f65cf2b7 1956 struct ipa_jump_func jfunc;
b258210c 1957 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1958 int index;
40591473 1959 HOST_WIDE_INT anc_offset;
b258210c 1960
05842ff5
MJ
1961 if (!flag_devirtualize)
1962 return;
1963
40591473 1964 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1965 return;
1966
40591473
MJ
1967 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1968 {
1969 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1970 return;
b258210c 1971
40591473
MJ
1972 anc_offset = 0;
1973 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1974 gcc_assert (index >= 0);
06d65050
JH
1975 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1976 call, &jfunc))
40591473
MJ
1977 return;
1978 }
1979 else
1980 {
1981 gimple stmt = SSA_NAME_DEF_STMT (obj);
1982 tree expr;
1983
1984 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1985 if (!expr)
1986 return;
1987 index = ipa_get_param_decl_index (info,
1988 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1989 gcc_assert (index >= 0);
06d65050
JH
1990 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1991 call, &jfunc, anc_offset))
40591473
MJ
1992 return;
1993 }
1994
1995 cs = ipa_note_param_call (node, index, call);
1996 ii = cs->indirect_info;
8b7773a4 1997 ii->offset = anc_offset;
40591473 1998 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
c49bdb2e 1999 ii->otr_type = obj_type_ref_class (target);
40591473 2000 ii->polymorphic = 1;
b258210c
MJ
2001}
2002
2003/* Analyze the call statement CALL to see whether and how it uses formal parameters
c419671c 2004 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2005 containing intermediate information about each formal parameter. */
b258210c
MJ
2006
2007static void
2008ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2009 struct ipa_node_params *info,
c419671c 2010 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2011{
2012 tree target = gimple_call_fn (call);
2013
25583c4f
RS
2014 if (!target)
2015 return;
b258210c 2016 if (TREE_CODE (target) == SSA_NAME)
c419671c 2017 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2018 else if (virtual_method_call_p (target))
b258210c
MJ
2019 ipa_analyze_virtual_call_uses (node, info, call, target);
2020}
2021
2022
e33c6cd6
MJ
2023/* Analyze the call statement STMT with respect to formal parameters (described
2024 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2025 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2026 intermediate information about each formal parameter. */
be95e2b9 2027
3e293154 2028static void
e33c6cd6 2029ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2030 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2031{
726a989a 2032 if (is_gimple_call (stmt))
c419671c 2033 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2034}
2035
2036/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2037 If OP is a parameter declaration, mark it as used in the info structure
2038 passed in DATA. */
2039
2040static bool
2041visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2042 tree op, void *data)
2043{
2044 struct ipa_node_params *info = (struct ipa_node_params *) data;
2045
2046 op = get_base_address (op);
2047 if (op
2048 && TREE_CODE (op) == PARM_DECL)
2049 {
2050 int index = ipa_get_param_decl_index (info, op);
2051 gcc_assert (index >= 0);
310bc633 2052 ipa_set_param_used (info, index, true);
062c604f
MJ
2053 }
2054
2055 return false;
3e293154
MJ
2056}
2057
2058/* Scan the function body of NODE and inspect the uses of formal parameters.
2059 Store the findings in various structures of the associated ipa_node_params
c419671c 2060 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2061 vector containing intermediate information about each formal parameter. */
be95e2b9 2062
062c604f
MJ
2063static void
2064ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2065 struct param_analysis_info *parms_ainfo)
3e293154 2066{
67348ccc 2067 tree decl = node->decl;
3e293154
MJ
2068 basic_block bb;
2069 struct function *func;
726a989a 2070 gimple_stmt_iterator gsi;
3e293154 2071 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2072 int i;
3e293154 2073
726a989a 2074 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2075 return;
3e293154 2076
5fe8e757
MJ
2077 info->uses_analysis_done = 1;
2078 if (ipa_func_spec_opts_forbid_analysis_p (node))
2079 {
2080 for (i = 0; i < ipa_get_param_count (info); i++)
2081 {
2082 ipa_set_param_used (info, i, true);
2083 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2084 }
2085 return;
2086 }
2087
062c604f
MJ
2088 for (i = 0; i < ipa_get_param_count (info); i++)
2089 {
2090 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2091 int controlled_uses = 0;
2092
062c604f
MJ
2093 /* For SSA regs see if parameter is used. For non-SSA we compute
2094 the flag during modification analysis. */
4502fe8d
MJ
2095 if (is_gimple_reg (parm))
2096 {
67348ccc 2097 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2098 parm);
2099 if (ddef && !has_zero_uses (ddef))
2100 {
2101 imm_use_iterator imm_iter;
2102 use_operand_p use_p;
2103
2104 ipa_set_param_used (info, i, true);
2105 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2106 if (!is_gimple_call (USE_STMT (use_p)))
2107 {
2108 controlled_uses = IPA_UNDESCRIBED_USE;
2109 break;
2110 }
2111 else
2112 controlled_uses++;
2113 }
2114 else
2115 controlled_uses = 0;
2116 }
2117 else
2118 controlled_uses = IPA_UNDESCRIBED_USE;
2119 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2120 }
2121
3e293154
MJ
2122 func = DECL_STRUCT_FUNCTION (decl);
2123 FOR_EACH_BB_FN (bb, func)
2124 {
726a989a 2125 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2126 {
726a989a 2127 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2128
2129 if (is_gimple_debug (stmt))
2130 continue;
2131
c419671c 2132 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2133 walk_stmt_load_store_addr_ops (stmt, info,
2134 visit_ref_for_mod_analysis,
2135 visit_ref_for_mod_analysis,
2136 visit_ref_for_mod_analysis);
518dc859 2137 }
355a7673 2138 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2139 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2140 visit_ref_for_mod_analysis,
2141 visit_ref_for_mod_analysis,
2142 visit_ref_for_mod_analysis);
518dc859 2143 }
3e293154
MJ
2144}
2145
2c9561b5
MJ
2146/* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2147
2148static void
2149free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2150{
2151 int i;
2152
2153 for (i = 0; i < param_count; i++)
2154 {
2155 if (parms_ainfo[i].parm_visited_statements)
2156 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2157 if (parms_ainfo[i].pt_visited_statements)
2158 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2159 }
2160}
2161
dd5a833e
MS
2162/* Initialize the array describing properties of formal parameters
2163 of NODE, analyze their uses and compute jump functions associated
2164 with actual arguments of calls from within NODE. */
062c604f
MJ
2165
2166void
2167ipa_analyze_node (struct cgraph_node *node)
2168{
57dbdc5a 2169 struct ipa_node_params *info;
c419671c 2170 struct param_analysis_info *parms_ainfo;
2c9561b5 2171 int param_count;
062c604f 2172
57dbdc5a
MJ
2173 ipa_check_create_node_params ();
2174 ipa_check_create_edge_args ();
2175 info = IPA_NODE_REF (node);
67348ccc 2176 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2177 ipa_initialize_node_params (node);
2178
2179 param_count = ipa_get_param_count (info);
c419671c
MJ
2180 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2181 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2182
c419671c
MJ
2183 ipa_analyze_params_uses (node, parms_ainfo);
2184 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2185
2c9561b5 2186 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2187 pop_cfun ();
062c604f
MJ
2188}
2189
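/* Sketch of how the analysis above is usually driven (an illustration under
   the assumption that the caller walks the call graph itself; this is not
   part of the original sources):

     struct cgraph_node *node;

     FOR_EACH_DEFINED_FUNCTION (node)
       if (cgraph_function_with_gimple_body_p (node))
	 ipa_analyze_node (node);

   ipa_analyze_node itself takes care of creating the parameter and edge
   argument vectors if they do not exist yet.  */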
e248d83f
MJ
2190/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2191 attempt a type-based devirtualization. If successful, return the
2192 target function declaration, otherwise return NULL. */
2193
2194tree
2195ipa_intraprocedural_devirtualization (gimple call)
2196{
2197 tree binfo, token, fndecl;
2198 struct ipa_jump_func jfunc;
2199 tree otr = gimple_call_fn (call);
2200
2201 jfunc.type = IPA_JF_UNKNOWN;
2202 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2203 call, obj_type_ref_class (otr));
e248d83f
MJ
2204 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2205 return NULL_TREE;
2206 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2207 if (!binfo)
2208 return NULL_TREE;
2209 token = OBJ_TYPE_REF_TOKEN (otr);
2210 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2211 binfo);
450ad0cd
JH
2212#ifdef ENABLE_CHECKING
2213 if (fndecl)
2214 gcc_assert (possible_polymorphic_call_target_p
2215 (otr, cgraph_get_node (fndecl)));
2216#endif
e248d83f
MJ
2217 return fndecl;
2218}
062c604f 2219
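/* Example of intended use of the function above (illustration only, not from
   the original sources): a GIMPLE pass that encounters an OBJ_TYPE_REF call
   might try

     tree fndecl = ipa_intraprocedural_devirtualization (stmt);
     if (fndecl)
       gimple_call_set_fndecl (stmt, fndecl);

   turning the virtual call into a direct one whenever the known-type
   information pins down a single target.  */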
61502ca8 2220/* Update the jump function DST when the call graph edge corresponding to SRC is
b258210c
MJ
2221 being inlined, knowing that DST is of type ancestor and SRC of known
2222 type. */
2223
2224static void
2225combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2226 struct ipa_jump_func *dst)
2227{
c7573249
MJ
2228 HOST_WIDE_INT combined_offset;
2229 tree combined_type;
b258210c 2230
b8f6e610
MJ
2231 if (!ipa_get_jf_ancestor_type_preserved (dst))
2232 {
2233 dst->type = IPA_JF_UNKNOWN;
2234 return;
2235 }
2236
7b872d9e
MJ
2237 combined_offset = ipa_get_jf_known_type_offset (src)
2238 + ipa_get_jf_ancestor_offset (dst);
2239 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2240
7b872d9e
MJ
2241 ipa_set_jf_known_type (dst, combined_offset,
2242 ipa_get_jf_known_type_base_type (src),
2243 combined_type);
b258210c
MJ
2244}
2245
be95e2b9 2246/* Update the jump functions associated with call graph edge E when the call
3e293154 2247 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2248 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2249
3e293154
MJ
2250static void
2251update_jump_functions_after_inlining (struct cgraph_edge *cs,
2252 struct cgraph_edge *e)
2253{
2254 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2255 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2256 int count = ipa_get_cs_argument_count (args);
2257 int i;
2258
2259 for (i = 0; i < count; i++)
2260 {
b258210c 2261 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2262
685b0d13
MJ
2263 if (dst->type == IPA_JF_ANCESTOR)
2264 {
b258210c 2265 struct ipa_jump_func *src;
8b7773a4 2266 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2267
b258210c
MJ
2268 /* Variable number of arguments can cause havoc if we try to access
2269 one that does not exist in the inlined edge. So make sure we
2270 don't. */
8b7773a4 2271 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2272 {
2273 dst->type = IPA_JF_UNKNOWN;
2274 continue;
2275 }
2276
8b7773a4
MJ
2277 src = ipa_get_ith_jump_func (top, dst_fid);
2278
2279 if (src->agg.items
2280 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2281 {
2282 struct ipa_agg_jf_item *item;
2283 int j;
2284
2285 /* Currently we do not produce clobber aggregate jump functions,
2286 replace with merging when we do. */
2287 gcc_assert (!dst->agg.items);
2288
9771b263 2289 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2290 dst->agg.by_ref = src->agg.by_ref;
9771b263 2291 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2292 item->offset -= dst->value.ancestor.offset;
2293 }
2294
b258210c
MJ
2295 if (src->type == IPA_JF_KNOWN_TYPE)
2296 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2297 else if (src->type == IPA_JF_PASS_THROUGH
2298 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2299 {
2300 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2301 dst->value.ancestor.agg_preserved &=
2302 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2303 dst->value.ancestor.type_preserved &=
2304 src->value.pass_through.type_preserved;
8b7773a4 2305 }
b258210c
MJ
2306 else if (src->type == IPA_JF_ANCESTOR)
2307 {
2308 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2309 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2310 dst->value.ancestor.agg_preserved &=
2311 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2312 dst->value.ancestor.type_preserved &=
2313 src->value.ancestor.type_preserved;
b258210c
MJ
2314 }
2315 else
2316 dst->type = IPA_JF_UNKNOWN;
2317 }
2318 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2319 {
b258210c
MJ
2320 struct ipa_jump_func *src;
2321 /* We must check range due to calls with variable number of arguments
2322 and we cannot combine jump functions with operations. */
2323 if (dst->value.pass_through.operation == NOP_EXPR
2324 && (dst->value.pass_through.formal_id
2325 < ipa_get_cs_argument_count (top)))
2326 {
8b7773a4
MJ
2327 int dst_fid = dst->value.pass_through.formal_id;
2328 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2329 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2330
b8f6e610
MJ
2331 switch (src->type)
2332 {
2333 case IPA_JF_UNKNOWN:
2334 dst->type = IPA_JF_UNKNOWN;
2335 break;
2336 case IPA_JF_KNOWN_TYPE:
2337 ipa_set_jf_known_type (dst,
2338 ipa_get_jf_known_type_offset (src),
2339 ipa_get_jf_known_type_base_type (src),
2340 ipa_get_jf_known_type_base_type (src));
2341 break;
2342 case IPA_JF_CONST:
2343 ipa_set_jf_cst_copy (dst, src);
2344 break;
2345
2346 case IPA_JF_PASS_THROUGH:
2347 {
2348 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2349 enum tree_code operation;
2350 operation = ipa_get_jf_pass_through_operation (src);
2351
2352 if (operation == NOP_EXPR)
2353 {
2354 bool agg_p, type_p;
2355 agg_p = dst_agg_p
2356 && ipa_get_jf_pass_through_agg_preserved (src);
2357 type_p = ipa_get_jf_pass_through_type_preserved (src)
2358 && ipa_get_jf_pass_through_type_preserved (dst);
2359 ipa_set_jf_simple_pass_through (dst, formal_id,
2360 agg_p, type_p);
2361 }
2362 else
2363 {
2364 tree operand = ipa_get_jf_pass_through_operand (src);
2365 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2366 operation);
2367 }
2368 break;
2369 }
2370 case IPA_JF_ANCESTOR:
2371 {
2372 bool agg_p, type_p;
2373 agg_p = dst_agg_p
2374 && ipa_get_jf_ancestor_agg_preserved (src);
2375 type_p = ipa_get_jf_ancestor_type_preserved (src)
2376 && ipa_get_jf_pass_through_type_preserved (dst);
2377 ipa_set_ancestor_jf (dst,
2378 ipa_get_jf_ancestor_offset (src),
2379 ipa_get_jf_ancestor_type (src),
2380 ipa_get_jf_ancestor_formal_id (src),
2381 agg_p, type_p);
2382 break;
2383 }
2384 default:
2385 gcc_unreachable ();
2386 }
8b7773a4
MJ
2387
2388 if (src->agg.items
b8f6e610 2389 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2390 {
2391 /* Currently we do not produce clobber aggregate jump
2392 functions, replace with merging when we do. */
2393 gcc_assert (!dst->agg.items);
2394
2395 dst->agg.by_ref = src->agg.by_ref;
9771b263 2396 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2397 }
b258210c
MJ
2398 }
2399 else
2400 dst->type = IPA_JF_UNKNOWN;
3e293154 2401 }
b258210c
MJ
2402 }
2403}
2404
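/* A small worked example of the combination above (illustrative, not part of
   the original sources): suppose the inlined call site CS passed the second
   formal parameter of its caller unchanged as its first argument, and an edge
   E leaving the inlined body has a simple pass-through jump function
   referring to formal 0.  After the loop above, E's jump function becomes a
   simple pass-through referring to formal 1 of the new root, with its
   agg_preserved and type_preserved flags being the conjunction of the flags
   of the two combined functions.  */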
2405/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2406 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2407
3949c4a7 2408struct cgraph_edge *
81fa35bd 2409ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2410{
2411 struct cgraph_node *callee;
0f378cb5 2412 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2413 bool unreachable = false;
b258210c 2414
ceeffab0
MJ
2415 if (TREE_CODE (target) == ADDR_EXPR)
2416 target = TREE_OPERAND (target, 0);
b258210c 2417 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2418 {
2419 target = canonicalize_constructor_val (target, NULL);
2420 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2421 {
c13bc3d9
MJ
2422 if (ie->indirect_info->member_ptr)
2423 /* Member pointer call that goes through a VMT lookup. */
2424 return NULL;
2425
a0a7b611
JH
2426 if (dump_file)
2427 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2428 " in %s/%i, making it unreachable.\n",
fec39fa6 2429 ie->caller->name (), ie->caller->order);
48b1474e
MJ
2430 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2431 callee = cgraph_get_create_node (target);
2432 unreachable = true;
a0a7b611 2433 }
48b1474e
MJ
2434 else
2435 callee = cgraph_get_node (target);
a0a7b611 2436 }
48b1474e
MJ
2437 else
2438 callee = cgraph_get_node (target);
a0a7b611
JH
2439
2440 /* Because may-edges are not explicitly represented and the vtable may be external,
2441 we may create the first reference to the object in the unit. */
2442 if (!callee || callee->global.inlined_to)
2443 {
a0a7b611
JH
2444
2445 /* We had better ensure that we can refer to it.
2446 In the case of static functions we are out of luck, since we already
2447 removed its body. In the case of public functions we may or may
2448 not introduce the reference. */
2449 if (!canonicalize_constructor_val (target, NULL)
2450 || !TREE_PUBLIC (target))
2451 {
2452 if (dump_file)
2453 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2454 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
fec39fa6 2455 xstrdup (ie->caller->name ()),
67348ccc 2456 ie->caller->order,
fec39fa6 2457 xstrdup (ie->callee->name ()),
67348ccc 2458 ie->callee->order);
a0a7b611
JH
2459 return NULL;
2460 }
6f99e449 2461 callee = cgraph_get_create_node (target);
a0a7b611 2462 }
1dbee8c9 2463 ipa_check_create_node_params ();
ceeffab0 2464
81fa35bd
MJ
2465 /* We can not make edges to inline clones. It is a bug that someone removed
2466 the cgraph node too early. */
17afc0fe
JH
2467 gcc_assert (!callee->global.inlined_to);
2468
48b1474e 2469 if (dump_file && !unreachable)
b258210c
MJ
2470 {
2471 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2472 "(%s/%i -> %s/%i), for stmt ",
b258210c 2473 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
fec39fa6 2474 xstrdup (ie->caller->name ()),
67348ccc 2475 ie->caller->order,
fec39fa6 2476 xstrdup (callee->name ()),
67348ccc 2477 callee->order);
b258210c
MJ
2478 if (ie->call_stmt)
2479 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2480 else
2481 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2482 }
2483 ie = cgraph_make_edge_direct (ie, callee);
2484 es = inline_edge_summary (ie);
2485 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2486 - eni_size_weights.call_cost);
2487 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2488 - eni_time_weights.call_cost);
749aa96d 2489
b258210c 2490 return ie;
3e293154
MJ
2491}
2492
8b7773a4
MJ
2493/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2494 return NULL if there is not any. BY_REF specifies whether the value has to
2495 be passed by reference or by value. */
2496
2497tree
2498ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2499 HOST_WIDE_INT offset, bool by_ref)
2500{
2501 struct ipa_agg_jf_item *item;
2502 int i;
2503
2504 if (by_ref != agg->by_ref)
2505 return NULL;
2506
9771b263 2507 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2508 if (item->offset == offset)
2509 {
2510 /* Currently we do not have clobber values, return NULL for them once
2511 we do. */
2512 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2513 return item->value;
2514 }
8b7773a4
MJ
2515 return NULL;
2516}
2517
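/* Typical use of the lookup above (illustration only; OFF stands for whatever
   offset the indirect call information has recorded):

     tree target = ipa_find_agg_cst_for_param (&jfunc->agg, OFF, true);

   which yields the constant known to be stored OFF bits into the aggregate
   passed by reference, or NULL if nothing is known about that location.  */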
4502fe8d 2518/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2519 reference description RDESC. Return true if the reference has been
2520 successfully found and removed. */
4502fe8d 2521
568cda29 2522static bool
5e20cdc9 2523remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2524{
2525 struct ipa_ref *to_del;
2526 struct cgraph_edge *origin;
2527
2528 origin = rdesc->cs;
a854f856
MJ
2529 if (!origin)
2530 return false;
67348ccc 2531 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2532 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2533 if (!to_del)
2534 return false;
2535
4502fe8d
MJ
2536 ipa_remove_reference (to_del);
2537 if (dump_file)
2538 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
2539 xstrdup (origin->caller->name ()),
2540 origin->caller->order, xstrdup (symbol->name ()));
568cda29 2541 return true;
4502fe8d
MJ
2542}
2543
2544/* If JFUNC has a reference description with refcount different from
2545 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2546 NULL. JFUNC must be a constant jump function. */
2547
2548static struct ipa_cst_ref_desc *
2549jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2550{
2551 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2552 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2553 return rdesc;
2554 else
2555 return NULL;
2556}
2557
568cda29
MJ
2558/* If the value of constant jump function JFUNC is an address of a function
2559 declaration, return the associated call graph node. Otherwise return
2560 NULL. */
2561
2562static cgraph_node *
2563cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2564{
2565 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2566 tree cst = ipa_get_jf_constant (jfunc);
2567 if (TREE_CODE (cst) != ADDR_EXPR
2568 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2569 return NULL;
2570
2571 return cgraph_get_node (TREE_OPERAND (cst, 0));
2572}
2573
2574
2575/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2576 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2577 the edge specified in the rdesc. Return false if either the symbol or the
2578 reference could not be found, otherwise return true. */
2579
2580static bool
2581try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2582{
2583 struct ipa_cst_ref_desc *rdesc;
2584 if (jfunc->type == IPA_JF_CONST
2585 && (rdesc = jfunc_rdesc_usable (jfunc))
2586 && --rdesc->refcount == 0)
2587 {
5e20cdc9 2588 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2589 if (!symbol)
2590 return false;
2591
2592 return remove_described_reference (symbol, rdesc);
2593 }
2594 return true;
2595}
2596
b258210c
MJ
2597/* Try to find a destination for indirect edge IE that corresponds to a simple
2598 call or a call of a member function pointer and where the destination is a
2599 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2600 determined, return the newly direct edge, otherwise return NULL.
2601 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2602
b258210c
MJ
2603static struct cgraph_edge *
2604try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2605 struct ipa_jump_func *jfunc,
2606 struct ipa_node_params *new_root_info)
b258210c 2607{
4502fe8d 2608 struct cgraph_edge *cs;
b258210c 2609 tree target;
042ae7d2 2610 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2611
8b7773a4 2612 if (ie->indirect_info->agg_contents)
d250540a
MJ
2613 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2614 ie->indirect_info->offset,
2615 ie->indirect_info->by_ref);
b258210c 2616 else
d250540a
MJ
2617 target = ipa_value_from_jfunc (new_root_info, jfunc);
2618 if (!target)
2619 return NULL;
4502fe8d
MJ
2620 cs = ipa_make_edge_direct_to_target (ie, target);
2621
a12cd2db 2622 if (cs && !agg_contents)
568cda29
MJ
2623 {
2624 bool ok;
2625 gcc_checking_assert (cs->callee
ae6d0907
MJ
2626 && (cs != ie
2627 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2628 || !cgraph_node_for_jfunc (jfunc)
2629 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2630 ok = try_decrement_rdesc_refcount (jfunc);
2631 gcc_checking_assert (ok);
2632 }
4502fe8d
MJ
2633
2634 return cs;
b258210c
MJ
2635}
2636
d250540a
MJ
2637/* Try to find a destination for indirect edge IE that corresponds to a virtual
2638 call based on a formal parameter which is described by jump function JFUNC
2639 and if it can be determined, make it direct and return the direct edge.
2640 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2641 are relative to. */
b258210c
MJ
2642
2643static struct cgraph_edge *
2644try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2645 struct ipa_jump_func *jfunc,
2646 struct ipa_node_params *new_root_info)
3e293154 2647{
c7573249 2648 tree binfo, target;
b258210c 2649
d250540a
MJ
2650 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2651
da942ca0 2652 if (!binfo)
b258210c 2653 return NULL;
3e293154 2654
da942ca0
JH
2655 if (TREE_CODE (binfo) != TREE_BINFO)
2656 {
c49bdb2e
JH
2657 binfo = gimple_extract_devirt_binfo_from_cst
2658 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2659 if (!binfo)
2660 return NULL;
2661 }
2662
d250540a 2663 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2664 ie->indirect_info->otr_type);
b258210c 2665 if (binfo)
c7573249
MJ
2666 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2667 binfo);
b258210c
MJ
2668 else
2669 return NULL;
2670
2671 if (target)
450ad0cd
JH
2672 {
2673#ifdef ENABLE_CHECKING
2674 gcc_assert (possible_polymorphic_call_target_p
2675 (ie, cgraph_get_node (target)));
2676#endif
2677 return ipa_make_edge_direct_to_target (ie, target);
2678 }
b258210c
MJ
2679 else
2680 return NULL;
3e293154
MJ
2681}
2682
2683/* Update the param called notes associated with NODE when CS is being inlined,
2684 assuming NODE is (potentially indirectly) inlined into CS->callee.
2685 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2686 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2687 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 2688
f8e2a1ed 2689static bool
e33c6cd6
MJ
2690update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2691 struct cgraph_node *node,
9771b263 2692 vec<cgraph_edge_p> *new_edges)
3e293154 2693{
9e97ff61 2694 struct ipa_edge_args *top;
b258210c 2695 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2696 struct ipa_node_params *new_root_info;
f8e2a1ed 2697 bool res = false;
3e293154 2698
e33c6cd6 2699 ipa_check_create_edge_args ();
9e97ff61 2700 top = IPA_EDGE_REF (cs);
d250540a
MJ
2701 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2702 ? cs->caller->global.inlined_to
2703 : cs->caller);
e33c6cd6
MJ
2704
2705 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2706 {
e33c6cd6 2707 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2708 struct ipa_jump_func *jfunc;
8b7773a4 2709 int param_index;
3e293154 2710
e33c6cd6 2711 next_ie = ie->next_callee;
3e293154 2712
5f902d76
JH
2713 if (ici->param_index == -1)
2714 continue;
e33c6cd6 2715
3e293154 2716 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2717 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2718 {
5ee53a06 2719 ici->param_index = -1;
3e293154
MJ
2720 continue;
2721 }
2722
8b7773a4
MJ
2723 param_index = ici->param_index;
2724 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2725
2726 if (!flag_indirect_inlining)
36b72910
JH
2727 new_direct_edge = NULL;
2728 else if (ici->polymorphic)
d250540a
MJ
2729 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2730 new_root_info);
b258210c 2731 else
d250540a
MJ
2732 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2733 new_root_info);
042ae7d2
JH
2734 /* If speculation was removed, then we need to do nothing. */
2735 if (new_direct_edge && new_direct_edge != ie)
2736 {
2737 new_direct_edge->indirect_inlining_edge = 1;
2738 top = IPA_EDGE_REF (cs);
2739 res = true;
2740 }
2741 else if (new_direct_edge)
685b0d13 2742 {
b258210c 2743 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2744 if (new_direct_edge->call_stmt)
2745 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2746 = !gimple_check_call_matching_types (
2747 new_direct_edge->call_stmt,
67348ccc 2748 new_direct_edge->callee->decl, false);
b258210c
MJ
2749 if (new_edges)
2750 {
9771b263 2751 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2752 res = true;
2753 }
042ae7d2 2754 top = IPA_EDGE_REF (cs);
685b0d13 2755 }
36b72910
JH
2756 else if (jfunc->type == IPA_JF_PASS_THROUGH
2757 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2758 {
2759 if (ici->agg_contents
2760 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2761 ici->param_index = -1;
2762 else
2763 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2764 }
2765 else if (jfunc->type == IPA_JF_ANCESTOR)
2766 {
2767 if (ici->agg_contents
2768 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2769 ici->param_index = -1;
2770 else
2771 {
2772 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2773 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2774 }
2775 }
2776 else
2777 /* Either we can find a destination for this edge now or never. */
2778 ici->param_index = -1;
3e293154 2779 }
e33c6cd6 2780
f8e2a1ed 2781 return res;
3e293154
MJ
2782}
2783
2784/* Recursively traverse subtree of NODE (including node) made of inlined
2785 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2786 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2787 update_jump_functions_after_inlining on all non-inlined edges that lead out
2788 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2789 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2790 created. */
be95e2b9 2791
f8e2a1ed 2792static bool
3e293154
MJ
2793propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2794 struct cgraph_node *node,
9771b263 2795 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2796{
2797 struct cgraph_edge *e;
f8e2a1ed 2798 bool res;
3e293154 2799
e33c6cd6 2800 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2801
2802 for (e = node->callees; e; e = e->next_callee)
2803 if (!e->inline_failed)
f8e2a1ed 2804 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2805 else
2806 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2807 for (e = node->indirect_calls; e; e = e->next_callee)
2808 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2809
2810 return res;
3e293154
MJ
2811}
2812
4502fe8d
MJ
2813/* Combine two controlled uses counts as done during inlining. */
2814
2815static int
2816combine_controlled_uses_counters (int c, int d)
2817{
2818 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2819 return IPA_UNDESCRIBED_USE;
2820 else
2821 return c + d - 1;
2822}
2823
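/* For illustration (not part of the original sources): if the caller's
   parameter had c == 3 described uses, one of them being the argument of the
   call site that has just been inlined, and the callee's parameter had d == 2
   described uses, the combined count is 3 + 2 - 1 == 4, because the use at
   the inlined call site is no longer counted separately.  An
   IPA_UNDESCRIBED_USE on either side keeps the result undescribed.  */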
2824/* Propagate the number of controlled users from CS->callee to the new root of the
2825 tree of inlined nodes. */
2826
2827static void
2828propagate_controlled_uses (struct cgraph_edge *cs)
2829{
2830 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2831 struct cgraph_node *new_root = cs->caller->global.inlined_to
2832 ? cs->caller->global.inlined_to : cs->caller;
2833 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2834 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2835 int count, i;
2836
2837 count = MIN (ipa_get_cs_argument_count (args),
2838 ipa_get_param_count (old_root_info));
2839 for (i = 0; i < count; i++)
2840 {
2841 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2842 struct ipa_cst_ref_desc *rdesc;
2843
2844 if (jf->type == IPA_JF_PASS_THROUGH)
2845 {
2846 int src_idx, c, d;
2847 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2848 c = ipa_get_controlled_uses (new_root_info, src_idx);
2849 d = ipa_get_controlled_uses (old_root_info, i);
2850
2851 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2852 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2853 c = combine_controlled_uses_counters (c, d);
2854 ipa_set_controlled_uses (new_root_info, src_idx, c);
2855 if (c == 0 && new_root_info->ipcp_orig_node)
2856 {
2857 struct cgraph_node *n;
2858 struct ipa_ref *ref;
2859 tree t = new_root_info->known_vals[src_idx];
2860
2861 if (t && TREE_CODE (t) == ADDR_EXPR
2862 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2863 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2864 && (ref = ipa_find_reference (new_root,
2865 n, NULL, 0)))
4502fe8d
MJ
2866 {
2867 if (dump_file)
2868 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2869 "reference from %s/%i to %s/%i.\n",
fec39fa6 2870 xstrdup (new_root->name ()),
67348ccc 2871 new_root->order,
fec39fa6 2872 xstrdup (n->name ()), n->order);
4502fe8d
MJ
2873 ipa_remove_reference (ref);
2874 }
2875 }
2876 }
2877 else if (jf->type == IPA_JF_CONST
2878 && (rdesc = jfunc_rdesc_usable (jf)))
2879 {
2880 int d = ipa_get_controlled_uses (old_root_info, i);
2881 int c = rdesc->refcount;
2882 rdesc->refcount = combine_controlled_uses_counters (c, d);
2883 if (rdesc->refcount == 0)
2884 {
2885 tree cst = ipa_get_jf_constant (jf);
2886 struct cgraph_node *n;
2887 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2888 && TREE_CODE (TREE_OPERAND (cst, 0))
2889 == FUNCTION_DECL);
2890 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2891 if (n)
2892 {
2893 struct cgraph_node *clone;
568cda29 2894 bool ok;
67348ccc 2895 ok = remove_described_reference (n, rdesc);
568cda29 2896 gcc_checking_assert (ok);
4502fe8d
MJ
2897
2898 clone = cs->caller;
2899 while (clone->global.inlined_to
2900 && clone != rdesc->cs->caller
2901 && IPA_NODE_REF (clone)->ipcp_orig_node)
2902 {
2903 struct ipa_ref *ref;
67348ccc
DM
2904 ref = ipa_find_reference (clone,
2905 n, NULL, 0);
4502fe8d
MJ
2906 if (ref)
2907 {
2908 if (dump_file)
2909 fprintf (dump_file, "ipa-prop: Removing "
2910 "cloning-created reference "
2911 "from %s/%i to %s/%i.\n",
fec39fa6 2912 xstrdup (clone->name ()),
67348ccc 2913 clone->order,
fec39fa6 2914 xstrdup (n->name ()),
67348ccc 2915 n->order);
4502fe8d
MJ
2916 ipa_remove_reference (ref);
2917 }
2918 clone = clone->callers->caller;
2919 }
2920 }
2921 }
2922 }
2923 }
2924
2925 for (i = ipa_get_param_count (old_root_info);
2926 i < ipa_get_cs_argument_count (args);
2927 i++)
2928 {
2929 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2930
2931 if (jf->type == IPA_JF_CONST)
2932 {
2933 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2934 if (rdesc)
2935 rdesc->refcount = IPA_UNDESCRIBED_USE;
2936 }
2937 else if (jf->type == IPA_JF_PASS_THROUGH)
2938 ipa_set_controlled_uses (new_root_info,
2939 jf->value.pass_through.formal_id,
2940 IPA_UNDESCRIBED_USE);
2941 }
2942}
2943
3e293154
MJ
2944/* Update jump functions and call note functions on inlining the call site CS.
2945 CS is expected to lead to a node already cloned by
2946 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2947 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2948 created. */
be95e2b9 2949
f8e2a1ed 2950bool
3e293154 2951ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2952 vec<cgraph_edge_p> *new_edges)
3e293154 2953{
5ee53a06 2954 bool changed;
f8e2a1ed
MJ
2955 /* Do nothing if the preparation phase has not been carried out yet
2956 (i.e. during early inlining). */
9771b263 2957 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2958 return false;
2959 gcc_assert (ipa_edge_args_vector);
2960
4502fe8d 2961 propagate_controlled_uses (cs);
5ee53a06
JH
2962 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2963
5ee53a06 2964 return changed;
518dc859
RL
2965}
2966
771578a0
MJ
2967/* Frees all dynamically allocated structures that the argument info points
2968 to. */
be95e2b9 2969
518dc859 2970void
771578a0 2971ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2972{
9771b263 2973 vec_free (args->jump_functions);
771578a0 2974 memset (args, 0, sizeof (*args));
518dc859
RL
2975}
2976
771578a0 2977/* Free all ipa_edge structures. */
be95e2b9 2978
518dc859 2979void
771578a0 2980ipa_free_all_edge_args (void)
518dc859 2981{
771578a0
MJ
2982 int i;
2983 struct ipa_edge_args *args;
518dc859 2984
9771b263
DN
2985 if (!ipa_edge_args_vector)
2986 return;
2987
2988 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2989 ipa_free_edge_args_substructures (args);
2990
9771b263 2991 vec_free (ipa_edge_args_vector);
518dc859
RL
2992}
2993
771578a0
MJ
2994/* Frees all dynamically allocated structures that the param info points
2995 to. */
be95e2b9 2996
518dc859 2997void
771578a0 2998ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 2999{
9771b263 3000 info->descriptors.release ();
310bc633
MJ
3001 free (info->lattices);
3002 /* Lattice values and their sources are deallocated with their allocation
3003 pool. */
9771b263 3004 info->known_vals.release ();
771578a0 3005 memset (info, 0, sizeof (*info));
518dc859
RL
3006}
3007
771578a0 3008/* Free all ipa_node_params structures. */
be95e2b9 3009
518dc859 3010void
771578a0 3011ipa_free_all_node_params (void)
518dc859 3012{
771578a0
MJ
3013 int i;
3014 struct ipa_node_params *info;
518dc859 3015
9771b263 3016 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3017 ipa_free_node_params_substructures (info);
3018
9771b263 3019 ipa_node_params_vector.release ();
771578a0
MJ
3020}
3021
2c9561b5
MJ
3022/* Set the aggregate replacements of NODE to be AGGVALS. */
3023
3024void
3025ipa_set_node_agg_value_chain (struct cgraph_node *node,
3026 struct ipa_agg_replacement_value *aggvals)
3027{
9771b263
DN
3028 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3029 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3030
9771b263 3031 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3032}
3033
771578a0 3034/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3035
771578a0 3036static void
5c0466b5 3037ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3038{
568cda29
MJ
3039 struct ipa_edge_args *args;
3040
3041 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3042 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3043 return;
568cda29
MJ
3044
3045 args = IPA_EDGE_REF (cs);
3046 if (args->jump_functions)
3047 {
3048 struct ipa_jump_func *jf;
3049 int i;
3050 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3051 {
3052 struct ipa_cst_ref_desc *rdesc;
3053 try_decrement_rdesc_refcount (jf);
3054 if (jf->type == IPA_JF_CONST
3055 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3056 && rdesc->cs == cs)
3057 rdesc->cs = NULL;
3058 }
568cda29
MJ
3059 }
3060
771578a0 3061 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3062}
3063
771578a0 3064/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3065
771578a0 3066static void
5c0466b5 3067ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3068{
dd6d1ad7 3069 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3070 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3071 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3072 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3073 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3074}
3075
8b7773a4 3076/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3077
771578a0
MJ
3078static void
3079ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3080 __attribute__((unused)) void *data)
771578a0
MJ
3081{
3082 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3083 unsigned int i;
771578a0
MJ
3084
3085 ipa_check_create_edge_args ();
3086
3087 old_args = IPA_EDGE_REF (src);
3088 new_args = IPA_EDGE_REF (dst);
3089
9771b263 3090 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3091
9771b263 3092 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3093 {
3094 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3095 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3096
3097 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3098
3099 if (src_jf->type == IPA_JF_CONST)
3100 {
3101 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3102
3103 if (!src_rdesc)
3104 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3105 else if (src->caller == dst->caller)
3106 {
3107 struct ipa_ref *ref;
5e20cdc9 3108 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3109 gcc_checking_assert (n);
67348ccc 3110 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3111 src->call_stmt, src->lto_stmt_uid);
3112 gcc_checking_assert (ref);
67348ccc 3113 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3114
3115 gcc_checking_assert (ipa_refdesc_pool);
3116 struct ipa_cst_ref_desc *dst_rdesc
3117 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3118 dst_rdesc->cs = dst;
3119 dst_rdesc->refcount = src_rdesc->refcount;
3120 dst_rdesc->next_duplicate = NULL;
3121 dst_jf->value.constant.rdesc = dst_rdesc;
3122 }
4502fe8d
MJ
3123 else if (src_rdesc->cs == src)
3124 {
3125 struct ipa_cst_ref_desc *dst_rdesc;
3126 gcc_checking_assert (ipa_refdesc_pool);
3127 dst_rdesc
3128 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3129 dst_rdesc->cs = dst;
4502fe8d 3130 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3131 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3132 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3133 dst_jf->value.constant.rdesc = dst_rdesc;
3134 }
3135 else
3136 {
3137 struct ipa_cst_ref_desc *dst_rdesc;
3138 /* This can happen during inlining, when a JFUNC can refer to a
3139 reference taken in a function up in the tree of inline clones.
3140 We need to find the duplicate that refers to our tree of
3141 inline clones. */
3142
3143 gcc_assert (dst->caller->global.inlined_to);
3144 for (dst_rdesc = src_rdesc->next_duplicate;
3145 dst_rdesc;
3146 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3147 {
3148 struct cgraph_node *top;
3149 top = dst_rdesc->cs->caller->global.inlined_to
3150 ? dst_rdesc->cs->caller->global.inlined_to
3151 : dst_rdesc->cs->caller;
3152 if (dst->caller->global.inlined_to == top)
3153 break;
3154 }
44a60244 3155 gcc_assert (dst_rdesc);
4502fe8d
MJ
3156 dst_jf->value.constant.rdesc = dst_rdesc;
3157 }
3158 }
3159 }
771578a0
MJ
3160}
3161
3162/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3163
771578a0
MJ
3164static void
3165ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3166 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3167{
3168 struct ipa_node_params *old_info, *new_info;
2c9561b5 3169 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3170
3171 ipa_check_create_node_params ();
3172 old_info = IPA_NODE_REF (src);
3173 new_info = IPA_NODE_REF (dst);
771578a0 3174
9771b263 3175 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3176 new_info->lattices = NULL;
771578a0 3177 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3178
3949c4a7
MJ
3179 new_info->uses_analysis_done = old_info->uses_analysis_done;
3180 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3181
3182 old_av = ipa_get_agg_replacements_for_node (src);
3183 if (!old_av)
3184 return;
3185
3186 new_av = NULL;
3187 while (old_av)
3188 {
3189 struct ipa_agg_replacement_value *v;
3190
3191 v = ggc_alloc_ipa_agg_replacement_value ();
3192 memcpy (v, old_av, sizeof (*v));
3193 v->next = new_av;
3194 new_av = v;
3195 old_av = old_av->next;
3196 }
3197 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3198}
3199
40982661
JH
3200
3201/* Analyze newly added function into callgraph. */
3202
3203static void
3204ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3205{
3206 ipa_analyze_node (node);
3207}
3208
771578a0 3209/* Register our cgraph hooks if they are not already there. */
be95e2b9 3210
518dc859 3211void
771578a0 3212ipa_register_cgraph_hooks (void)
518dc859 3213{
771578a0
MJ
3214 if (!edge_removal_hook_holder)
3215 edge_removal_hook_holder =
3216 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3217 if (!node_removal_hook_holder)
3218 node_removal_hook_holder =
3219 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3220 if (!edge_duplication_hook_holder)
3221 edge_duplication_hook_holder =
3222 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3223 if (!node_duplication_hook_holder)
3224 node_duplication_hook_holder =
3225 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3226 function_insertion_hook_holder =
3227 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3228}
518dc859 3229
771578a0 3230/* Unregister our cgraph hooks. */
be95e2b9 3231
771578a0
MJ
3232static void
3233ipa_unregister_cgraph_hooks (void)
3234{
3235 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3236 edge_removal_hook_holder = NULL;
3237 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3238 node_removal_hook_holder = NULL;
3239 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3240 edge_duplication_hook_holder = NULL;
3241 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3242 node_duplication_hook_holder = NULL;
40982661
JH
3243 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3244 function_insertion_hook_holder = NULL;
771578a0
MJ
3245}
3246
3247/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3248 longer needed after ipa-cp. */
be95e2b9 3249
771578a0 3250void
e33c6cd6 3251ipa_free_all_structures_after_ipa_cp (void)
3e293154 3252{
5ee53a06 3253 if (!optimize)
3e293154
MJ
3254 {
3255 ipa_free_all_edge_args ();
3256 ipa_free_all_node_params ();
310bc633
MJ
3257 free_alloc_pool (ipcp_sources_pool);
3258 free_alloc_pool (ipcp_values_pool);
2c9561b5 3259 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3260 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3261 if (ipa_refdesc_pool)
3262 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3263 }
3264}
3265
3266/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3267 longer needed after indirect inlining. */
be95e2b9 3268
3e293154 3269void
e33c6cd6 3270ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3271{
3272 ipa_free_all_edge_args ();
3273 ipa_free_all_node_params ();
3274 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3275 if (ipcp_sources_pool)
3276 free_alloc_pool (ipcp_sources_pool);
3277 if (ipcp_values_pool)
3278 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3279 if (ipcp_agg_lattice_pool)
3280 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3281 if (ipa_refdesc_pool)
3282 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3283}
3284
dcd416e3 3285/* Print the parameter descriptors recorded in the ipa_node_params of
518dc859 3286 function NODE to F. */
be95e2b9 3287
518dc859 3288void
2c9561b5 3289ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3290{
3291 int i, count;
3e293154 3292 struct ipa_node_params *info;
518dc859 3293
67348ccc 3294 if (!node->definition)
3e293154
MJ
3295 return;
3296 info = IPA_NODE_REF (node);
9de04252 3297 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3298 node->name (), node->order);
3e293154
MJ
3299 count = ipa_get_param_count (info);
3300 for (i = 0; i < count; i++)
518dc859 3301 {
4502fe8d
MJ
3302 int c;
3303
e067bd43 3304 ipa_dump_param (f, info, i);
339f49ec
JH
3305 if (ipa_is_param_used (info, i))
3306 fprintf (f, " used");
4502fe8d
MJ
3307 c = ipa_get_controlled_uses (info, i);
3308 if (c == IPA_UNDESCRIBED_USE)
3309 fprintf (f, " undescribed_use");
3310 else
3311 fprintf (f, " controlled_uses=%i", c);
3e293154 3312 fprintf (f, "\n");
518dc859
RL
3313 }
3314}
dcd416e3 3315
ca30a539 3316/* Print parameter descriptors of all functions in the
3e293154 3317 callgraph to F. */
be95e2b9 3318
3e293154 3319void
ca30a539 3320ipa_print_all_params (FILE * f)
3e293154
MJ
3321{
3322 struct cgraph_node *node;
3323
ca30a539 3324 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3325 FOR_EACH_FUNCTION (node)
ca30a539 3326 ipa_print_node_params (f, node);
3e293154 3327}
3f84bf08
MJ
3328
3329/* Return a heap allocated vector containing formal parameters of FNDECL. */
3330
9771b263 3331vec<tree>
3f84bf08
MJ
3332ipa_get_vector_of_formal_parms (tree fndecl)
3333{
9771b263 3334 vec<tree> args;
3f84bf08
MJ
3335 int count;
3336 tree parm;
3337
0e8853ee 3338 gcc_assert (!flag_wpa);
310bc633 3339 count = count_formal_params (fndecl);
9771b263 3340 args.create (count);
910ad8de 3341 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3342 args.quick_push (parm);
3f84bf08
MJ
3343
3344 return args;
3345}
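/* A minimal usage sketch (hypothetical helper, not used anywhere in this
   file): the vector returned above is heap allocated, so the caller must
   release it when done, as ipa_dump_param_adjustments does below.  The
   result is equivalent to count_formal_params used earlier in this
   function.  */

static int
count_parms_via_vector (tree fndecl)
{
  /* Collect the PARM_DECLs of FNDECL and count them.  */
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  int count = parms.length ();

  parms.release ();
  return count;
}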
3346
3347/* Return a heap allocated vector containing types of formal parameters of
3348 function type FNTYPE. */
3349
9771b263 3350static inline vec<tree>
3f84bf08
MJ
3351get_vector_of_formal_parm_types (tree fntype)
3352{
9771b263 3353 vec<tree> types;
3f84bf08
MJ
3354 int count = 0;
3355 tree t;
3356
3357 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3358 count++;
3359
9771b263 3360 types.create (count);
3f84bf08 3361 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3362 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3363
3364 return types;
3365}
3366
3367/* Modify the function declaration FNDECL and its type according to the plan in
3368 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3369 to reflect the actual parameters being modified which are determined by the
3370 base_index field. */
3371
3372void
3373ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3374 const char *synth_parm_prefix)
3375{
9771b263 3376 vec<tree> oparms, otypes;
3f84bf08
MJ
3377 tree orig_type, new_type = NULL;
3378 tree old_arg_types, t, new_arg_types = NULL;
3379 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 3380 int i, len = adjustments.length ();
3f84bf08
MJ
3381 tree new_reversed = NULL;
3382 bool care_for_types, last_parm_void;
3383
3384 if (!synth_parm_prefix)
3385 synth_parm_prefix = "SYNTH";
3386
3387 oparms = ipa_get_vector_of_formal_parms (fndecl);
3388 orig_type = TREE_TYPE (fndecl);
3389 old_arg_types = TYPE_ARG_TYPES (orig_type);
3390
3391 /* The following test is an ugly hack; some functions simply do not have
3392 any arguments in their type, which is probably a bug, but we cope. */
3393 care_for_types = (old_arg_types != NULL_TREE);
3394 if (care_for_types)
3395 {
3396 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3397 == void_type_node);
3398 otypes = get_vector_of_formal_parm_types (orig_type);
3399 if (last_parm_void)
9771b263 3400 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3401 else
9771b263 3402 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3403 }
3404 else
3405 {
3406 last_parm_void = false;
9771b263 3407 otypes.create (0);
3f84bf08
MJ
3408 }
3409
3410 for (i = 0; i < len; i++)
3411 {
3412 struct ipa_parm_adjustment *adj;
3413 gcc_assert (link);
3414
9771b263
DN
3415 adj = &adjustments[i];
3416 parm = oparms[adj->base_index];
3f84bf08
MJ
3417 adj->base = parm;
3418
3419 if (adj->copy_param)
3420 {
3421 if (care_for_types)
9771b263 3422 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3423 new_arg_types);
3424 *link = parm;
910ad8de 3425 link = &DECL_CHAIN (parm);
3f84bf08
MJ
3426 }
3427 else if (!adj->remove_param)
3428 {
3429 tree new_parm;
3430 tree ptype;
3431
3432 if (adj->by_ref)
3433 ptype = build_pointer_type (adj->type);
3434 else
3435 ptype = adj->type;
3436
3437 if (care_for_types)
3438 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3439
3440 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3441 ptype);
3442 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3443
3444 DECL_ARTIFICIAL (new_parm) = 1;
3445 DECL_ARG_TYPE (new_parm) = ptype;
3446 DECL_CONTEXT (new_parm) = fndecl;
3447 TREE_USED (new_parm) = 1;
3448 DECL_IGNORED_P (new_parm) = 1;
3449 layout_decl (new_parm, 0);
3450
3f84bf08
MJ
3451 adj->base = parm;
3452 adj->reduction = new_parm;
3453
3454 *link = new_parm;
3455
910ad8de 3456 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3457 }
3458 }
3459
3460 *link = NULL_TREE;
3461
3462 if (care_for_types)
3463 {
3464 new_reversed = nreverse (new_arg_types);
3465 if (last_parm_void)
3466 {
3467 if (new_reversed)
3468 TREE_CHAIN (new_arg_types) = void_list_node;
3469 else
3470 new_reversed = void_list_node;
3471 }
3472 }
3473
3474 /* Use copy_node to preserve as much as possible from the original type
3475 (debug info, attribute lists etc.).
3476 The exception is METHOD_TYPEs, which must have a THIS argument.
3477 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3478 instead. */
3479 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
3480 || (adjustments[0].copy_param
3481 && adjustments[0].base_index == 0))
3f84bf08 3482 {
4eb3f32c 3483 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3484 TYPE_ARG_TYPES (new_type) = new_reversed;
3485 }
3486 else
3487 {
3488 new_type
3489 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3490 new_reversed));
3491 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3492 DECL_VINDEX (fndecl) = NULL_TREE;
3493 }
3494
d402c33d
JH
3495 /* When the signature changes, we need to clear the builtin info. */
3496 if (DECL_BUILT_IN (fndecl))
3497 {
3498 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3499 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3500 }
3501
3f84bf08
MJ
3502 /* This is a new type, not a copy of an old type. Need to reassociate
3503 variants. We can handle everything except the main variant lazily. */
3504 t = TYPE_MAIN_VARIANT (orig_type);
3505 if (orig_type != t)
3506 {
3507 TYPE_MAIN_VARIANT (new_type) = t;
3508 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3509 TYPE_NEXT_VARIANT (t) = new_type;
3510 }
3511 else
3512 {
3513 TYPE_MAIN_VARIANT (new_type) = new_type;
3514 TYPE_NEXT_VARIANT (new_type) = NULL;
3515 }
3516
3517 TREE_TYPE (fndecl) = new_type;
9b389a5e 3518 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3519 otypes.release ();
3520 oparms.release ();
3f84bf08
MJ
3521}
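/* A worked example of the rewriting performed by ipa_modify_formal_parameters
   above (hypothetical function, adjustment vector and synthesized name, for
   exposition only): given

     int foo (struct big *p, int x);

   and ADJUSTMENTS = { { base_index 0, type int, offset 32, !by_ref },
                       { base_index 1, copy_param } },

   the declaration becomes

     int foo (int SYNTH.1, int x);

   i.e. the first parameter is replaced by a synthesized scalar, named from
   SYNTH_PARM_PREFIX ("SYNTH" by default), which callers will fill with the
   value located 32 bits into the pointed-to aggregate, while X is passed
   through unchanged.  */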
3522
3523/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3524 If this is a directly recursive call, CS must be NULL. Otherwise it must
3525 contain the corresponding call graph edge. */
3526
3527void
3528ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3529 ipa_parm_adjustment_vec adjustments)
3530{
82338059 3531 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3532 vec<tree> vargs;
3533 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3534 gimple new_stmt;
82338059 3535 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3536 tree callee_decl;
3537 int i, len;
3538
9771b263
DN
3539 len = adjustments.length ();
3540 vargs.create (len);
67348ccc
DM
3541 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3542 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3543
3544 gsi = gsi_for_stmt (stmt);
82338059
MJ
3545 prev_gsi = gsi;
3546 gsi_prev (&prev_gsi);
3f84bf08
MJ
3547 for (i = 0; i < len; i++)
3548 {
3549 struct ipa_parm_adjustment *adj;
3550
9771b263 3551 adj = &adjustments[i];
3f84bf08
MJ
3552
3553 if (adj->copy_param)
3554 {
3555 tree arg = gimple_call_arg (stmt, adj->base_index);
3556
9771b263 3557 vargs.quick_push (arg);
3f84bf08
MJ
3558 }
3559 else if (!adj->remove_param)
3560 {
fffe1e40
MJ
3561 tree expr, base, off;
3562 location_t loc;
f43245d1 3563 unsigned int deref_align = 0;
c1ed6a01 3564 bool deref_base = false;
fffe1e40
MJ
3565
3566 /* We create a new parameter out of the value of the old one, we can
3567 do the following kind of transformations:
3568
3569 - A scalar passed by reference is converted to a scalar passed by
3570 value. (adj->by_ref is false and the type of the original
3571 actual argument is a pointer to a scalar).
3572
3573 - A part of an aggregate is passed instead of the whole aggregate.
3574 The part can be passed either by value or by reference, this is
3575 determined by value of adj->by_ref. Moreover, the code below
3576 handles both situations when the original aggregate is passed by
3577 value (its type is not a pointer) and when it is passed by
3578 reference (it is a pointer to an aggregate).
3579
3580 When the new argument is passed by reference (adj->by_ref is true)
3581 it must be a part of an aggregate and therefore we form it by
3582 simply taking the address of a reference inside the original
3583 aggregate. */
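	 For instance (hypothetical types and offsets, for exposition only):
	 if the original argument is &s and the new parameter is the scalar
	 at byte offset 4 inside S, the code below builds the replacement
	 argument as a MEM_REF equivalent to *(int *) ((char *) &s + 4) when
	 it is passed by value, or takes the address of that location when
	 adj->by_ref is set.  */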
3584
3585 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3586 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3587 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3588 : EXPR_LOCATION (base);
fffe1e40 3589
82d49829
MJ
3590 if (TREE_CODE (base) != ADDR_EXPR
3591 && POINTER_TYPE_P (TREE_TYPE (base)))
3592 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3593 adj->offset / BITS_PER_UNIT);
3f84bf08 3594 else
3f84bf08 3595 {
fffe1e40
MJ
3596 HOST_WIDE_INT base_offset;
3597 tree prev_base;
c1ed6a01 3598 bool addrof;
fffe1e40
MJ
3599
3600 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3601 {
3602 base = TREE_OPERAND (base, 0);
3603 addrof = true;
3604 }
3605 else
3606 addrof = false;
fffe1e40
MJ
3607 prev_base = base;
3608 base = get_addr_base_and_unit_offset (base, &base_offset);
3609 /* Aggregate arguments can have non-invariant addresses. */
3610 if (!base)
3611 {
3612 base = build_fold_addr_expr (prev_base);
82d49829 3613 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3614 adj->offset / BITS_PER_UNIT);
3615 }
3616 else if (TREE_CODE (base) == MEM_REF)
3617 {
c1ed6a01
MJ
3618 if (!addrof)
3619 {
3620 deref_base = true;
3621 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3622 }
82d49829 3623 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3624 base_offset
3625 + adj->offset / BITS_PER_UNIT);
3626 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3627 off);
fffe1e40
MJ
3628 base = TREE_OPERAND (base, 0);
3629 }
3630 else
3631 {
82d49829 3632 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3633 base_offset
3634 + adj->offset / BITS_PER_UNIT);
3635 base = build_fold_addr_expr (base);
3636 }
3f84bf08 3637 }
fffe1e40 3638
3a5a825a
RG
3639 if (!adj->by_ref)
3640 {
3641 tree type = adj->type;
3642 unsigned int align;
3643 unsigned HOST_WIDE_INT misalign;
644ffefd 3644
c1ed6a01
MJ
3645 if (deref_base)
3646 {
3647 align = deref_align;
3648 misalign = 0;
3649 }
3650 else
3651 {
3652 get_pointer_alignment_1 (base, &align, &misalign);
3653 if (TYPE_ALIGN (type) > align)
3654 align = TYPE_ALIGN (type);
3655 }
27bcd47c
LC
3656 misalign += (tree_to_double_int (off)
3657 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3658 * BITS_PER_UNIT);
3659 misalign = misalign & (align - 1);
3660 if (misalign != 0)
3661 align = (misalign & -misalign);
3662 if (align < TYPE_ALIGN (type))
3663 type = build_aligned_type (type, align);
3664 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3665 }
3666 else
3667 {
3668 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3669 expr = build_fold_addr_expr (expr);
3670 }
fffe1e40 3671
3f84bf08
MJ
3672 expr = force_gimple_operand_gsi (&gsi, expr,
3673 adj->by_ref
3674 || is_gimple_reg_type (adj->type),
3675 NULL, true, GSI_SAME_STMT);
9771b263 3676 vargs.quick_push (expr);
3f84bf08 3677 }
ddb555ed
JJ
3678 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3679 {
3680 unsigned int ix;
3681 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3682 gimple def_temp;
3683
3684 arg = gimple_call_arg (stmt, adj->base_index);
3685 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3686 {
3687 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3688 continue;
3689 arg = fold_convert_loc (gimple_location (stmt),
3690 TREE_TYPE (origin), arg);
3691 }
3692 if (debug_args == NULL)
3693 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3694 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3695 if (ddecl == origin)
3696 {
9771b263 3697 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3698 break;
3699 }
3700 if (ddecl == NULL)
3701 {
3702 ddecl = make_node (DEBUG_EXPR_DECL);
3703 DECL_ARTIFICIAL (ddecl) = 1;
3704 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3705 DECL_MODE (ddecl) = DECL_MODE (origin);
3706
9771b263
DN
3707 vec_safe_push (*debug_args, origin);
3708 vec_safe_push (*debug_args, ddecl);
ddb555ed 3709 }
9771b263 3710 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3711 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3712 }
3f84bf08
MJ
3713 }
3714
3715 if (dump_file && (dump_flags & TDF_DETAILS))
3716 {
3717 fprintf (dump_file, "replacing stmt:");
3718 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3719 }
3720
3f84bf08 3721 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3722 vargs.release ();
3f84bf08
MJ
3723 if (gimple_call_lhs (stmt))
3724 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3725
3726 gimple_set_block (new_stmt, gimple_block (stmt));
3727 if (gimple_has_location (stmt))
3728 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3729 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3730 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3731
3732 if (dump_file && (dump_flags & TDF_DETAILS))
3733 {
3734 fprintf (dump_file, "with stmt:");
3735 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3736 fprintf (dump_file, "\n");
3737 }
3738 gsi_replace (&gsi, new_stmt, true);
3739 if (cs)
3740 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3741 do
3742 {
3743 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3744 gsi_prev (&gsi);
3745 }
3746 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3747 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) != gsi_stmt (prev_gsi)));
3748
3f84bf08
MJ
3749 update_ssa (TODO_update_ssa);
3750 free_dominance_info (CDI_DOMINATORS);
3751}
3752
3753/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3754
3755static bool
3756index_in_adjustments_multiple_times_p (int base_index,
3757 ipa_parm_adjustment_vec adjustments)
3758{
9771b263 3759 int i, len = adjustments.length ();
3f84bf08
MJ
3760 bool one = false;
3761
3762 for (i = 0; i < len; i++)
3763 {
3764 struct ipa_parm_adjustment *adj;
9771b263 3765 adj = &adjustments[i];
3f84bf08
MJ
3766
3767 if (adj->base_index == base_index)
3768 {
3769 if (one)
3770 return true;
3771 else
3772 one = true;
3773 }
3774 }
3775 return false;
3776}
3777
3778
3779/* Return adjustments that should have the same effect on function parameters
3780 and call arguments as if they were first changed according to adjustments in
3781 INNER and then by adjustments in OUTER. */
3782
3783ipa_parm_adjustment_vec
3784ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3785 ipa_parm_adjustment_vec outer)
3786{
9771b263
DN
3787 int i, outlen = outer.length ();
3788 int inlen = inner.length ();
3f84bf08
MJ
3789 int removals = 0;
3790 ipa_parm_adjustment_vec adjustments, tmp;
3791
9771b263 3792 tmp.create (inlen);
3f84bf08
MJ
3793 for (i = 0; i < inlen; i++)
3794 {
3795 struct ipa_parm_adjustment *n;
9771b263 3796 n = &inner[i];
3f84bf08
MJ
3797
3798 if (n->remove_param)
3799 removals++;
3800 else
9771b263 3801 tmp.quick_push (*n);
3f84bf08
MJ
3802 }
3803
9771b263 3804 adjustments.create (outlen + removals);
3f84bf08
MJ
3805 for (i = 0; i < outlen; i++)
3806 {
f32682ca 3807 struct ipa_parm_adjustment r;
9771b263
DN
3808 struct ipa_parm_adjustment *out = &outer[i];
3809 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3810
f32682ca 3811 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3812 gcc_assert (!in->remove_param);
3813 if (out->remove_param)
3814 {
3815 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3816 {
f32682ca 3817 r.remove_param = true;
9771b263 3818 adjustments.quick_push (r);
3f84bf08
MJ
3819 }
3820 continue;
3821 }
3822
f32682ca
DN
3823 r.base_index = in->base_index;
3824 r.type = out->type;
3f84bf08
MJ
3825
3826 /* FIXME: Create nonlocal value too. */
3827
3828 if (in->copy_param && out->copy_param)
f32682ca 3829 r.copy_param = true;
3f84bf08 3830 else if (in->copy_param)
f32682ca 3831 r.offset = out->offset;
3f84bf08 3832 else if (out->copy_param)
f32682ca 3833 r.offset = in->offset;
3f84bf08 3834 else
f32682ca 3835 r.offset = in->offset + out->offset;
9771b263 3836 adjustments.quick_push (r);
3f84bf08
MJ
3837 }
3838
3839 for (i = 0; i < inlen; i++)
3840 {
9771b263 3841 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3842
3843 if (n->remove_param)
9771b263 3844 adjustments.quick_push (*n);
3f84bf08
MJ
3845 }
3846
9771b263 3847 tmp.release ();
3f84bf08
MJ
3848 return adjustments;
3849}
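/* A worked example of the composition above (hypothetical indices and
   offsets, for exposition only): if INNER replaced parameter 0 by the scalar
   at offset 32 of the original aggregate, and OUTER then takes the value at
   offset 0 of that new parameter, the combined adjustment has base_index 0,
   OUTER's type and offset 32 + 0 = 32.  Had both adjustments been plain
   copies, the result would simply be a copy as well.  */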
3850
3851/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3852 friendly way, assuming they are meant to be applied to FNDECL. */
3853
3854void
3855ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3856 tree fndecl)
3857{
9771b263 3858 int i, len = adjustments.length ();
3f84bf08 3859 bool first = true;
9771b263 3860 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3861
3862 fprintf (file, "IPA param adjustments: ");
3863 for (i = 0; i < len; i++)
3864 {
3865 struct ipa_parm_adjustment *adj;
9771b263 3866 adj = &adjustments[i];
3f84bf08
MJ
3867
3868 if (!first)
3869 fprintf (file, " ");
3870 else
3871 first = false;
3872
3873 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3874 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3875 if (adj->base)
3876 {
3877 fprintf (file, ", base: ");
3878 print_generic_expr (file, adj->base, 0);
3879 }
3880 if (adj->reduction)
3881 {
3882 fprintf (file, ", reduction: ");
3883 print_generic_expr (file, adj->reduction, 0);
3884 }
3885 if (adj->new_ssa_base)
3886 {
3887 fprintf (file, ", new_ssa_base: ");
3888 print_generic_expr (file, adj->new_ssa_base, 0);
3889 }
3890
3891 if (adj->copy_param)
3892 fprintf (file, ", copy_param");
3893 else if (adj->remove_param)
3894 fprintf (file, ", remove_param");
3895 else
3896 fprintf (file, ", offset %li", (long) adj->offset);
3897 if (adj->by_ref)
3898 fprintf (file, ", by_ref");
3899 print_node_brief (file, ", type: ", adj->type, 0);
3900 fprintf (file, "\n");
3901 }
9771b263 3902 parms.release ();
3f84bf08
MJ
3903}
3904
2c9561b5
MJ
3905/* Dump the linked list of aggregate replacement values AV to F. */
3906
3907void
3908ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3909{
3910 bool comma = false;
3911 fprintf (f, " Aggregate replacements:");
3912 for (; av; av = av->next)
3913 {
3914 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3915 av->index, av->offset);
3916 print_generic_expr (f, av->value, 0);
3917 comma = true;
3918 }
3919 fprintf (f, "\n");
3920}
3921
fb3f88cc
JH
3922/* Stream out jump function JUMP_FUNC to OB. */
3923
3924static void
3925ipa_write_jump_function (struct output_block *ob,
3926 struct ipa_jump_func *jump_func)
3927{
8b7773a4
MJ
3928 struct ipa_agg_jf_item *item;
3929 struct bitpack_d bp;
3930 int i, count;
fb3f88cc 3931
8b7773a4 3932 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
3933 switch (jump_func->type)
3934 {
3935 case IPA_JF_UNKNOWN:
3936 break;
b258210c 3937 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
3938 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3939 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3940 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 3941 break;
fb3f88cc 3942 case IPA_JF_CONST:
5368224f 3943 gcc_assert (
4502fe8d
MJ
3944 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3945 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
3946 break;
3947 case IPA_JF_PASS_THROUGH:
412288f1 3948 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
3949 if (jump_func->value.pass_through.operation == NOP_EXPR)
3950 {
3951 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3952 bp = bitpack_create (ob->main_stream);
3953 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 3954 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
3955 streamer_write_bitpack (&bp);
3956 }
3957 else
3958 {
3959 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3960 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3961 }
fb3f88cc
JH
3962 break;
3963 case IPA_JF_ANCESTOR:
412288f1 3964 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 3965 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 3966 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
3967 bp = bitpack_create (ob->main_stream);
3968 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 3969 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 3970 streamer_write_bitpack (&bp);
fb3f88cc 3971 break;
8b7773a4
MJ
3972 }
3973
9771b263 3974 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
3975 streamer_write_uhwi (ob, count);
3976 if (count)
3977 {
3978 bp = bitpack_create (ob->main_stream);
3979 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3980 streamer_write_bitpack (&bp);
3981 }
3982
9771b263 3983 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3984 {
3985 streamer_write_uhwi (ob, item->offset);
3986 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
3987 }
3988}
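/* The streamed layout produced above is thus: the jump function type, a
   type-specific payload, the number of aggregate items, one bitpack carrying
   the by_ref flag when there is at least one item, and finally the
   (offset, value) pairs themselves.  ipa_read_jump_function below consumes
   the data back in exactly this order.  */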
3989
3990/* Read in jump function JUMP_FUNC from IB. */
3991
3992static void
3993ipa_read_jump_function (struct lto_input_block *ib,
3994 struct ipa_jump_func *jump_func,
4502fe8d 3995 struct cgraph_edge *cs,
fb3f88cc
JH
3996 struct data_in *data_in)
3997{
4a53743e
MJ
3998 enum jump_func_type jftype;
3999 enum tree_code operation;
8b7773a4 4000 int i, count;
fb3f88cc 4001
4a53743e
MJ
4002 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4003 switch (jftype)
fb3f88cc
JH
4004 {
4005 case IPA_JF_UNKNOWN:
4a53743e 4006 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4007 break;
b258210c 4008 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4009 {
4010 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4011 tree base_type = stream_read_tree (ib, data_in);
4012 tree component_type = stream_read_tree (ib, data_in);
4013
4014 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4015 break;
4016 }
fb3f88cc 4017 case IPA_JF_CONST:
4502fe8d 4018 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4019 break;
4020 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4021 operation = (enum tree_code) streamer_read_uhwi (ib);
4022 if (operation == NOP_EXPR)
4023 {
4024 int formal_id = streamer_read_uhwi (ib);
4025 struct bitpack_d bp = streamer_read_bitpack (ib);
4026 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4027 bool type_preserved = bp_unpack_value (&bp, 1);
4028 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4029 type_preserved);
4a53743e
MJ
4030 }
4031 else
4032 {
4033 tree operand = stream_read_tree (ib, data_in);
4034 int formal_id = streamer_read_uhwi (ib);
4035 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4036 operation);
4037 }
fb3f88cc
JH
4038 break;
4039 case IPA_JF_ANCESTOR:
4a53743e
MJ
4040 {
4041 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4042 tree type = stream_read_tree (ib, data_in);
4043 int formal_id = streamer_read_uhwi (ib);
4044 struct bitpack_d bp = streamer_read_bitpack (ib);
4045 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4046 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4047
b8f6e610
MJ
4048 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4049 type_preserved);
4a53743e
MJ
4050 break;
4051 }
8b7773a4
MJ
4052 }
4053
4054 count = streamer_read_uhwi (ib);
9771b263 4055 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4056 if (count)
4057 {
4a53743e 4058 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4059 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4060 }
4061 for (i = 0; i < count; i++)
4062 {
f32682ca
DN
4063 struct ipa_agg_jf_item item;
4064 item.offset = streamer_read_uhwi (ib);
4065 item.value = stream_read_tree (ib, data_in);
9771b263 4066 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4067 }
4068}
4069
e33c6cd6
MJ
4070/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4071 relevant to indirect inlining to OB. */
661e7330
MJ
4072
4073static void
e33c6cd6
MJ
4074ipa_write_indirect_edge_info (struct output_block *ob,
4075 struct cgraph_edge *cs)
661e7330 4076{
e33c6cd6 4077 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4078 struct bitpack_d bp;
e33c6cd6 4079
412288f1 4080 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4081 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4082 bp = bitpack_create (ob->main_stream);
4083 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4084 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4085 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4086 bp_pack_value (&bp, ii->by_ref, 1);
412288f1 4087 streamer_write_bitpack (&bp);
b258210c
MJ
4088
4089 if (ii->polymorphic)
4090 {
412288f1 4091 streamer_write_hwi (ob, ii->otr_token);
b9393656 4092 stream_write_tree (ob, ii->otr_type, true);
b258210c 4093 }
661e7330
MJ
4094}
4095
e33c6cd6
MJ
4096/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4097 relevant to indirect inlining from IB. */
661e7330
MJ
4098
4099static void
e33c6cd6
MJ
4100ipa_read_indirect_edge_info (struct lto_input_block *ib,
4101 struct data_in *data_in ATTRIBUTE_UNUSED,
4102 struct cgraph_edge *cs)
661e7330 4103{
e33c6cd6 4104 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4105 struct bitpack_d bp;
661e7330 4106
412288f1 4107 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4108 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4109 bp = streamer_read_bitpack (ib);
2465dcc2 4110 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4111 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4112 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4113 ii->by_ref = bp_unpack_value (&bp, 1);
b258210c
MJ
4114 if (ii->polymorphic)
4115 {
412288f1 4116 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4117 ii->otr_type = stream_read_tree (ib, data_in);
b258210c 4118 }
661e7330
MJ
4119}
4120
fb3f88cc
JH
4121/* Stream out NODE info to OB. */
4122
4123static void
4124ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4125{
4126 int node_ref;
7380e6ef 4127 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4128 struct ipa_node_params *info = IPA_NODE_REF (node);
4129 int j;
4130 struct cgraph_edge *e;
2465dcc2 4131 struct bitpack_d bp;
fb3f88cc 4132
7380e6ef 4133 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4134 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4135 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4136
0e8853ee
JH
4137 streamer_write_uhwi (ob, ipa_get_param_count (info));
4138 for (j = 0; j < ipa_get_param_count (info); j++)
4139 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4140 bp = bitpack_create (ob->main_stream);
062c604f 4141 gcc_assert (info->uses_analysis_done
661e7330 4142 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4143 gcc_assert (!info->node_enqueued);
4144 gcc_assert (!info->ipcp_orig_node);
4145 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4146 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4147 streamer_write_bitpack (&bp);
4502fe8d
MJ
4148 for (j = 0; j < ipa_get_param_count (info); j++)
4149 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4150 for (e = node->callees; e; e = e->next_callee)
4151 {
4152 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4153
412288f1 4154 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4155 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4156 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4157 }
e33c6cd6 4158 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4159 {
4160 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4161
412288f1 4162 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4163 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4164 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4165 ipa_write_indirect_edge_info (ob, e);
4166 }
fb3f88cc
JH
4167}
4168
61502ca8 4169/* Stream in NODE info from IB. */
fb3f88cc
JH
4170
4171static void
4172ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4173 struct data_in *data_in)
4174{
4175 struct ipa_node_params *info = IPA_NODE_REF (node);
4176 int k;
4177 struct cgraph_edge *e;
2465dcc2 4178 struct bitpack_d bp;
fb3f88cc 4179
0e8853ee 4180 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4181
0e8853ee
JH
4182 for (k = 0; k < ipa_get_param_count (info); k++)
4183 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4184
412288f1 4185 bp = streamer_read_bitpack (ib);
fb3f88cc 4186 if (ipa_get_param_count (info) != 0)
062c604f 4187 info->uses_analysis_done = true;
fb3f88cc
JH
4188 info->node_enqueued = false;
4189 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4190 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4191 for (k = 0; k < ipa_get_param_count (info); k++)
4192 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4193 for (e = node->callees; e; e = e->next_callee)
4194 {
4195 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4196 int count = streamer_read_uhwi (ib);
fb3f88cc 4197
fb3f88cc
JH
4198 if (!count)
4199 continue;
9771b263 4200 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4201
fb3f88cc 4202 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4203 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4204 data_in);
fb3f88cc 4205 }
e33c6cd6 4206 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4207 {
4208 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4209 int count = streamer_read_uhwi (ib);
c8246dbe 4210
c8246dbe
JH
4211 if (count)
4212 {
9771b263 4213 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4214 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4215 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4216 data_in);
c8246dbe
JH
4217 }
4218 ipa_read_indirect_edge_info (ib, data_in, e);
4219 }
fb3f88cc
JH
4220}
4221
4222/* Write jump functions for the functions in the current partition. */
4223
4224void
f27c1867 4225ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4226{
4227 struct cgraph_node *node;
93536c97 4228 struct output_block *ob;
fb3f88cc 4229 unsigned int count = 0;
f27c1867
JH
4230 lto_symtab_encoder_iterator lsei;
4231 lto_symtab_encoder_t encoder;
4232
fb3f88cc 4233
9771b263 4234 if (!ipa_node_params_vector.exists ())
93536c97 4235 return;
fb3f88cc 4236
93536c97 4237 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4238 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4239 ob->cgraph_node = NULL;
f27c1867
JH
4240 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4241 lsei_next_function_in_partition (&lsei))
fb3f88cc 4242 {
f27c1867 4243 node = lsei_cgraph_node (lsei);
c47d0034
JH
4244 if (cgraph_function_with_gimple_body_p (node)
4245 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4246 count++;
4247 }
4248
412288f1 4249 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4250
4251 /* Process all of the functions. */
f27c1867
JH
4252 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4253 lsei_next_function_in_partition (&lsei))
fb3f88cc 4254 {
f27c1867 4255 node = lsei_cgraph_node (lsei);
c47d0034
JH
4256 if (cgraph_function_with_gimple_body_p (node)
4257 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4258 ipa_write_node_info (ob, node);
4259 }
412288f1 4260 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4261 produce_asm (ob, NULL);
4262 destroy_output_block (ob);
4263}
4264
4265/* Read section in file FILE_DATA of length LEN with data DATA. */
4266
4267static void
4268ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4269 size_t len)
4270{
4271 const struct lto_function_header *header =
4272 (const struct lto_function_header *) data;
4ad9a9de
EB
4273 const int cfg_offset = sizeof (struct lto_function_header);
4274 const int main_offset = cfg_offset + header->cfg_size;
4275 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4276 struct data_in *data_in;
4277 struct lto_input_block ib_main;
4278 unsigned int i;
4279 unsigned int count;
4280
4281 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4282 header->main_size);
4283
4284 data_in =
4285 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4286 header->string_size, vNULL);
412288f1 4287 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4288
4289 for (i = 0; i < count; i++)
4290 {
4291 unsigned int index;
4292 struct cgraph_node *node;
7380e6ef 4293 lto_symtab_encoder_t encoder;
fb3f88cc 4294
412288f1 4295 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4296 encoder = file_data->symtab_node_encoder;
4297 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4298 gcc_assert (node->definition);
fb3f88cc
JH
4299 ipa_read_node_info (&ib_main, node, data_in);
4300 }
4301 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4302 len);
4303 lto_data_in_delete (data_in);
4304}
4305
4306/* Read ipcp jump functions. */
4307
4308void
4309ipa_prop_read_jump_functions (void)
4310{
4311 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4312 struct lto_file_decl_data *file_data;
4313 unsigned int j = 0;
4314
4315 ipa_check_create_node_params ();
4316 ipa_check_create_edge_args ();
4317 ipa_register_cgraph_hooks ();
4318
4319 while ((file_data = file_data_vec[j++]))
4320 {
4321 size_t len;
4322 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4323
4324 if (data)
4325 ipa_prop_read_section (file_data, data, len);
4326 }
4327}
4328
b8698a0f 4329/* After merging units, we can get a mismatch in argument counts.
61502ca8 4330 Also, decl merging might have rendered parameter lists obsolete.
fb3f88cc
JH
4331 Make sure the parameter and edge argument summaries exist. */
4332
4333void
4334ipa_update_after_lto_read (void)
4335{
05d3aa37
MJ
4336 ipa_check_create_node_params ();
4337 ipa_check_create_edge_args ();
fb3f88cc 4338}
2c9561b5
MJ
4339
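/* Stream out the aggregate value replacement chain for NODE to OB.  */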
4340void
4341write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4342{
4343 int node_ref;
4344 unsigned int count = 0;
4345 lto_symtab_encoder_t encoder;
4346 struct ipa_agg_replacement_value *aggvals, *av;
4347
4348 aggvals = ipa_get_agg_replacements_for_node (node);
4349 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4350 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4351 streamer_write_uhwi (ob, node_ref);
4352
4353 for (av = aggvals; av; av = av->next)
4354 count++;
4355 streamer_write_uhwi (ob, count);
4356
4357 for (av = aggvals; av; av = av->next)
4358 {
7b920a9a
MJ
4359 struct bitpack_d bp;
4360
2c9561b5
MJ
4361 streamer_write_uhwi (ob, av->offset);
4362 streamer_write_uhwi (ob, av->index);
4363 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4364
4365 bp = bitpack_create (ob->main_stream);
4366 bp_pack_value (&bp, av->by_ref, 1);
4367 streamer_write_bitpack (&bp);
2c9561b5
MJ
4368 }
4369}
4370
4371/* Stream in the aggregate value replacement chain for NODE from IB. */
4372
4373static void
4374read_agg_replacement_chain (struct lto_input_block *ib,
4375 struct cgraph_node *node,
4376 struct data_in *data_in)
4377{
4378 struct ipa_agg_replacement_value *aggvals = NULL;
4379 unsigned int count, i;
4380
4381 count = streamer_read_uhwi (ib);
4382 for (i = 0; i < count; i++)
4383 {
4384 struct ipa_agg_replacement_value *av;
7b920a9a 4385 struct bitpack_d bp;
2c9561b5
MJ
4386
4387 av = ggc_alloc_ipa_agg_replacement_value ();
4388 av->offset = streamer_read_uhwi (ib);
4389 av->index = streamer_read_uhwi (ib);
4390 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4391 bp = streamer_read_bitpack (ib);
4392 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4393 av->next = aggvals;
4394 aggvals = av;
4395 }
4396 ipa_set_node_agg_value_chain (node, aggvals);
4397}
4398
4399/* Write all aggregate replacements for nodes in the current partition. */
4400
4401void
4402ipa_prop_write_all_agg_replacement (void)
4403{
4404 struct cgraph_node *node;
4405 struct output_block *ob;
4406 unsigned int count = 0;
4407 lto_symtab_encoder_iterator lsei;
4408 lto_symtab_encoder_t encoder;
4409
4410 if (!ipa_node_agg_replacements)
4411 return;
4412
4413 ob = create_output_block (LTO_section_ipcp_transform);
4414 encoder = ob->decl_state->symtab_node_encoder;
4415 ob->cgraph_node = NULL;
4416 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4417 lsei_next_function_in_partition (&lsei))
4418 {
4419 node = lsei_cgraph_node (lsei);
4420 if (cgraph_function_with_gimple_body_p (node)
4421 && ipa_get_agg_replacements_for_node (node) != NULL)
4422 count++;
4423 }
4424
4425 streamer_write_uhwi (ob, count);
4426
4427 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4428 lsei_next_function_in_partition (&lsei))
4429 {
4430 node = lsei_cgraph_node (lsei);
4431 if (cgraph_function_with_gimple_body_p (node)
4432 && ipa_get_agg_replacements_for_node (node) != NULL)
4433 write_agg_replacement_chain (ob, node);
4434 }
4435 streamer_write_char_stream (ob->main_stream, 0);
4436 produce_asm (ob, NULL);
4437 destroy_output_block (ob);
4438}
4439
4440/* Read replacements section in file FILE_DATA of length LEN with data
4441 DATA. */
4442
4443static void
4444read_replacements_section (struct lto_file_decl_data *file_data,
4445 const char *data,
4446 size_t len)
4447{
4448 const struct lto_function_header *header =
4449 (const struct lto_function_header *) data;
4450 const int cfg_offset = sizeof (struct lto_function_header);
4451 const int main_offset = cfg_offset + header->cfg_size;
4452 const int string_offset = main_offset + header->main_size;
4453 struct data_in *data_in;
4454 struct lto_input_block ib_main;
4455 unsigned int i;
4456 unsigned int count;
4457
4458 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4459 header->main_size);
4460
4461 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4462 header->string_size, vNULL);
2c9561b5
MJ
4463 count = streamer_read_uhwi (&ib_main);
4464
4465 for (i = 0; i < count; i++)
4466 {
4467 unsigned int index;
4468 struct cgraph_node *node;
4469 lto_symtab_encoder_t encoder;
4470
4471 index = streamer_read_uhwi (&ib_main);
4472 encoder = file_data->symtab_node_encoder;
4473 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4474 gcc_assert (node->definition);
2c9561b5
MJ
4475 read_agg_replacement_chain (&ib_main, node, data_in);
4476 }
4477 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4478 len);
4479 lto_data_in_delete (data_in);
4480}
4481
4482/* Read IPA-CP aggregate replacements. */
4483
4484void
4485ipa_prop_read_all_agg_replacement (void)
4486{
4487 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4488 struct lto_file_decl_data *file_data;
4489 unsigned int j = 0;
4490
4491 while ((file_data = file_data_vec[j++]))
4492 {
4493 size_t len;
4494 const char *data = lto_get_section_data (file_data,
4495 LTO_section_ipcp_transform,
4496 NULL, &len);
4497 if (data)
4498 read_replacements_section (file_data, data, len);
4499 }
4500}
4501
4502/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4503 NODE. */
4504
4505static void
4506adjust_agg_replacement_values (struct cgraph_node *node,
4507 struct ipa_agg_replacement_value *aggval)
4508{
4509 struct ipa_agg_replacement_value *v;
4510 int i, c = 0, d = 0, *adj;
4511
4512 if (!node->clone.combined_args_to_skip)
4513 return;
4514
4515 for (v = aggval; v; v = v->next)
4516 {
4517 gcc_assert (v->index >= 0);
4518 if (c < v->index)
4519 c = v->index;
4520 }
4521 c++;
4522
4523 adj = XALLOCAVEC (int, c);
4524 for (i = 0; i < c; i++)
4525 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4526 {
4527 adj[i] = -1;
4528 d++;
4529 }
4530 else
4531 adj[i] = i - d;
4532
4533 for (v = aggval; v; v = v->next)
4534 v->index = adj[v->index];
4535}
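/* A worked example (hypothetical clone, for exposition only): with
   combined_args_to_skip = { 1 } and replacements recorded for indices 0, 2
   and 3, the ADJ map built above is { 0, -1, 1, 2 }, so the surviving
   replacements are renumbered to 0, 1 and 2 to match the parameter list of
   the clone.  */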
4536
4537
4538/* Function body transformation phase. */
4539
4540unsigned int
4541ipcp_transform_function (struct cgraph_node *node)
4542{
6e1aa848 4543 vec<ipa_param_descriptor_t> descriptors = vNULL;
2c9561b5
MJ
4544 struct param_analysis_info *parms_ainfo;
4545 struct ipa_agg_replacement_value *aggval;
4546 gimple_stmt_iterator gsi;
4547 basic_block bb;
4548 int param_count;
4549 bool cfg_changed = false, something_changed = false;
4550
4551 gcc_checking_assert (cfun);
4552 gcc_checking_assert (current_function_decl);
4553
4554 if (dump_file)
4555 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 4556 node->name (), node->order);
2c9561b5
MJ
4557
4558 aggval = ipa_get_agg_replacements_for_node (node);
4559 if (!aggval)
4560 return 0;
67348ccc 4561 param_count = count_formal_params (node->decl);
2c9561b5
MJ
4562 if (param_count == 0)
4563 return 0;
4564 adjust_agg_replacement_values (node, aggval);
4565 if (dump_file)
4566 ipa_dump_agg_replacement_values (dump_file, aggval);
4567 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4568 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4569 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4570 ipa_populate_param_decls (node, descriptors);
4571
4572 FOR_EACH_BB (bb)
4573 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4574 {
4575 struct ipa_agg_replacement_value *v;
4576 gimple stmt = gsi_stmt (gsi);
4577 tree rhs, val, t;
3ff2ca23 4578 HOST_WIDE_INT offset, size;
2c9561b5
MJ
4579 int index;
4580 bool by_ref, vce;
4581
4582 if (!gimple_assign_load_p (stmt))
4583 continue;
4584 rhs = gimple_assign_rhs1 (stmt);
4585 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4586 continue;
4587
4588 vce = false;
4589 t = rhs;
4590 while (handled_component_p (t))
4591 {
4592 /* V_C_E can do things like convert an array of integers to one
4593 bigger integer and similar things we do not handle below. */
4594 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4595 {
4596 vce = true;
4597 break;
4598 }
4599 t = TREE_OPERAND (t, 0);
4600 }
4601 if (vce)
4602 continue;
4603
4604 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3ff2ca23 4605 rhs, &index, &offset, &size, &by_ref))
2c9561b5
MJ
4606 continue;
4607 for (v = aggval; v; v = v->next)
4608 if (v->index == index
4609 && v->offset == offset)
4610 break;
3ff2ca23
JJ
4611 if (!v
4612 || v->by_ref != by_ref
4613 || tree_low_cst (TYPE_SIZE (TREE_TYPE (v->value)), 0) != size)
2c9561b5
MJ
4614 continue;
4615
4616 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4617 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4618 {
4619 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4620 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4621 else if (TYPE_SIZE (TREE_TYPE (rhs))
4622 == TYPE_SIZE (TREE_TYPE (v->value)))
4623 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4624 else
4625 {
4626 if (dump_file)
4627 {
4628 fprintf (dump_file, " const ");
4629 print_generic_expr (dump_file, v->value, 0);
4630 fprintf (dump_file, " can't be converted to type of ");
4631 print_generic_expr (dump_file, rhs, 0);
4632 fprintf (dump_file, "\n");
4633 }
4634 continue;
4635 }
4636 }
4637 else
4638 val = v->value;
4639
4640 if (dump_file && (dump_flags & TDF_DETAILS))
4641 {
4642 fprintf (dump_file, "Modifying stmt:\n ");
4643 print_gimple_stmt (dump_file, stmt, 0, 0);
4644 }
4645 gimple_assign_set_rhs_from_tree (&gsi, val);
4646 update_stmt (stmt);
4647
4648 if (dump_file && (dump_flags & TDF_DETAILS))
4649 {
4650 fprintf (dump_file, "into:\n ");
4651 print_gimple_stmt (dump_file, stmt, 0, 0);
4652 fprintf (dump_file, "\n");
4653 }
4654
4655 something_changed = true;
4656 if (maybe_clean_eh_stmt (stmt)
4657 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4658 cfg_changed = true;
4659 }
4660
9771b263 4661 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4662 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4663 descriptors.release ();
2c9561b5
MJ
4664
4665 if (!something_changed)
4666 return 0;
4667 else if (cfg_changed)
4668 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4669 else
4670 return TODO_update_ssa_only_virtuals;
4671}
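/* An illustrative example of the transformation performed above
   (hypothetical GIMPLE, for exposition only): if the aggregate replacement
   chain for this node contains 0[32]=4, a load such as

     x_1 = p_2(D)->f;

   where F lives at bit offset 32 of the aggregate pointed to by the first
   parameter, is rewritten to

     x_1 = 4;

   after which the statement is updated and any now-dead EH edges are
   purged.  */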