/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in the vector of parameter
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a human-readable description of the Ith formal parameter of the
   function corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->order,
               xstrdup (cgraph_node_name (cs->callee)),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, " indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */
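
/* For illustration (all symbol names below are made up for the example), the
   kind of assignment this function recognizes looks roughly like the GIMPLE
   emitted by a constructor of a class A:

     this_2(D)->_vptr.A = &_ZTV1A[2];

   i.e. a store into a DECL_VIRTUAL_P COMPONENT_REF whose right hand side is
   the address of a virtual table declaration; the type returned is then the
   DECL_CONTEXT of that virtual table, here A.  */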

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}



/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */
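
/* As an illustration (names made up for the example), with a formal parameter
   struct S *p, a statement such as

     tmp_3 = p_1(D)->fld;

   qualifies when nothing may have modified *p between the beginning of the
   function and the statement; *INDEX_P then gets the index of p, *OFFSET_P
   the bit offset of fld within *p and *BY_REF_P becomes true.  */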

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */
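
/* As an illustration (made-up names), for a call such as  middleman (&d)
   where d is an automatically allocated local variable of a polymorphic
   class, the KNOWN_TYPE jump function records the type of d as the base
   type, the offset of the addressed component within d and EXPECTED_TYPE as
   the component type, provided the dynamic type of d cannot have changed
   before the call.  */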

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
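
/* For illustration, a C++ pointer to member function such as
   void (S::*pmf) ()  is typically lowered by the front end into a record
   roughly equivalent to

     struct
     {
       void (*__pfn) (struct S *);   <- pointer to a METHOD_TYPE
       long __delta;                 <- this-pointer adjustment
     };

   which is the shape checked for below (field names made up for the example;
   the precise layout is ABI dependent).  */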

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
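
/* For instance, given code such as (illustrative example only)

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   and assuming nothing clobbers s in between, the resulting jump function
   will record that the aggregate contains the constant 1 at the offset of
   field a and the constant 2 at the offset of field b.  */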

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs)
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}

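/* Return the declared type of the Ith formal parameter of the callee of call
   graph edge E, looking first at the argument types of the callee's (or the
   call statement's) function type and falling back to the callee's
   DECL_ARGUMENTS.  Return NULL if the type cannot be determined.  */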
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */
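
/* For example (illustration only), for a call  bar (a_1(D), 7, &s)  in a
   function foo (int a, ...), this will typically record a pass-through jump
   function for the first argument, a constant jump function for the second
   one and, depending on what is known about s at the call site, possibly a
   known-type or aggregate-carrying jump function for the third one.  */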
be95e2b9 1551
749aa96d 1552static void
c419671c 1553ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1554 struct cgraph_edge *cs)
3e293154
MJ
1555{
1556 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1557 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1558 gimple call = cs->call_stmt;
8b7773a4 1559 int n, arg_num = gimple_call_num_args (call);
3e293154 1560
606d9a09 1561 if (arg_num == 0 || args->jump_functions)
3e293154 1562 return;
9771b263 1563 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1564
96e24d49
JJ
1565 if (gimple_call_internal_p (call))
1566 return;
5fe8e757
MJ
1567 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1568 return;
1569
8b7773a4
MJ
1570 for (n = 0; n < arg_num; n++)
1571 {
1572 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1573 tree arg = gimple_call_arg (call, n);
06d65050 1574 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1575
8b7773a4 1576 if (is_gimple_ip_invariant (arg))
4502fe8d 1577 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1578 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1579 && TREE_CODE (arg) == PARM_DECL)
1580 {
1581 int index = ipa_get_param_decl_index (info, arg);
1582
1583 gcc_assert (index >=0);
1584 /* Aggregate passed by value, check for pass-through, otherwise we
1585 will attempt to fill in aggregate contents later in this
1586 for cycle. */
1587 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1588 {
b8f6e610 1589 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1590 continue;
1591 }
1592 }
1593 else if (TREE_CODE (arg) == SSA_NAME)
1594 {
1595 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1596 {
1597 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1598 if (index >= 0)
8b7773a4 1599 {
b8f6e610 1600 bool agg_p, type_p;
8b7773a4
MJ
1601 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1602 call, arg);
06d65050
JH
1603 if (param_type && POINTER_TYPE_P (param_type))
1604 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1605 call, jfunc);
1606 else
1607 type_p = false;
b8f6e610 1608 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1609 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1610 type_p);
8b7773a4
MJ
1611 }
1612 }
1613 else
1614 {
1615 gimple stmt = SSA_NAME_DEF_STMT (arg);
1616 if (is_gimple_assign (stmt))
1617 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1618 call, stmt, arg, param_type);
8b7773a4
MJ
1619 else if (gimple_code (stmt) == GIMPLE_PHI)
1620 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1621 call, stmt, param_type);
8b7773a4
MJ
1622 }
1623 }
1624 else
06d65050
JH
1625 compute_known_type_jump_func (arg, jfunc, call,
1626 param_type
1627 && POINTER_TYPE_P (param_type)
1628 ? TREE_TYPE (param_type)
1629 : NULL);
3e293154 1630
8b7773a4
MJ
1631 if ((jfunc->type != IPA_JF_PASS_THROUGH
1632 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1633 && (jfunc->type != IPA_JF_ANCESTOR
1634 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1635 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1636 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1637 determine_known_aggregate_parts (call, arg, jfunc);
1638 }
3e293154
MJ
1639}
1640
749aa96d
MJ
1641/* Compute jump functions for all edges - both direct and indirect - outgoing
1642 from NODE. Also count the actual arguments in the process. */
1643
062c604f
MJ
1644static void
1645ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1646 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1647{
1648 struct cgraph_edge *cs;
1649
1650 for (cs = node->callees; cs; cs = cs->next_callee)
1651 {
d7da5cc8
MJ
1652 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1653 NULL);
749aa96d
MJ
1654 /* We do not need to bother analyzing calls to unknown
1655 functions unless they may become known during lto/whopr. */
67348ccc 1656 if (!callee->definition && !flag_lto)
749aa96d 1657 continue;
c419671c 1658 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1659 }
1660
1661 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1662 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1663}
1664
8b7773a4
MJ
1665/* If STMT looks like a statement loading a value from a member pointer formal
1666 parameter, return that parameter and store the offset of the field to
1667 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1668 might be clobbered). If USE_DELTA, then we look for a use of the delta
1669 field rather than the pfn. */
be95e2b9 1670
3e293154 1671static tree
8b7773a4
MJ
1672ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1673 HOST_WIDE_INT *offset_p)
3e293154 1674{
8b7773a4
MJ
1675 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1676
1677 if (!gimple_assign_single_p (stmt))
1678 return NULL_TREE;
3e293154 1679
8b7773a4 1680 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1681 if (TREE_CODE (rhs) == COMPONENT_REF)
1682 {
1683 ref_field = TREE_OPERAND (rhs, 1);
1684 rhs = TREE_OPERAND (rhs, 0);
1685 }
1686 else
1687 ref_field = NULL_TREE;
d242d063 1688 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1689 return NULL_TREE;
3e293154 1690 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1691 if (TREE_CODE (rec) != ADDR_EXPR)
1692 return NULL_TREE;
1693 rec = TREE_OPERAND (rec, 0);
3e293154 1694 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1695 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1696 return NULL_TREE;
d242d063 1697 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1698
8b7773a4
MJ
1699 if (use_delta)
1700 fld = delta_field;
1701 else
1702 fld = ptr_field;
1703 if (offset_p)
1704 *offset_p = int_bit_position (fld);
1705
ae788515
EB
1706 if (ref_field)
1707 {
1708 if (integer_nonzerop (ref_offset))
1709 return NULL_TREE;
ae788515
EB
1710 return ref_field == fld ? rec : NULL_TREE;
1711 }
3e293154 1712 else
8b7773a4
MJ
1713 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1714 : NULL_TREE;
3e293154
MJ
1715}
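/* For illustration (a sketch assuming the common Itanium C++ ABI convention;
   the exact record layout is target-dependent): a pointer to member function
   is passed as a small aggregate roughly of the form

     struct ptrmemfunc
     {
       void *__pfn;         function address, or a vtable offset when the
                            virtual bit (stored either here or in __delta,
                            per TARGET_PTRMEMFUNC_VBIT_LOCATION) is set
       ptrdiff_t __delta;   adjustment added to the this pointer
     };

   The function above recognizes loads of either of these two fields from
   such a PARM_DECL.  */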
1716
1717/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1718
3e293154
MJ
1719static bool
1720ipa_is_ssa_with_stmt_def (tree t)
1721{
1722 if (TREE_CODE (t) == SSA_NAME
1723 && !SSA_NAME_IS_DEFAULT_DEF (t))
1724 return true;
1725 else
1726 return false;
1727}
1728
40591473
MJ
1729/* Find the indirect call graph edge corresponding to STMT and mark it as a
1730 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1731 indirect call graph edge. */
be95e2b9 1732
40591473
MJ
1733static struct cgraph_edge *
1734ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1735{
e33c6cd6 1736 struct cgraph_edge *cs;
3e293154 1737
5f902d76 1738 cs = cgraph_edge (node, stmt);
b258210c 1739 cs->indirect_info->param_index = param_index;
8b7773a4 1740 cs->indirect_info->offset = 0;
40591473 1741 cs->indirect_info->polymorphic = 0;
8b7773a4 1742 cs->indirect_info->agg_contents = 0;
c13bc3d9 1743 cs->indirect_info->member_ptr = 0;
40591473 1744 return cs;
3e293154
MJ
1745}
1746
e33c6cd6 1747/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1748 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1749 intermediate information about each formal parameter. Currently it checks
1750 whether the call calls a pointer that is a formal parameter and if so, the
1751 parameter is marked with the called flag and an indirect call graph edge
1752 describing the call is created. This is very simple for ordinary pointers
1753 represented in SSA but not-so-nice when it comes to member pointers. The
1754 ugly part of this function does nothing more than trying to match the
1755 pattern of such a call. An example of such a pattern is the gimple dump
 1756	 below; the call is on the last line:
3e293154 1757
ae788515
EB
1758 <bb 2>:
1759 f$__delta_5 = f.__delta;
1760 f$__pfn_24 = f.__pfn;
1761
1762 or
3e293154 1763 <bb 2>:
d242d063
MJ
1764 f$__delta_5 = MEM[(struct *)&f];
1765 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1766
ae788515 1767 and a few lines below:
8aa29647
MJ
1768
1769 <bb 5>
3e293154
MJ
1770 D.2496_3 = (int) f$__pfn_24;
1771 D.2497_4 = D.2496_3 & 1;
1772 if (D.2497_4 != 0)
1773 goto <bb 3>;
1774 else
1775 goto <bb 4>;
1776
8aa29647 1777 <bb 6>:
3e293154
MJ
1778 D.2500_7 = (unsigned int) f$__delta_5;
1779 D.2501_8 = &S + D.2500_7;
1780 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1781 D.2503_10 = *D.2502_9;
1782 D.2504_12 = f$__pfn_24 + -1;
1783 D.2505_13 = (unsigned int) D.2504_12;
1784 D.2506_14 = D.2503_10 + D.2505_13;
1785 D.2507_15 = *D.2506_14;
1786 iftmp.11_16 = (String:: *) D.2507_15;
1787
8aa29647 1788 <bb 7>:
3e293154
MJ
1789 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1790 D.2500_19 = (unsigned int) f$__delta_5;
1791 D.2508_20 = &S + D.2500_19;
1792 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1793
1794 Such patterns are results of simple calls to a member pointer:
1795
1796 int doprinting (int (MyString::* f)(int) const)
1797 {
1798 MyString S ("somestring");
1799
1800 return (S.*f)(4);
1801 }
8b7773a4
MJ
1802
1803 Moreover, the function also looks for called pointers loaded from aggregates
1804 passed by value or reference. */
3e293154
MJ
1805
1806static void
b258210c
MJ
1807ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1808 struct ipa_node_params *info,
c419671c 1809 struct param_analysis_info *parms_ainfo,
b258210c 1810 gimple call, tree target)
3e293154 1811{
726a989a 1812 gimple def;
3e293154 1813 tree n1, n2;
726a989a
RB
1814 gimple d1, d2;
1815 tree rec, rec2, cond;
1816 gimple branch;
3e293154 1817 int index;
3e293154 1818 basic_block bb, virt_bb, join;
8b7773a4
MJ
1819 HOST_WIDE_INT offset;
1820 bool by_ref;
3e293154 1821
3e293154
MJ
1822 if (SSA_NAME_IS_DEFAULT_DEF (target))
1823 {
b258210c 1824 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1825 index = ipa_get_param_decl_index (info, var);
1826 if (index >= 0)
40591473 1827 ipa_note_param_call (node, index, call);
3e293154
MJ
1828 return;
1829 }
1830
8b7773a4
MJ
1831 def = SSA_NAME_DEF_STMT (target);
1832 if (gimple_assign_single_p (def)
d044dd17 1833 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4 1834 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 1835 NULL, &by_ref))
8b7773a4
MJ
1836 {
1837 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1838 cs->indirect_info->offset = offset;
1839 cs->indirect_info->agg_contents = 1;
1840 cs->indirect_info->by_ref = by_ref;
1841 return;
1842 }
1843
3e293154
MJ
1844 /* Now we need to try to match the complex pattern of calling a member
1845 pointer. */
8b7773a4
MJ
1846 if (gimple_code (def) != GIMPLE_PHI
1847 || gimple_phi_num_args (def) != 2
1848 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1849 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1850 return;
1851
3e293154
MJ
1852 /* First, we need to check whether one of these is a load from a member
1853 pointer that is a parameter to this function. */
1854 n1 = PHI_ARG_DEF (def, 0);
1855 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1856 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1857 return;
1858 d1 = SSA_NAME_DEF_STMT (n1);
1859 d2 = SSA_NAME_DEF_STMT (n2);
1860
8aa29647 1861 join = gimple_bb (def);
8b7773a4 1862 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1863 {
8b7773a4 1864 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1865 return;
1866
8aa29647 1867 bb = EDGE_PRED (join, 0)->src;
726a989a 1868 virt_bb = gimple_bb (d2);
3e293154 1869 }
8b7773a4 1870 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1871 {
8aa29647 1872 bb = EDGE_PRED (join, 1)->src;
726a989a 1873 virt_bb = gimple_bb (d1);
3e293154
MJ
1874 }
1875 else
1876 return;
1877
1878 /* Second, we need to check that the basic blocks are laid out in the way
1879 corresponding to the pattern. */
1880
3e293154
MJ
1881 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1882 || single_pred (virt_bb) != bb
1883 || single_succ (virt_bb) != join)
1884 return;
1885
1886 /* Third, let's see that the branching is done depending on the least
1887 significant bit of the pfn. */
1888
1889 branch = last_stmt (bb);
8aa29647 1890 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1891 return;
1892
12430896
RG
1893 if ((gimple_cond_code (branch) != NE_EXPR
1894 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1895 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1896 return;
3e293154 1897
726a989a 1898 cond = gimple_cond_lhs (branch);
3e293154
MJ
1899 if (!ipa_is_ssa_with_stmt_def (cond))
1900 return;
1901
726a989a 1902 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1903 if (!is_gimple_assign (def)
726a989a
RB
1904 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1905 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1906 return;
726a989a
RB
1907
1908 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1909 if (!ipa_is_ssa_with_stmt_def (cond))
1910 return;
1911
726a989a 1912 def = SSA_NAME_DEF_STMT (cond);
3e293154 1913
8b75fc9b
MJ
1914 if (is_gimple_assign (def)
1915 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1916 {
726a989a 1917 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1918 if (!ipa_is_ssa_with_stmt_def (cond))
1919 return;
726a989a 1920 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1921 }
1922
6f7b8b70
RE
1923 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1924 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1925 == ptrmemfunc_vbit_in_delta),
1926 NULL);
3e293154
MJ
1927 if (rec != rec2)
1928 return;
1929
1930 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1931 if (index >= 0
1932 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1933 {
1934 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1935 cs->indirect_info->offset = offset;
1936 cs->indirect_info->agg_contents = 1;
c13bc3d9 1937 cs->indirect_info->member_ptr = 1;
8b7773a4 1938 }
3e293154
MJ
1939
1940 return;
1941}
1942
b258210c
MJ
1943/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1944 object referenced in the expression is a formal parameter of the caller
1945 (described by INFO), create a call note for the statement. */
1946
1947static void
1948ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1949 struct ipa_node_params *info, gimple call,
1950 tree target)
1951{
40591473
MJ
1952 struct cgraph_edge *cs;
1953 struct cgraph_indirect_call_info *ii;
f65cf2b7 1954 struct ipa_jump_func jfunc;
b258210c 1955 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1956 int index;
40591473 1957 HOST_WIDE_INT anc_offset;
b258210c 1958
05842ff5
MJ
1959 if (!flag_devirtualize)
1960 return;
1961
40591473 1962 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1963 return;
1964
40591473
MJ
1965 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1966 {
1967 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1968 return;
b258210c 1969
40591473
MJ
1970 anc_offset = 0;
1971 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1972 gcc_assert (index >= 0);
06d65050
JH
1973 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1974 call, &jfunc))
40591473
MJ
1975 return;
1976 }
1977 else
1978 {
1979 gimple stmt = SSA_NAME_DEF_STMT (obj);
1980 tree expr;
1981
1982 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1983 if (!expr)
1984 return;
1985 index = ipa_get_param_decl_index (info,
1986 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1987 gcc_assert (index >= 0);
06d65050
JH
1988 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1989 call, &jfunc, anc_offset))
40591473
MJ
1990 return;
1991 }
1992
1993 cs = ipa_note_param_call (node, index, call);
1994 ii = cs->indirect_info;
8b7773a4 1995 ii->offset = anc_offset;
40591473 1996 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
c49bdb2e 1997 ii->otr_type = obj_type_ref_class (target);
40591473 1998 ii->polymorphic = 1;
b258210c
MJ
1999}
2000
 2001/* Analyze call statement CALL to see whether and how it utilizes formal parameters
c419671c 2002 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2003 containing intermediate information about each formal parameter. */
b258210c
MJ
2004
2005static void
2006ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2007 struct ipa_node_params *info,
c419671c 2008 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2009{
2010 tree target = gimple_call_fn (call);
2011
25583c4f
RS
2012 if (!target)
2013 return;
b258210c 2014 if (TREE_CODE (target) == SSA_NAME)
c419671c 2015 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2016 else if (virtual_method_call_p (target))
b258210c
MJ
2017 ipa_analyze_virtual_call_uses (node, info, call, target);
2018}
2019
2020
e33c6cd6
MJ
2021/* Analyze the call statement STMT with respect to formal parameters (described
2022 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2023 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2024 intermediate information about each formal parameter. */
be95e2b9 2025
3e293154 2026static void
e33c6cd6 2027ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2028 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2029{
726a989a 2030 if (is_gimple_call (stmt))
c419671c 2031 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2032}
2033
2034/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2035 If OP is a parameter declaration, mark it as used in the info structure
2036 passed in DATA. */
2037
2038static bool
2039visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2040 tree op, void *data)
2041{
2042 struct ipa_node_params *info = (struct ipa_node_params *) data;
2043
2044 op = get_base_address (op);
2045 if (op
2046 && TREE_CODE (op) == PARM_DECL)
2047 {
2048 int index = ipa_get_param_decl_index (info, op);
2049 gcc_assert (index >= 0);
310bc633 2050 ipa_set_param_used (info, index, true);
062c604f
MJ
2051 }
2052
2053 return false;
3e293154
MJ
2054}
2055
2056/* Scan the function body of NODE and inspect the uses of formal parameters.
2057 Store the findings in various structures of the associated ipa_node_params
c419671c 2058 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2059 vector containing intermediate information about each formal parameter. */
be95e2b9 2060
062c604f
MJ
2061static void
2062ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2063 struct param_analysis_info *parms_ainfo)
3e293154 2064{
67348ccc 2065 tree decl = node->decl;
3e293154
MJ
2066 basic_block bb;
2067 struct function *func;
726a989a 2068 gimple_stmt_iterator gsi;
3e293154 2069 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2070 int i;
3e293154 2071
726a989a 2072 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2073 return;
3e293154 2074
5fe8e757
MJ
2075 info->uses_analysis_done = 1;
2076 if (ipa_func_spec_opts_forbid_analysis_p (node))
2077 {
2078 for (i = 0; i < ipa_get_param_count (info); i++)
2079 {
2080 ipa_set_param_used (info, i, true);
2081 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2082 }
2083 return;
2084 }
2085
062c604f
MJ
2086 for (i = 0; i < ipa_get_param_count (info); i++)
2087 {
2088 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2089 int controlled_uses = 0;
2090
062c604f
MJ
2091 /* For SSA regs see if parameter is used. For non-SSA we compute
2092 the flag during modification analysis. */
4502fe8d
MJ
2093 if (is_gimple_reg (parm))
2094 {
67348ccc 2095 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2096 parm);
2097 if (ddef && !has_zero_uses (ddef))
2098 {
2099 imm_use_iterator imm_iter;
2100 use_operand_p use_p;
2101
2102 ipa_set_param_used (info, i, true);
2103 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2104 if (!is_gimple_call (USE_STMT (use_p)))
2105 {
2106 controlled_uses = IPA_UNDESCRIBED_USE;
2107 break;
2108 }
2109 else
2110 controlled_uses++;
2111 }
2112 else
2113 controlled_uses = 0;
2114 }
2115 else
2116 controlled_uses = IPA_UNDESCRIBED_USE;
2117 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2118 }
2119
3e293154
MJ
2120 func = DECL_STRUCT_FUNCTION (decl);
2121 FOR_EACH_BB_FN (bb, func)
2122 {
726a989a 2123 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2124 {
726a989a 2125 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2126
2127 if (is_gimple_debug (stmt))
2128 continue;
2129
c419671c 2130 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2131 walk_stmt_load_store_addr_ops (stmt, info,
2132 visit_ref_for_mod_analysis,
2133 visit_ref_for_mod_analysis,
2134 visit_ref_for_mod_analysis);
518dc859 2135 }
355a7673 2136 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2137 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2138 visit_ref_for_mod_analysis,
2139 visit_ref_for_mod_analysis,
2140 visit_ref_for_mod_analysis);
518dc859 2141 }
3e293154
MJ
2142}
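/* For illustration of the controlled-uses computation above: in the SSA case,
   the loop over the immediate uses of the parameter's default definition
   counts how many of those uses are call arguments, because only such uses
   are described by jump functions; as soon as any other kind of use is seen,
   the count collapses to IPA_UNDESCRIBED_USE.  Parameters that are not in
   SSA form are always recorded as IPA_UNDESCRIBED_USE.  */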
2143
2c9561b5
MJ
 2144/* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters.  */
2145
2146static void
2147free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2148{
2149 int i;
2150
2151 for (i = 0; i < param_count; i++)
2152 {
2153 if (parms_ainfo[i].parm_visited_statements)
2154 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2155 if (parms_ainfo[i].pt_visited_statements)
2156 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2157 }
2158}
2159
dd5a833e
MS
 2160/* Initialize the array describing properties of formal parameters
2161 of NODE, analyze their uses and compute jump functions associated
2162 with actual arguments of calls from within NODE. */
062c604f
MJ
2163
2164void
2165ipa_analyze_node (struct cgraph_node *node)
2166{
57dbdc5a 2167 struct ipa_node_params *info;
c419671c 2168 struct param_analysis_info *parms_ainfo;
2c9561b5 2169 int param_count;
062c604f 2170
57dbdc5a
MJ
2171 ipa_check_create_node_params ();
2172 ipa_check_create_edge_args ();
2173 info = IPA_NODE_REF (node);
67348ccc 2174 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2175 ipa_initialize_node_params (node);
2176
2177 param_count = ipa_get_param_count (info);
c419671c
MJ
2178 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2179 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2180
c419671c
MJ
2181 ipa_analyze_params_uses (node, parms_ainfo);
2182 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2183
2c9561b5 2184 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2185 pop_cfun ();
062c604f
MJ
2186}
2187
e248d83f
MJ
 2188/* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2189 attempt a type-based devirtualization. If successful, return the
2190 target function declaration, otherwise return NULL. */
2191
2192tree
2193ipa_intraprocedural_devirtualization (gimple call)
2194{
2195 tree binfo, token, fndecl;
2196 struct ipa_jump_func jfunc;
2197 tree otr = gimple_call_fn (call);
2198
2199 jfunc.type = IPA_JF_UNKNOWN;
2200 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2201 call, obj_type_ref_class (otr));
e248d83f
MJ
2202 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2203 return NULL_TREE;
2204 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2205 if (!binfo)
2206 return NULL_TREE;
2207 token = OBJ_TYPE_REF_TOKEN (otr);
2208 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2209 binfo);
450ad0cd
JH
2210#ifdef ENABLE_CHECKING
2211 if (fndecl)
2212 gcc_assert (possible_polymorphic_call_target_p
2213 (otr, cgraph_get_node (fndecl)));
2214#endif
e248d83f
MJ
2215 return fndecl;
2216}
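/* An illustrative (hypothetical) example of the situation targeted above:

     struct A { virtual int foo (); };

     int bar (void)
     {
       A a;
       A *p = &a;
       return p->foo ();
     }

   Here the object of the OBJ_TYPE_REF call is known to be exactly of type A,
   so if compute_known_type_jump_func can prove that, the BINFO lookup above
   resolves the call to A::foo and returns its declaration.  */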
062c604f 2217
61502ca8 2218/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
 2219 is being inlined, knowing that DST is of type ancestor and SRC of known
2220 type. */
2221
2222static void
2223combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2224 struct ipa_jump_func *dst)
2225{
c7573249
MJ
2226 HOST_WIDE_INT combined_offset;
2227 tree combined_type;
b258210c 2228
b8f6e610
MJ
2229 if (!ipa_get_jf_ancestor_type_preserved (dst))
2230 {
2231 dst->type = IPA_JF_UNKNOWN;
2232 return;
2233 }
2234
7b872d9e
MJ
2235 combined_offset = ipa_get_jf_known_type_offset (src)
2236 + ipa_get_jf_ancestor_offset (dst);
2237 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2238
7b872d9e
MJ
2239 ipa_set_jf_known_type (dst, combined_offset,
2240 ipa_get_jf_known_type_base_type (src),
2241 combined_type);
b258210c
MJ
2242}
2243
be95e2b9 2244/* Update the jump functions associated with call graph edge E when the call
3e293154 2245 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2246 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2247
3e293154
MJ
2248static void
2249update_jump_functions_after_inlining (struct cgraph_edge *cs,
2250 struct cgraph_edge *e)
2251{
2252 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2253 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2254 int count = ipa_get_cs_argument_count (args);
2255 int i;
2256
2257 for (i = 0; i < count; i++)
2258 {
b258210c 2259 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2260
685b0d13
MJ
2261 if (dst->type == IPA_JF_ANCESTOR)
2262 {
b258210c 2263 struct ipa_jump_func *src;
8b7773a4 2264 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2265
b258210c
MJ
2266 /* Variable number of arguments can cause havoc if we try to access
2267 one that does not exist in the inlined edge. So make sure we
2268 don't. */
8b7773a4 2269 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2270 {
2271 dst->type = IPA_JF_UNKNOWN;
2272 continue;
2273 }
2274
8b7773a4
MJ
2275 src = ipa_get_ith_jump_func (top, dst_fid);
2276
2277 if (src->agg.items
2278 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2279 {
2280 struct ipa_agg_jf_item *item;
2281 int j;
2282
2283 /* Currently we do not produce clobber aggregate jump functions,
2284 replace with merging when we do. */
2285 gcc_assert (!dst->agg.items);
2286
9771b263 2287 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2288 dst->agg.by_ref = src->agg.by_ref;
9771b263 2289 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2290 item->offset -= dst->value.ancestor.offset;
2291 }
2292
b258210c
MJ
2293 if (src->type == IPA_JF_KNOWN_TYPE)
2294 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2295 else if (src->type == IPA_JF_PASS_THROUGH
2296 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2297 {
2298 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2299 dst->value.ancestor.agg_preserved &=
2300 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2301 dst->value.ancestor.type_preserved &=
2302 src->value.pass_through.type_preserved;
8b7773a4 2303 }
b258210c
MJ
2304 else if (src->type == IPA_JF_ANCESTOR)
2305 {
2306 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2307 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2308 dst->value.ancestor.agg_preserved &=
2309 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2310 dst->value.ancestor.type_preserved &=
2311 src->value.ancestor.type_preserved;
b258210c
MJ
2312 }
2313 else
2314 dst->type = IPA_JF_UNKNOWN;
2315 }
2316 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2317 {
b258210c
MJ
2318 struct ipa_jump_func *src;
2319 /* We must check range due to calls with variable number of arguments
2320 and we cannot combine jump functions with operations. */
2321 if (dst->value.pass_through.operation == NOP_EXPR
2322 && (dst->value.pass_through.formal_id
2323 < ipa_get_cs_argument_count (top)))
2324 {
8b7773a4
MJ
2325 int dst_fid = dst->value.pass_through.formal_id;
2326 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2327 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2328
b8f6e610
MJ
2329 switch (src->type)
2330 {
2331 case IPA_JF_UNKNOWN:
2332 dst->type = IPA_JF_UNKNOWN;
2333 break;
2334 case IPA_JF_KNOWN_TYPE:
2335 ipa_set_jf_known_type (dst,
2336 ipa_get_jf_known_type_offset (src),
2337 ipa_get_jf_known_type_base_type (src),
2338 ipa_get_jf_known_type_base_type (src));
2339 break;
2340 case IPA_JF_CONST:
2341 ipa_set_jf_cst_copy (dst, src);
2342 break;
2343
2344 case IPA_JF_PASS_THROUGH:
2345 {
2346 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2347 enum tree_code operation;
2348 operation = ipa_get_jf_pass_through_operation (src);
2349
2350 if (operation == NOP_EXPR)
2351 {
2352 bool agg_p, type_p;
2353 agg_p = dst_agg_p
2354 && ipa_get_jf_pass_through_agg_preserved (src);
2355 type_p = ipa_get_jf_pass_through_type_preserved (src)
2356 && ipa_get_jf_pass_through_type_preserved (dst);
2357 ipa_set_jf_simple_pass_through (dst, formal_id,
2358 agg_p, type_p);
2359 }
2360 else
2361 {
2362 tree operand = ipa_get_jf_pass_through_operand (src);
2363 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2364 operation);
2365 }
2366 break;
2367 }
2368 case IPA_JF_ANCESTOR:
2369 {
2370 bool agg_p, type_p;
2371 agg_p = dst_agg_p
2372 && ipa_get_jf_ancestor_agg_preserved (src);
2373 type_p = ipa_get_jf_ancestor_type_preserved (src)
2374 && ipa_get_jf_pass_through_type_preserved (dst);
2375 ipa_set_ancestor_jf (dst,
2376 ipa_get_jf_ancestor_offset (src),
2377 ipa_get_jf_ancestor_type (src),
2378 ipa_get_jf_ancestor_formal_id (src),
2379 agg_p, type_p);
2380 break;
2381 }
2382 default:
2383 gcc_unreachable ();
2384 }
8b7773a4
MJ
2385
2386 if (src->agg.items
b8f6e610 2387 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2388 {
2389 /* Currently we do not produce clobber aggregate jump
2390 functions, replace with merging when we do. */
2391 gcc_assert (!dst->agg.items);
2392
2393 dst->agg.by_ref = src->agg.by_ref;
9771b263 2394 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2395 }
b258210c
MJ
2396 }
2397 else
2398 dst->type = IPA_JF_UNKNOWN;
3e293154 2399 }
b258210c
MJ
2400 }
2401}
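/* A worked example of the combination above (illustrative only): suppose the
   inlined call CS passes the constant 5 as its third argument, and an edge E
   inside CS->callee has a NOP_EXPR pass-through jump function forwarding that
   third formal parameter.  After inlining, the pass-through is replaced with
   a copy of CS's constant jump function, so E's argument is known to be 5 in
   the combined body.  Conversely, when the formal_id points past the end of
   CS's argument list (e.g. for calls with a variable number of arguments),
   the jump function degrades to IPA_JF_UNKNOWN.  */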
2402
2403/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2404 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2405
3949c4a7 2406struct cgraph_edge *
81fa35bd 2407ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2408{
2409 struct cgraph_node *callee;
0f378cb5 2410 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2411 bool unreachable = false;
b258210c 2412
ceeffab0
MJ
2413 if (TREE_CODE (target) == ADDR_EXPR)
2414 target = TREE_OPERAND (target, 0);
b258210c 2415 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2416 {
2417 target = canonicalize_constructor_val (target, NULL);
2418 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2419 {
c13bc3d9
MJ
2420 if (ie->indirect_info->member_ptr)
2421 /* Member pointer call that goes through a VMT lookup. */
2422 return NULL;
2423
a0a7b611
JH
2424 if (dump_file)
2425 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2426 " in %s/%i, making it unreachable.\n",
67348ccc 2427 cgraph_node_name (ie->caller), ie->caller->order);
48b1474e
MJ
2428 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2429 callee = cgraph_get_create_node (target);
2430 unreachable = true;
a0a7b611 2431 }
48b1474e
MJ
2432 else
2433 callee = cgraph_get_node (target);
a0a7b611 2434 }
48b1474e
MJ
2435 else
2436 callee = cgraph_get_node (target);
a0a7b611
JH
2437
2438 /* Because may-edges are not explicitely represented and vtable may be external,
2439 we may create the first reference to the object in the unit. */
2440 if (!callee || callee->global.inlined_to)
2441 {
a0a7b611
JH
2442
 2443      /* We had better ensure we can refer to it.
2444 In the case of static functions we are out of luck, since we already
 2445	 removed their bodies. In the case of public functions we may or may
2446 not introduce the reference. */
2447 if (!canonicalize_constructor_val (target, NULL)
2448 || !TREE_PUBLIC (target))
2449 {
2450 if (dump_file)
2451 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2452 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
9de04252 2453 xstrdup (cgraph_node_name (ie->caller)),
67348ccc 2454 ie->caller->order,
9de04252 2455 xstrdup (cgraph_node_name (ie->callee)),
67348ccc 2456 ie->callee->order);
a0a7b611
JH
2457 return NULL;
2458 }
6f99e449 2459 callee = cgraph_get_create_node (target);
a0a7b611 2460 }
1dbee8c9 2461 ipa_check_create_node_params ();
ceeffab0 2462
81fa35bd
MJ
 2463  /* We cannot make edges to inline clones.  It is a bug that someone removed
2464 the cgraph node too early. */
17afc0fe
JH
2465 gcc_assert (!callee->global.inlined_to);
2466
48b1474e 2467 if (dump_file && !unreachable)
b258210c
MJ
2468 {
2469 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2470 "(%s/%i -> %s/%i), for stmt ",
b258210c 2471 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
9de04252 2472 xstrdup (cgraph_node_name (ie->caller)),
67348ccc 2473 ie->caller->order,
042ae7d2 2474 xstrdup (cgraph_node_name (callee)),
67348ccc 2475 callee->order);
b258210c
MJ
2476 if (ie->call_stmt)
2477 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2478 else
2479 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2480 }
2481 ie = cgraph_make_edge_direct (ie, callee);
2482 es = inline_edge_summary (ie);
2483 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2484 - eni_size_weights.call_cost);
2485 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2486 - eni_time_weights.call_cost);
749aa96d 2487
b258210c 2488 return ie;
3e293154
MJ
2489}
2490
8b7773a4
MJ
2491/* Retrieve value from aggregate jump function AGG for the given OFFSET or
 2492   return NULL if there is none.  BY_REF specifies whether the value has to
2493 be passed by reference or by value. */
2494
2495tree
2496ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2497 HOST_WIDE_INT offset, bool by_ref)
2498{
2499 struct ipa_agg_jf_item *item;
2500 int i;
2501
2502 if (by_ref != agg->by_ref)
2503 return NULL;
2504
9771b263 2505 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2506 if (item->offset == offset)
2507 {
2508 /* Currently we do not have clobber values, return NULL for them once
2509 we do. */
2510 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2511 return item->value;
2512 }
8b7773a4
MJ
2513 return NULL;
2514}
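/* Usage sketch (OFF stands for a placeholder offset, not a real constant):
   if an aggregate jump function records that a by-value aggregate argument
   contains the integer constant 42 at offset OFF, then

     ipa_find_agg_cst_for_param (&jfunc->agg, OFF, false)

   returns the INTEGER_CST for 42, whereas passing true for BY_REF returns
   NULL because the by-reference flags do not match.  */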
2515
4502fe8d 2516/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2517 reference description RDESC. Return true if the reference has been
2518 successfully found and removed. */
4502fe8d 2519
568cda29 2520static bool
5e20cdc9 2521remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2522{
2523 struct ipa_ref *to_del;
2524 struct cgraph_edge *origin;
2525
2526 origin = rdesc->cs;
a854f856
MJ
2527 if (!origin)
2528 return false;
67348ccc 2529 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2530 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2531 if (!to_del)
2532 return false;
2533
4502fe8d
MJ
2534 ipa_remove_reference (to_del);
2535 if (dump_file)
2536 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2537 xstrdup (cgraph_node_name (origin->caller)),
67348ccc 2538 origin->caller->order, xstrdup (symtab_node_name (symbol)));
568cda29 2539 return true;
4502fe8d
MJ
2540}
2541
2542/* If JFUNC has a reference description with refcount different from
2543 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2544 NULL. JFUNC must be a constant jump function. */
2545
2546static struct ipa_cst_ref_desc *
2547jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2548{
2549 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2550 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2551 return rdesc;
2552 else
2553 return NULL;
2554}
2555
568cda29
MJ
2556/* If the value of constant jump function JFUNC is an address of a function
2557 declaration, return the associated call graph node. Otherwise return
2558 NULL. */
2559
2560static cgraph_node *
2561cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2562{
2563 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2564 tree cst = ipa_get_jf_constant (jfunc);
2565 if (TREE_CODE (cst) != ADDR_EXPR
2566 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2567 return NULL;
2568
2569 return cgraph_get_node (TREE_OPERAND (cst, 0));
2570}
2571
2572
2573/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2574 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2575 the edge specified in the rdesc. Return false if either the symbol or the
2576 reference could not be found, otherwise return true. */
2577
2578static bool
2579try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2580{
2581 struct ipa_cst_ref_desc *rdesc;
2582 if (jfunc->type == IPA_JF_CONST
2583 && (rdesc = jfunc_rdesc_usable (jfunc))
2584 && --rdesc->refcount == 0)
2585 {
5e20cdc9 2586 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2587 if (!symbol)
2588 return false;
2589
2590 return remove_described_reference (symbol, rdesc);
2591 }
2592 return true;
2593}
2594
b258210c
MJ
2595/* Try to find a destination for indirect edge IE that corresponds to a simple
2596 call or a call of a member function pointer and where the destination is a
2597 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2598 determined, return the newly direct edge, otherwise return NULL.
2599 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2600
b258210c
MJ
2601static struct cgraph_edge *
2602try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2603 struct ipa_jump_func *jfunc,
2604 struct ipa_node_params *new_root_info)
b258210c 2605{
4502fe8d 2606 struct cgraph_edge *cs;
b258210c 2607 tree target;
042ae7d2 2608 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2609
8b7773a4 2610 if (ie->indirect_info->agg_contents)
d250540a
MJ
2611 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2612 ie->indirect_info->offset,
2613 ie->indirect_info->by_ref);
b258210c 2614 else
d250540a
MJ
2615 target = ipa_value_from_jfunc (new_root_info, jfunc);
2616 if (!target)
2617 return NULL;
4502fe8d
MJ
2618 cs = ipa_make_edge_direct_to_target (ie, target);
2619
a12cd2db 2620 if (cs && !agg_contents)
568cda29
MJ
2621 {
2622 bool ok;
2623 gcc_checking_assert (cs->callee
ae6d0907
MJ
2624 && (cs != ie
2625 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2626 || !cgraph_node_for_jfunc (jfunc)
2627 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2628 ok = try_decrement_rdesc_refcount (jfunc);
2629 gcc_checking_assert (ok);
2630 }
4502fe8d
MJ
2631
2632 return cs;
b258210c
MJ
2633}
2634
d250540a
MJ
2635/* Try to find a destination for indirect edge IE that corresponds to a virtual
2636 call based on a formal parameter which is described by jump function JFUNC
2637 and if it can be determined, make it direct and return the direct edge.
2638 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2639 are relative to. */
b258210c
MJ
2640
2641static struct cgraph_edge *
2642try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2643 struct ipa_jump_func *jfunc,
2644 struct ipa_node_params *new_root_info)
3e293154 2645{
c7573249 2646 tree binfo, target;
b258210c 2647
d250540a
MJ
2648 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2649
da942ca0 2650 if (!binfo)
b258210c 2651 return NULL;
3e293154 2652
da942ca0
JH
2653 if (TREE_CODE (binfo) != TREE_BINFO)
2654 {
c49bdb2e
JH
2655 binfo = gimple_extract_devirt_binfo_from_cst
2656 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2657 if (!binfo)
2658 return NULL;
2659 }
2660
d250540a 2661 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2662 ie->indirect_info->otr_type);
b258210c 2663 if (binfo)
c7573249
MJ
2664 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2665 binfo);
b258210c
MJ
2666 else
2667 return NULL;
2668
2669 if (target)
450ad0cd
JH
2670 {
2671#ifdef ENABLE_CHECKING
2672 gcc_assert (possible_polymorphic_call_target_p
2673 (ie, cgraph_get_node (target)));
2674#endif
2675 return ipa_make_edge_direct_to_target (ie, target);
2676 }
b258210c
MJ
2677 else
2678 return NULL;
3e293154
MJ
2679}
2680
 2681/* Update the indirect call information associated with NODE when CS is being inlined,
2682 assuming NODE is (potentially indirectly) inlined into CS->callee.
2683 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2684 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2685 unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
be95e2b9 2686
f8e2a1ed 2687static bool
e33c6cd6
MJ
2688update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2689 struct cgraph_node *node,
9771b263 2690 vec<cgraph_edge_p> *new_edges)
3e293154 2691{
9e97ff61 2692 struct ipa_edge_args *top;
b258210c 2693 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2694 struct ipa_node_params *new_root_info;
f8e2a1ed 2695 bool res = false;
3e293154 2696
e33c6cd6 2697 ipa_check_create_edge_args ();
9e97ff61 2698 top = IPA_EDGE_REF (cs);
d250540a
MJ
2699 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2700 ? cs->caller->global.inlined_to
2701 : cs->caller);
e33c6cd6
MJ
2702
2703 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2704 {
e33c6cd6 2705 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2706 struct ipa_jump_func *jfunc;
8b7773a4 2707 int param_index;
3e293154 2708
e33c6cd6 2709 next_ie = ie->next_callee;
3e293154 2710
5f902d76
JH
2711 if (ici->param_index == -1)
2712 continue;
e33c6cd6 2713
3e293154 2714 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2715 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2716 {
5ee53a06 2717 ici->param_index = -1;
3e293154
MJ
2718 continue;
2719 }
2720
8b7773a4
MJ
2721 param_index = ici->param_index;
2722 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2723
2724 if (!flag_indirect_inlining)
36b72910
JH
2725 new_direct_edge = NULL;
2726 else if (ici->polymorphic)
d250540a
MJ
2727 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2728 new_root_info);
b258210c 2729 else
d250540a
MJ
2730 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2731 new_root_info);
042ae7d2
JH
2732 /* If speculation was removed, then we need to do nothing. */
2733 if (new_direct_edge && new_direct_edge != ie)
2734 {
2735 new_direct_edge->indirect_inlining_edge = 1;
2736 top = IPA_EDGE_REF (cs);
2737 res = true;
2738 }
2739 else if (new_direct_edge)
685b0d13 2740 {
b258210c 2741 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2742 if (new_direct_edge->call_stmt)
2743 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2744 = !gimple_check_call_matching_types (
2745 new_direct_edge->call_stmt,
67348ccc 2746 new_direct_edge->callee->decl, false);
b258210c
MJ
2747 if (new_edges)
2748 {
9771b263 2749 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2750 res = true;
2751 }
042ae7d2 2752 top = IPA_EDGE_REF (cs);
685b0d13 2753 }
36b72910
JH
2754 else if (jfunc->type == IPA_JF_PASS_THROUGH
2755 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2756 {
2757 if (ici->agg_contents
2758 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2759 ici->param_index = -1;
2760 else
2761 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2762 }
2763 else if (jfunc->type == IPA_JF_ANCESTOR)
2764 {
2765 if (ici->agg_contents
2766 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2767 ici->param_index = -1;
2768 else
2769 {
2770 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2771 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2772 }
2773 }
2774 else
2775 /* Either we can find a destination for this edge now or never. */
2776 ici->param_index = -1;
3e293154 2777 }
e33c6cd6 2778
f8e2a1ed 2779 return res;
3e293154
MJ
2780}
2781
 2782/* Recursively traverse the subtree of NODE (including NODE itself) made of inlined
2783 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2784 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2785 update_jump_functions_after_inlining on all non-inlined edges that lead out
2786 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
 2787 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2788 created. */
be95e2b9 2789
f8e2a1ed 2790static bool
3e293154
MJ
2791propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2792 struct cgraph_node *node,
9771b263 2793 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2794{
2795 struct cgraph_edge *e;
f8e2a1ed 2796 bool res;
3e293154 2797
e33c6cd6 2798 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2799
2800 for (e = node->callees; e; e = e->next_callee)
2801 if (!e->inline_failed)
f8e2a1ed 2802 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2803 else
2804 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2805 for (e = node->indirect_calls; e; e = e->next_callee)
2806 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2807
2808 return res;
3e293154
MJ
2809}
2810
4502fe8d
MJ
2811/* Combine two controlled uses counts as done during inlining. */
2812
2813static int
2814combine_controlled_uses_counters (int c, int d)
2815{
2816 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2817 return IPA_UNDESCRIBED_USE;
2818 else
2819 return c + d - 1;
2820}
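/* A worked example (illustrative only): if the actual argument had c = 2
   described uses in the caller and the corresponding formal parameter had
   d = 3 described uses in the inlined callee, the combined count is
   2 + 3 - 1 = 4, because the call statement that accounted for one of the
   caller's uses disappears once the callee is inlined.  If either count is
   IPA_UNDESCRIBED_USE, the result stays IPA_UNDESCRIBED_USE.  */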
2821
 2822/* Propagate number of controlled uses from CS->callee to the new root of the
2823 tree of inlined nodes. */
2824
2825static void
2826propagate_controlled_uses (struct cgraph_edge *cs)
2827{
2828 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2829 struct cgraph_node *new_root = cs->caller->global.inlined_to
2830 ? cs->caller->global.inlined_to : cs->caller;
2831 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2832 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2833 int count, i;
2834
2835 count = MIN (ipa_get_cs_argument_count (args),
2836 ipa_get_param_count (old_root_info));
2837 for (i = 0; i < count; i++)
2838 {
2839 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2840 struct ipa_cst_ref_desc *rdesc;
2841
2842 if (jf->type == IPA_JF_PASS_THROUGH)
2843 {
2844 int src_idx, c, d;
2845 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2846 c = ipa_get_controlled_uses (new_root_info, src_idx);
2847 d = ipa_get_controlled_uses (old_root_info, i);
2848
2849 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2850 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2851 c = combine_controlled_uses_counters (c, d);
2852 ipa_set_controlled_uses (new_root_info, src_idx, c);
2853 if (c == 0 && new_root_info->ipcp_orig_node)
2854 {
2855 struct cgraph_node *n;
2856 struct ipa_ref *ref;
2857 tree t = new_root_info->known_vals[src_idx];
2858
2859 if (t && TREE_CODE (t) == ADDR_EXPR
2860 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2861 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2862 && (ref = ipa_find_reference (new_root,
2863 n, NULL, 0)))
4502fe8d
MJ
2864 {
2865 if (dump_file)
2866 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2867 "reference from %s/%i to %s/%i.\n",
2868 xstrdup (cgraph_node_name (new_root)),
67348ccc
DM
2869 new_root->order,
2870 xstrdup (cgraph_node_name (n)), n->order);
4502fe8d
MJ
2871 ipa_remove_reference (ref);
2872 }
2873 }
2874 }
2875 else if (jf->type == IPA_JF_CONST
2876 && (rdesc = jfunc_rdesc_usable (jf)))
2877 {
2878 int d = ipa_get_controlled_uses (old_root_info, i);
2879 int c = rdesc->refcount;
2880 rdesc->refcount = combine_controlled_uses_counters (c, d);
2881 if (rdesc->refcount == 0)
2882 {
2883 tree cst = ipa_get_jf_constant (jf);
2884 struct cgraph_node *n;
2885 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2886 && TREE_CODE (TREE_OPERAND (cst, 0))
2887 == FUNCTION_DECL);
2888 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2889 if (n)
2890 {
2891 struct cgraph_node *clone;
568cda29 2892 bool ok;
67348ccc 2893 ok = remove_described_reference (n, rdesc);
568cda29 2894 gcc_checking_assert (ok);
4502fe8d
MJ
2895
2896 clone = cs->caller;
2897 while (clone->global.inlined_to
2898 && clone != rdesc->cs->caller
2899 && IPA_NODE_REF (clone)->ipcp_orig_node)
2900 {
2901 struct ipa_ref *ref;
67348ccc
DM
2902 ref = ipa_find_reference (clone,
2903 n, NULL, 0);
4502fe8d
MJ
2904 if (ref)
2905 {
2906 if (dump_file)
2907 fprintf (dump_file, "ipa-prop: Removing "
2908 "cloning-created reference "
2909 "from %s/%i to %s/%i.\n",
2910 xstrdup (cgraph_node_name (clone)),
67348ccc 2911 clone->order,
4502fe8d 2912 xstrdup (cgraph_node_name (n)),
67348ccc 2913 n->order);
4502fe8d
MJ
2914 ipa_remove_reference (ref);
2915 }
2916 clone = clone->callers->caller;
2917 }
2918 }
2919 }
2920 }
2921 }
2922
2923 for (i = ipa_get_param_count (old_root_info);
2924 i < ipa_get_cs_argument_count (args);
2925 i++)
2926 {
2927 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2928
2929 if (jf->type == IPA_JF_CONST)
2930 {
2931 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2932 if (rdesc)
2933 rdesc->refcount = IPA_UNDESCRIBED_USE;
2934 }
2935 else if (jf->type == IPA_JF_PASS_THROUGH)
2936 ipa_set_controlled_uses (new_root_info,
2937 jf->value.pass_through.formal_id,
2938 IPA_UNDESCRIBED_USE);
2939 }
2940}
2941
3e293154
MJ
2942/* Update jump functions and call note functions on inlining the call site CS.
2943 CS is expected to lead to a node already cloned by
2944 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
 2945 *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2946 created. */
be95e2b9 2947
f8e2a1ed 2948bool
3e293154 2949ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2950 vec<cgraph_edge_p> *new_edges)
3e293154 2951{
5ee53a06 2952 bool changed;
f8e2a1ed
MJ
2953 /* Do nothing if the preparation phase has not been carried out yet
2954 (i.e. during early inlining). */
9771b263 2955 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2956 return false;
2957 gcc_assert (ipa_edge_args_vector);
2958
4502fe8d 2959 propagate_controlled_uses (cs);
5ee53a06
JH
2960 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2961
5ee53a06 2962 return changed;
518dc859
RL
2963}
2964
771578a0
MJ
2965/* Frees all dynamically allocated structures that the argument info points
2966 to. */
be95e2b9 2967
518dc859 2968void
771578a0 2969ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2970{
9771b263 2971 vec_free (args->jump_functions);
771578a0 2972 memset (args, 0, sizeof (*args));
518dc859
RL
2973}
2974
771578a0 2975/* Free all ipa_edge structures. */
be95e2b9 2976
518dc859 2977void
771578a0 2978ipa_free_all_edge_args (void)
518dc859 2979{
771578a0
MJ
2980 int i;
2981 struct ipa_edge_args *args;
518dc859 2982
9771b263
DN
2983 if (!ipa_edge_args_vector)
2984 return;
2985
2986 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2987 ipa_free_edge_args_substructures (args);
2988
9771b263 2989 vec_free (ipa_edge_args_vector);
518dc859
RL
2990}
2991
771578a0
MJ
2992/* Frees all dynamically allocated structures that the param info points
2993 to. */
be95e2b9 2994
518dc859 2995void
771578a0 2996ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 2997{
9771b263 2998 info->descriptors.release ();
310bc633
MJ
2999 free (info->lattices);
 3000  /* Lattice values and their sources are deallocated with their allocation
3001 pool. */
9771b263 3002 info->known_vals.release ();
771578a0 3003 memset (info, 0, sizeof (*info));
518dc859
RL
3004}
3005
771578a0 3006/* Free all ipa_node_params structures. */
be95e2b9 3007
518dc859 3008void
771578a0 3009ipa_free_all_node_params (void)
518dc859 3010{
771578a0
MJ
3011 int i;
3012 struct ipa_node_params *info;
518dc859 3013
9771b263 3014 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3015 ipa_free_node_params_substructures (info);
3016
9771b263 3017 ipa_node_params_vector.release ();
771578a0
MJ
3018}
3019
2c9561b5
MJ
3020/* Set the aggregate replacements of NODE to be AGGVALS. */
3021
3022void
3023ipa_set_node_agg_value_chain (struct cgraph_node *node,
3024 struct ipa_agg_replacement_value *aggvals)
3025{
9771b263
DN
3026 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3027 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3028
9771b263 3029 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3030}
3031
771578a0 3032/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3033
771578a0 3034static void
5c0466b5 3035ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3036{
568cda29
MJ
3037 struct ipa_edge_args *args;
3038
3039 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3040 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3041 return;
568cda29
MJ
3042
3043 args = IPA_EDGE_REF (cs);
3044 if (args->jump_functions)
3045 {
3046 struct ipa_jump_func *jf;
3047 int i;
3048 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3049 {
3050 struct ipa_cst_ref_desc *rdesc;
3051 try_decrement_rdesc_refcount (jf);
3052 if (jf->type == IPA_JF_CONST
3053 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3054 && rdesc->cs == cs)
3055 rdesc->cs = NULL;
3056 }
568cda29
MJ
3057 }
3058
771578a0 3059 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3060}
3061
771578a0 3062/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3063
771578a0 3064static void
5c0466b5 3065ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3066{
dd6d1ad7 3067  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
9771b263 3068 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3069 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3070 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3071 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3072}
3073
8b7773a4 3074/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3075
771578a0
MJ
3076static void
3077ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3078 __attribute__((unused)) void *data)
771578a0
MJ
3079{
3080 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3081 unsigned int i;
771578a0
MJ
3082
3083 ipa_check_create_edge_args ();
3084
3085 old_args = IPA_EDGE_REF (src);
3086 new_args = IPA_EDGE_REF (dst);
3087
9771b263 3088 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3089
9771b263 3090 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3091 {
3092 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3093 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3094
3095 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3096
3097 if (src_jf->type == IPA_JF_CONST)
3098 {
3099 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3100
3101 if (!src_rdesc)
3102 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3103 else if (src->caller == dst->caller)
3104 {
3105 struct ipa_ref *ref;
5e20cdc9 3106 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3107 gcc_checking_assert (n);
67348ccc 3108 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3109 src->call_stmt, src->lto_stmt_uid);
3110 gcc_checking_assert (ref);
67348ccc 3111 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3112
3113 gcc_checking_assert (ipa_refdesc_pool);
3114 struct ipa_cst_ref_desc *dst_rdesc
3115 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3116 dst_rdesc->cs = dst;
3117 dst_rdesc->refcount = src_rdesc->refcount;
3118 dst_rdesc->next_duplicate = NULL;
3119 dst_jf->value.constant.rdesc = dst_rdesc;
3120 }
4502fe8d
MJ
3121 else if (src_rdesc->cs == src)
3122 {
3123 struct ipa_cst_ref_desc *dst_rdesc;
3124 gcc_checking_assert (ipa_refdesc_pool);
3125 dst_rdesc
3126 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3127 dst_rdesc->cs = dst;
4502fe8d 3128 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3129 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3130 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3131 dst_jf->value.constant.rdesc = dst_rdesc;
3132 }
3133 else
3134 {
3135 struct ipa_cst_ref_desc *dst_rdesc;
3136 /* This can happen during inlining, when a JFUNC can refer to a
3137 reference taken in a function up in the tree of inline clones.
3138 We need to find the duplicate that refers to our tree of
3139 inline clones. */
3140
3141 gcc_assert (dst->caller->global.inlined_to);
3142 for (dst_rdesc = src_rdesc->next_duplicate;
3143 dst_rdesc;
3144 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3145 {
3146 struct cgraph_node *top;
3147 top = dst_rdesc->cs->caller->global.inlined_to
3148 ? dst_rdesc->cs->caller->global.inlined_to
3149 : dst_rdesc->cs->caller;
3150 if (dst->caller->global.inlined_to == top)
3151 break;
3152 }
44a60244 3153 gcc_assert (dst_rdesc);
4502fe8d
MJ
3154 dst_jf->value.constant.rdesc = dst_rdesc;
3155 }
3156 }
3157 }
771578a0
MJ
3158}
3159
3160/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3161
771578a0
MJ
3162static void
3163ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3164 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3165{
3166 struct ipa_node_params *old_info, *new_info;
2c9561b5 3167 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3168
3169 ipa_check_create_node_params ();
3170 old_info = IPA_NODE_REF (src);
3171 new_info = IPA_NODE_REF (dst);
771578a0 3172
9771b263 3173 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3174 new_info->lattices = NULL;
771578a0 3175 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3176
3949c4a7
MJ
3177 new_info->uses_analysis_done = old_info->uses_analysis_done;
3178 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3179
3180 old_av = ipa_get_agg_replacements_for_node (src);
3181 if (!old_av)
3182 return;
3183
3184 new_av = NULL;
3185 while (old_av)
3186 {
3187 struct ipa_agg_replacement_value *v;
3188
3189 v = ggc_alloc_ipa_agg_replacement_value ();
3190 memcpy (v, old_av, sizeof (*v));
3191 v->next = new_av;
3192 new_av = v;
3193 old_av = old_av->next;
3194 }
3195 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3196}
3197
40982661
JH
3198
3199/* Analyze newly added function into callgraph. */
3200
3201static void
3202ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3203{
3204 ipa_analyze_node (node);
3205}
3206
771578a0 3207/* Register our cgraph hooks if they are not already there. */
be95e2b9 3208
518dc859 3209void
771578a0 3210ipa_register_cgraph_hooks (void)
518dc859 3211{
771578a0
MJ
3212 if (!edge_removal_hook_holder)
3213 edge_removal_hook_holder =
3214 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3215 if (!node_removal_hook_holder)
3216 node_removal_hook_holder =
3217 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3218 if (!edge_duplication_hook_holder)
3219 edge_duplication_hook_holder =
3220 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3221 if (!node_duplication_hook_holder)
3222 node_duplication_hook_holder =
3223 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3224 function_insertion_hook_holder =
3225 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3226}
518dc859 3227
771578a0 3228/* Unregister our cgraph hooks.  */
be95e2b9 3229
771578a0
MJ
3230static void
3231ipa_unregister_cgraph_hooks (void)
3232{
3233 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3234 edge_removal_hook_holder = NULL;
3235 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3236 node_removal_hook_holder = NULL;
3237 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3238 edge_duplication_hook_holder = NULL;
3239 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3240 node_duplication_hook_holder = NULL;
40982661
JH
3241 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3242 function_insertion_hook_holder = NULL;
771578a0
MJ
3243}
3244
3245/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3246 longer needed after ipa-cp. */
be95e2b9 3247
771578a0 3248void
e33c6cd6 3249ipa_free_all_structures_after_ipa_cp (void)
3e293154 3250{
5ee53a06 3251 if (!optimize)
3e293154
MJ
3252 {
3253 ipa_free_all_edge_args ();
3254 ipa_free_all_node_params ();
310bc633
MJ
3255 free_alloc_pool (ipcp_sources_pool);
3256 free_alloc_pool (ipcp_values_pool);
2c9561b5 3257 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3258 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3259 if (ipa_refdesc_pool)
3260 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3261 }
3262}
3263
3264/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3265 longer needed after indirect inlining. */
be95e2b9 3266
3e293154 3267void
e33c6cd6 3268ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3269{
3270 ipa_free_all_edge_args ();
3271 ipa_free_all_node_params ();
3272 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3273 if (ipcp_sources_pool)
3274 free_alloc_pool (ipcp_sources_pool);
3275 if (ipcp_values_pool)
3276 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3277 if (ipcp_agg_lattice_pool)
3278 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3279 if (ipa_refdesc_pool)
3280 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3281}
3282
dcd416e3 3283/* Print the ipa_node_params parameter descriptors of function NODE to
518dc859 3284   F.  */
be95e2b9 3285
518dc859 3286void
2c9561b5 3287ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3288{
3289 int i, count;
3e293154 3290 struct ipa_node_params *info;
518dc859 3291
67348ccc 3292 if (!node->definition)
3e293154
MJ
3293 return;
3294 info = IPA_NODE_REF (node);
9de04252 3295 fprintf (f, " function %s/%i parameter descriptors:\n",
67348ccc 3296 cgraph_node_name (node), node->order);
3e293154
MJ
3297 count = ipa_get_param_count (info);
3298 for (i = 0; i < count; i++)
518dc859 3299 {
4502fe8d
MJ
3300 int c;
3301
e067bd43 3302 ipa_dump_param (f, info, i);
339f49ec
JH
3303 if (ipa_is_param_used (info, i))
3304 fprintf (f, " used");
4502fe8d
MJ
3305 c = ipa_get_controlled_uses (info, i);
3306 if (c == IPA_UNDESCRIBED_USE)
3307 fprintf (f, " undescribed_use");
3308 else
3309 fprintf (f, " controlled_uses=%i", c);
3e293154 3310 fprintf (f, "\n");
518dc859
RL
3311 }
3312}
dcd416e3 3313
ca30a539 3314/* Print ipa_node_params data structures of all functions in the
3e293154 3315   callgraph to F.  */
be95e2b9 3316
3e293154 3317void
ca30a539 3318ipa_print_all_params (FILE * f)
3e293154
MJ
3319{
3320 struct cgraph_node *node;
3321
ca30a539 3322 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3323 FOR_EACH_FUNCTION (node)
ca30a539 3324 ipa_print_node_params (f, node);
3e293154 3325}
3f84bf08
MJ
3326
3327/* Return a heap allocated vector containing formal parameters of FNDECL. */
3328
9771b263 3329vec<tree>
3f84bf08
MJ
3330ipa_get_vector_of_formal_parms (tree fndecl)
3331{
9771b263 3332 vec<tree> args;
3f84bf08
MJ
3333 int count;
3334 tree parm;
3335
0e8853ee 3336 gcc_assert (!flag_wpa);
310bc633 3337 count = count_formal_params (fndecl);
9771b263 3338 args.create (count);
910ad8de 3339 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3340 args.quick_push (parm);
3f84bf08
MJ
3341
3342 return args;
3343}
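
/* A minimal usage sketch, not part of GCC itself: one way a caller could
   consume the vector returned by ipa_get_vector_of_formal_parms above.
   The function name print_formal_parms_example is hypothetical; only vec
   and pretty-print APIs already used in this file are assumed.  */

static void ATTRIBUTE_UNUSED
print_formal_parms_example (FILE *f, tree fndecl)
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  int i;
  tree parm;

  FOR_EACH_VEC_ELT (parms, i, parm)
    {
      print_generic_expr (f, parm, 0);
      fprintf (f, "\n");
    }
  /* The vector is heap allocated, so the caller must release it.  */
  parms.release ();
}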
3344
3345/* Return a heap allocated vector containing types of formal parameters of
3346 function type FNTYPE. */
3347
9771b263 3348static inline vec<tree>
3f84bf08
MJ
3349get_vector_of_formal_parm_types (tree fntype)
3350{
9771b263 3351 vec<tree> types;
3f84bf08
MJ
3352 int count = 0;
3353 tree t;
3354
3355 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3356 count++;
3357
9771b263 3358 types.create (count);
3f84bf08 3359 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3360 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3361
3362 return types;
3363}
3364
3365/* Modify the function declaration FNDECL and its type according to the plan in
3366 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3367 to reflect the actual parameters being modified which are determined by the
3368 base_index field. */
3369
3370void
3371ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3372 const char *synth_parm_prefix)
3373{
9771b263 3374 vec<tree> oparms, otypes;
3f84bf08
MJ
3375 tree orig_type, new_type = NULL;
3376 tree old_arg_types, t, new_arg_types = NULL;
3377 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 3378 int i, len = adjustments.length ();
3f84bf08
MJ
3379 tree new_reversed = NULL;
3380 bool care_for_types, last_parm_void;
3381
3382 if (!synth_parm_prefix)
3383 synth_parm_prefix = "SYNTH";
3384
3385 oparms = ipa_get_vector_of_formal_parms (fndecl);
3386 orig_type = TREE_TYPE (fndecl);
3387 old_arg_types = TYPE_ARG_TYPES (orig_type);
3388
 3389  /* The following test is an ugly hack: some functions simply don't have any
 3390     arguments in their type.  This is probably a bug but we must cope with it.  */
3391 care_for_types = (old_arg_types != NULL_TREE);
3392 if (care_for_types)
3393 {
3394 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3395 == void_type_node);
3396 otypes = get_vector_of_formal_parm_types (orig_type);
3397 if (last_parm_void)
9771b263 3398 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3399 else
9771b263 3400 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3401 }
3402 else
3403 {
3404 last_parm_void = false;
9771b263 3405 otypes.create (0);
3f84bf08
MJ
3406 }
3407
3408 for (i = 0; i < len; i++)
3409 {
3410 struct ipa_parm_adjustment *adj;
3411 gcc_assert (link);
3412
9771b263
DN
3413 adj = &adjustments[i];
3414 parm = oparms[adj->base_index];
3f84bf08
MJ
3415 adj->base = parm;
3416
3417 if (adj->copy_param)
3418 {
3419 if (care_for_types)
9771b263 3420 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3421 new_arg_types);
3422 *link = parm;
910ad8de 3423 link = &DECL_CHAIN (parm);
3f84bf08
MJ
3424 }
3425 else if (!adj->remove_param)
3426 {
3427 tree new_parm;
3428 tree ptype;
3429
3430 if (adj->by_ref)
3431 ptype = build_pointer_type (adj->type);
3432 else
3433 ptype = adj->type;
3434
3435 if (care_for_types)
3436 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3437
3438 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3439 ptype);
3440 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3441
3442 DECL_ARTIFICIAL (new_parm) = 1;
3443 DECL_ARG_TYPE (new_parm) = ptype;
3444 DECL_CONTEXT (new_parm) = fndecl;
3445 TREE_USED (new_parm) = 1;
3446 DECL_IGNORED_P (new_parm) = 1;
3447 layout_decl (new_parm, 0);
3448
3f84bf08
MJ
3449 adj->base = parm;
3450 adj->reduction = new_parm;
3451
3452 *link = new_parm;
3453
910ad8de 3454 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3455 }
3456 }
3457
3458 *link = NULL_TREE;
3459
3460 if (care_for_types)
3461 {
3462 new_reversed = nreverse (new_arg_types);
3463 if (last_parm_void)
3464 {
3465 if (new_reversed)
3466 TREE_CHAIN (new_arg_types) = void_list_node;
3467 else
3468 new_reversed = void_list_node;
3469 }
3470 }
3471
 3472  /* Copy the original type so as to preserve as much as possible from it
 3473     (debug info, attribute lists etc.).
 3474     The exception is METHOD_TYPEs, which must have a THIS argument;
 3475     when we are asked to remove it, we need to build a new FUNCTION_TYPE
 3476     instead.  */
3477 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
3478 || (adjustments[0].copy_param
3479 && adjustments[0].base_index == 0))
3f84bf08 3480 {
4eb3f32c 3481 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3482 TYPE_ARG_TYPES (new_type) = new_reversed;
3483 }
3484 else
3485 {
3486 new_type
3487 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3488 new_reversed));
3489 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3490 DECL_VINDEX (fndecl) = NULL_TREE;
3491 }
3492
d402c33d
JH
3493 /* When signature changes, we need to clear builtin info. */
3494 if (DECL_BUILT_IN (fndecl))
3495 {
3496 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3497 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3498 }
3499
3f84bf08
MJ
3500 /* This is a new type, not a copy of an old type. Need to reassociate
3501 variants. We can handle everything except the main variant lazily. */
3502 t = TYPE_MAIN_VARIANT (orig_type);
3503 if (orig_type != t)
3504 {
3505 TYPE_MAIN_VARIANT (new_type) = t;
3506 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3507 TYPE_NEXT_VARIANT (t) = new_type;
3508 }
3509 else
3510 {
3511 TYPE_MAIN_VARIANT (new_type) = new_type;
3512 TYPE_NEXT_VARIANT (new_type) = NULL;
3513 }
3514
3515 TREE_TYPE (fndecl) = new_type;
9b389a5e 3516 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3517 otypes.release ();
3518 oparms.release ();
3f84bf08
MJ
3519}
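
/* Illustrative sketch only, not part of GCC: how a pass might build an
   adjustment vector for ipa_modify_formal_parameters above.  It keeps
   parameter 0 unchanged and replaces parameter 1 with an integer taken
   from bit offset 0 of the original aggregate.  The helper name
   build_example_adjustments and the choice of types are hypothetical.  */

static ipa_parm_adjustment_vec ATTRIBUTE_UNUSED
build_example_adjustments (void)
{
  ipa_parm_adjustment_vec adjustments;
  struct ipa_parm_adjustment adj;

  adjustments.create (2);

  /* Keep the first parameter as it is.  */
  memset (&adj, 0, sizeof (adj));
  adj.base_index = 0;
  adj.copy_param = 1;
  adjustments.quick_push (adj);

  /* Replace the second parameter with a scalar passed by value.  */
  memset (&adj, 0, sizeof (adj));
  adj.base_index = 1;
  adj.type = integer_type_node;
  adj.alias_ptr_type = ptr_type_node;
  adj.offset = 0;	/* Offset within the original aggregate, in bits.  */
  adjustments.quick_push (adj);

  return adjustments;
}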
3520
 3521/* Modify actual arguments of the function call in statement STMT as indicated
 3522   in ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL;
 3523   otherwise it must be the corresponding call graph edge.  */
3524
3525void
3526ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3527 ipa_parm_adjustment_vec adjustments)
3528{
82338059 3529 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3530 vec<tree> vargs;
3531 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3532 gimple new_stmt;
82338059 3533 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3534 tree callee_decl;
3535 int i, len;
3536
9771b263
DN
3537 len = adjustments.length ();
3538 vargs.create (len);
67348ccc
DM
3539 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3540 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3541
3542 gsi = gsi_for_stmt (stmt);
82338059
MJ
3543 prev_gsi = gsi;
3544 gsi_prev (&prev_gsi);
3f84bf08
MJ
3545 for (i = 0; i < len; i++)
3546 {
3547 struct ipa_parm_adjustment *adj;
3548
9771b263 3549 adj = &adjustments[i];
3f84bf08
MJ
3550
3551 if (adj->copy_param)
3552 {
3553 tree arg = gimple_call_arg (stmt, adj->base_index);
3554
9771b263 3555 vargs.quick_push (arg);
3f84bf08
MJ
3556 }
3557 else if (!adj->remove_param)
3558 {
fffe1e40
MJ
3559 tree expr, base, off;
3560 location_t loc;
f43245d1 3561 unsigned int deref_align = 0;
c1ed6a01 3562 bool deref_base = false;
fffe1e40
MJ
3563
 3564	  /* We create a new parameter out of the value of the old one; we can
 3565	     do the following kinds of transformations:
3566
3567 - A scalar passed by reference is converted to a scalar passed by
3568 value. (adj->by_ref is false and the type of the original
3569 actual argument is a pointer to a scalar).
3570
3571 - A part of an aggregate is passed instead of the whole aggregate.
3572 The part can be passed either by value or by reference, this is
3573 determined by value of adj->by_ref. Moreover, the code below
3574 handles both situations when the original aggregate is passed by
3575 value (its type is not a pointer) and when it is passed by
3576 reference (it is a pointer to an aggregate).
3577
3578 When the new argument is passed by reference (adj->by_ref is true)
3579 it must be a part of an aggregate and therefore we form it by
3580 simply taking the address of a reference inside the original
3581 aggregate. */
3582
3583 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3584 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3585 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3586 : EXPR_LOCATION (base);
fffe1e40 3587
82d49829
MJ
3588 if (TREE_CODE (base) != ADDR_EXPR
3589 && POINTER_TYPE_P (TREE_TYPE (base)))
3590 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3591 adj->offset / BITS_PER_UNIT);
3f84bf08 3592 else
3f84bf08 3593 {
fffe1e40
MJ
3594 HOST_WIDE_INT base_offset;
3595 tree prev_base;
c1ed6a01 3596 bool addrof;
fffe1e40
MJ
3597
3598 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3599 {
3600 base = TREE_OPERAND (base, 0);
3601 addrof = true;
3602 }
3603 else
3604 addrof = false;
fffe1e40
MJ
3605 prev_base = base;
3606 base = get_addr_base_and_unit_offset (base, &base_offset);
3607 /* Aggregate arguments can have non-invariant addresses. */
3608 if (!base)
3609 {
3610 base = build_fold_addr_expr (prev_base);
82d49829 3611 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3612 adj->offset / BITS_PER_UNIT);
3613 }
3614 else if (TREE_CODE (base) == MEM_REF)
3615 {
c1ed6a01
MJ
3616 if (!addrof)
3617 {
3618 deref_base = true;
3619 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3620 }
82d49829 3621 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3622 base_offset
3623 + adj->offset / BITS_PER_UNIT);
3624 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3625 off);
fffe1e40
MJ
3626 base = TREE_OPERAND (base, 0);
3627 }
3628 else
3629 {
82d49829 3630 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3631 base_offset
3632 + adj->offset / BITS_PER_UNIT);
3633 base = build_fold_addr_expr (base);
3634 }
3f84bf08 3635 }
fffe1e40 3636
3a5a825a
RG
3637 if (!adj->by_ref)
3638 {
3639 tree type = adj->type;
3640 unsigned int align;
3641 unsigned HOST_WIDE_INT misalign;
644ffefd 3642
c1ed6a01
MJ
3643 if (deref_base)
3644 {
3645 align = deref_align;
3646 misalign = 0;
3647 }
3648 else
3649 {
3650 get_pointer_alignment_1 (base, &align, &misalign);
3651 if (TYPE_ALIGN (type) > align)
3652 align = TYPE_ALIGN (type);
3653 }
27bcd47c
LC
3654 misalign += (tree_to_double_int (off)
3655 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3656 * BITS_PER_UNIT);
3657 misalign = misalign & (align - 1);
3658 if (misalign != 0)
3659 align = (misalign & -misalign);
3660 if (align < TYPE_ALIGN (type))
3661 type = build_aligned_type (type, align);
3662 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3663 }
3664 else
3665 {
3666 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3667 expr = build_fold_addr_expr (expr);
3668 }
fffe1e40 3669
3f84bf08
MJ
3670 expr = force_gimple_operand_gsi (&gsi, expr,
3671 adj->by_ref
3672 || is_gimple_reg_type (adj->type),
3673 NULL, true, GSI_SAME_STMT);
9771b263 3674 vargs.quick_push (expr);
3f84bf08 3675 }
ddb555ed
JJ
3676 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3677 {
3678 unsigned int ix;
3679 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3680 gimple def_temp;
3681
3682 arg = gimple_call_arg (stmt, adj->base_index);
3683 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3684 {
3685 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3686 continue;
3687 arg = fold_convert_loc (gimple_location (stmt),
3688 TREE_TYPE (origin), arg);
3689 }
3690 if (debug_args == NULL)
3691 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3692 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3693 if (ddecl == origin)
3694 {
9771b263 3695 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3696 break;
3697 }
3698 if (ddecl == NULL)
3699 {
3700 ddecl = make_node (DEBUG_EXPR_DECL);
3701 DECL_ARTIFICIAL (ddecl) = 1;
3702 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3703 DECL_MODE (ddecl) = DECL_MODE (origin);
3704
9771b263
DN
3705 vec_safe_push (*debug_args, origin);
3706 vec_safe_push (*debug_args, ddecl);
ddb555ed 3707 }
9771b263 3708 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3709 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3710 }
3f84bf08
MJ
3711 }
3712
3713 if (dump_file && (dump_flags & TDF_DETAILS))
3714 {
3715 fprintf (dump_file, "replacing stmt:");
3716 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3717 }
3718
3f84bf08 3719 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3720 vargs.release ();
3f84bf08
MJ
3721 if (gimple_call_lhs (stmt))
3722 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3723
3724 gimple_set_block (new_stmt, gimple_block (stmt));
3725 if (gimple_has_location (stmt))
3726 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3727 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3728 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3729
3730 if (dump_file && (dump_flags & TDF_DETAILS))
3731 {
3732 fprintf (dump_file, "with stmt:");
3733 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3734 fprintf (dump_file, "\n");
3735 }
3736 gsi_replace (&gsi, new_stmt, true);
3737 if (cs)
3738 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3739 do
3740 {
3741 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3742 gsi_prev (&gsi);
3743 }
3744 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3745 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3746
3f84bf08
MJ
3747 update_ssa (TODO_update_ssa);
3748 free_dominance_info (CDI_DOMINATORS);
3749}
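
/* Example of the rewrite performed by ipa_modify_call_arguments above
   (illustrative, not taken from a real dump).  For an original call

       foo (&s, i);

   with adjustments that pass only the int at byte offset 4 of the first
   aggregate by value and keep the second argument, the call becomes
   roughly

       SYNTH.1_5 = MEM[(int *)&s + 4B];
       foo.clone (SYNTH.1_5, i);

   where the MEM_REF is built and gimplified by the code above and the
   callee is whatever clone CS points to.  */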
3750
3751/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3752
3753static bool
3754index_in_adjustments_multiple_times_p (int base_index,
3755 ipa_parm_adjustment_vec adjustments)
3756{
9771b263 3757 int i, len = adjustments.length ();
3f84bf08
MJ
3758 bool one = false;
3759
3760 for (i = 0; i < len; i++)
3761 {
3762 struct ipa_parm_adjustment *adj;
9771b263 3763 adj = &adjustments[i];
3f84bf08
MJ
3764
3765 if (adj->base_index == base_index)
3766 {
3767 if (one)
3768 return true;
3769 else
3770 one = true;
3771 }
3772 }
3773 return false;
3774}
3775
3776
3777/* Return adjustments that should have the same effect on function parameters
3778 and call arguments as if they were first changed according to adjustments in
3779 INNER and then by adjustments in OUTER. */
3780
3781ipa_parm_adjustment_vec
3782ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3783 ipa_parm_adjustment_vec outer)
3784{
9771b263
DN
3785 int i, outlen = outer.length ();
3786 int inlen = inner.length ();
3f84bf08
MJ
3787 int removals = 0;
3788 ipa_parm_adjustment_vec adjustments, tmp;
3789
9771b263 3790 tmp.create (inlen);
3f84bf08
MJ
3791 for (i = 0; i < inlen; i++)
3792 {
3793 struct ipa_parm_adjustment *n;
9771b263 3794 n = &inner[i];
3f84bf08
MJ
3795
3796 if (n->remove_param)
3797 removals++;
3798 else
9771b263 3799 tmp.quick_push (*n);
3f84bf08
MJ
3800 }
3801
9771b263 3802 adjustments.create (outlen + removals);
3f84bf08
MJ
3803 for (i = 0; i < outlen; i++)
3804 {
f32682ca 3805 struct ipa_parm_adjustment r;
9771b263
DN
3806 struct ipa_parm_adjustment *out = &outer[i];
3807 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3808
f32682ca 3809 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3810 gcc_assert (!in->remove_param);
3811 if (out->remove_param)
3812 {
3813 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3814 {
f32682ca 3815 r.remove_param = true;
9771b263 3816 adjustments.quick_push (r);
3f84bf08
MJ
3817 }
3818 continue;
3819 }
3820
f32682ca
DN
3821 r.base_index = in->base_index;
3822 r.type = out->type;
3f84bf08
MJ
3823
3824 /* FIXME: Create nonlocal value too. */
3825
3826 if (in->copy_param && out->copy_param)
f32682ca 3827 r.copy_param = true;
3f84bf08 3828 else if (in->copy_param)
f32682ca 3829 r.offset = out->offset;
3f84bf08 3830 else if (out->copy_param)
f32682ca 3831 r.offset = in->offset;
3f84bf08 3832 else
f32682ca 3833 r.offset = in->offset + out->offset;
9771b263 3834 adjustments.quick_push (r);
3f84bf08
MJ
3835 }
3836
3837 for (i = 0; i < inlen; i++)
3838 {
9771b263 3839 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3840
3841 if (n->remove_param)
9771b263 3842 adjustments.quick_push (*n);
3f84bf08
MJ
3843 }
3844
9771b263 3845 tmp.release ();
3f84bf08
MJ
3846 return adjustments;
3847}
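
/* Worked example for ipa_combine_adjustments (illustrative): let the
   original function have parameters (a, b, c).  If INNER copies a and c
   and removes b, the intermediate clone has parameters (a, c).  If OUTER,
   expressed in terms of that clone, copies its first parameter and
   removes its second, the combined vector copies the original a and
   records the removal of both b and c, which is exactly the effect of
   applying INNER first and OUTER second.  */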
3848
 3849/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
 3850   human-friendly way, assuming they are meant to be applied to FNDECL.  */
3851
3852void
3853ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3854 tree fndecl)
3855{
9771b263 3856 int i, len = adjustments.length ();
3f84bf08 3857 bool first = true;
9771b263 3858 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3859
3860 fprintf (file, "IPA param adjustments: ");
3861 for (i = 0; i < len; i++)
3862 {
3863 struct ipa_parm_adjustment *adj;
9771b263 3864 adj = &adjustments[i];
3f84bf08
MJ
3865
3866 if (!first)
3867 fprintf (file, " ");
3868 else
3869 first = false;
3870
3871 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3872 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3873 if (adj->base)
3874 {
3875 fprintf (file, ", base: ");
3876 print_generic_expr (file, adj->base, 0);
3877 }
3878 if (adj->reduction)
3879 {
3880 fprintf (file, ", reduction: ");
3881 print_generic_expr (file, adj->reduction, 0);
3882 }
3883 if (adj->new_ssa_base)
3884 {
3885 fprintf (file, ", new_ssa_base: ");
3886 print_generic_expr (file, adj->new_ssa_base, 0);
3887 }
3888
3889 if (adj->copy_param)
3890 fprintf (file, ", copy_param");
3891 else if (adj->remove_param)
3892 fprintf (file, ", remove_param");
3893 else
3894 fprintf (file, ", offset %li", (long) adj->offset);
3895 if (adj->by_ref)
3896 fprintf (file, ", by_ref");
3897 print_node_brief (file, ", type: ", adj->type, 0);
3898 fprintf (file, "\n");
3899 }
9771b263 3900 parms.release ();
3f84bf08
MJ
3901}
3902
2c9561b5
MJ
3903/* Dump the AV linked list. */
3904
3905void
3906ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3907{
3908 bool comma = false;
3909 fprintf (f, " Aggregate replacements:");
3910 for (; av; av = av->next)
3911 {
3912 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3913 av->index, av->offset);
3914 print_generic_expr (f, av->value, 0);
3915 comma = true;
3916 }
3917 fprintf (f, "\n");
3918}
3919
fb3f88cc
JH
3920/* Stream out jump function JUMP_FUNC to OB. */
3921
3922static void
3923ipa_write_jump_function (struct output_block *ob,
3924 struct ipa_jump_func *jump_func)
3925{
8b7773a4
MJ
3926 struct ipa_agg_jf_item *item;
3927 struct bitpack_d bp;
3928 int i, count;
fb3f88cc 3929
8b7773a4 3930 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
3931 switch (jump_func->type)
3932 {
3933 case IPA_JF_UNKNOWN:
3934 break;
b258210c 3935 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
3936 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3937 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3938 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 3939 break;
fb3f88cc 3940 case IPA_JF_CONST:
5368224f 3941 gcc_assert (
4502fe8d
MJ
3942 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3943 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
3944 break;
3945 case IPA_JF_PASS_THROUGH:
412288f1 3946 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
3947 if (jump_func->value.pass_through.operation == NOP_EXPR)
3948 {
3949 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3950 bp = bitpack_create (ob->main_stream);
3951 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 3952 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
3953 streamer_write_bitpack (&bp);
3954 }
3955 else
3956 {
3957 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3958 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3959 }
fb3f88cc
JH
3960 break;
3961 case IPA_JF_ANCESTOR:
412288f1 3962 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 3963 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 3964 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
3965 bp = bitpack_create (ob->main_stream);
3966 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 3967 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 3968 streamer_write_bitpack (&bp);
fb3f88cc 3969 break;
8b7773a4
MJ
3970 }
3971
9771b263 3972 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
3973 streamer_write_uhwi (ob, count);
3974 if (count)
3975 {
3976 bp = bitpack_create (ob->main_stream);
3977 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3978 streamer_write_bitpack (&bp);
3979 }
3980
9771b263 3981 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3982 {
3983 streamer_write_uhwi (ob, item->offset);
3984 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
3985 }
3986}
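
/* Summary of the on-stream layout produced above and consumed by
   ipa_read_jump_function below: a uhwi holding the jump function type,
   a type-specific payload (KNOWN_TYPE: offset plus base and component
   type trees; CONST: the constant tree; PASS_THROUGH: the operation,
   then either formal_id plus an agg/type_preserved bitpack for NOP_EXPR
   or the operand tree plus formal_id otherwise; ANCESTOR: offset, type
   tree, formal_id and a bitpack), then the number of aggregate items,
   a by_ref bitpack if that number is nonzero, and one offset/value pair
   per item.  */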
3987
3988/* Read in jump function JUMP_FUNC from IB. */
3989
3990static void
3991ipa_read_jump_function (struct lto_input_block *ib,
3992 struct ipa_jump_func *jump_func,
4502fe8d 3993 struct cgraph_edge *cs,
fb3f88cc
JH
3994 struct data_in *data_in)
3995{
4a53743e
MJ
3996 enum jump_func_type jftype;
3997 enum tree_code operation;
8b7773a4 3998 int i, count;
fb3f88cc 3999
4a53743e
MJ
4000 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4001 switch (jftype)
fb3f88cc
JH
4002 {
4003 case IPA_JF_UNKNOWN:
4a53743e 4004 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4005 break;
b258210c 4006 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4007 {
4008 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4009 tree base_type = stream_read_tree (ib, data_in);
4010 tree component_type = stream_read_tree (ib, data_in);
4011
4012 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4013 break;
4014 }
fb3f88cc 4015 case IPA_JF_CONST:
4502fe8d 4016 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4017 break;
4018 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4019 operation = (enum tree_code) streamer_read_uhwi (ib);
4020 if (operation == NOP_EXPR)
4021 {
4022 int formal_id = streamer_read_uhwi (ib);
4023 struct bitpack_d bp = streamer_read_bitpack (ib);
4024 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4025 bool type_preserved = bp_unpack_value (&bp, 1);
4026 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4027 type_preserved);
4a53743e
MJ
4028 }
4029 else
4030 {
4031 tree operand = stream_read_tree (ib, data_in);
4032 int formal_id = streamer_read_uhwi (ib);
4033 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4034 operation);
4035 }
fb3f88cc
JH
4036 break;
4037 case IPA_JF_ANCESTOR:
4a53743e
MJ
4038 {
4039 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4040 tree type = stream_read_tree (ib, data_in);
4041 int formal_id = streamer_read_uhwi (ib);
4042 struct bitpack_d bp = streamer_read_bitpack (ib);
4043 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4044 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4045
b8f6e610
MJ
4046 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4047 type_preserved);
4a53743e
MJ
4048 break;
4049 }
8b7773a4
MJ
4050 }
4051
4052 count = streamer_read_uhwi (ib);
9771b263 4053 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4054 if (count)
4055 {
4a53743e 4056 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4057 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4058 }
4059 for (i = 0; i < count; i++)
4060 {
f32682ca
DN
4061 struct ipa_agg_jf_item item;
4062 item.offset = streamer_read_uhwi (ib);
4063 item.value = stream_read_tree (ib, data_in);
9771b263 4064 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4065 }
4066}
4067
e33c6cd6
MJ
4068/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4069 relevant to indirect inlining to OB. */
661e7330
MJ
4070
4071static void
e33c6cd6
MJ
4072ipa_write_indirect_edge_info (struct output_block *ob,
4073 struct cgraph_edge *cs)
661e7330 4074{
e33c6cd6 4075 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4076 struct bitpack_d bp;
e33c6cd6 4077
412288f1 4078 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4079 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4080 bp = bitpack_create (ob->main_stream);
4081 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4082 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4083 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4084 bp_pack_value (&bp, ii->by_ref, 1);
412288f1 4085 streamer_write_bitpack (&bp);
b258210c
MJ
4086
4087 if (ii->polymorphic)
4088 {
412288f1 4089 streamer_write_hwi (ob, ii->otr_token);
b9393656 4090 stream_write_tree (ob, ii->otr_type, true);
b258210c 4091 }
661e7330
MJ
4092}
4093
e33c6cd6
MJ
4094/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4095 relevant to indirect inlining from IB. */
661e7330
MJ
4096
4097static void
e33c6cd6
MJ
4098ipa_read_indirect_edge_info (struct lto_input_block *ib,
4099 struct data_in *data_in ATTRIBUTE_UNUSED,
4100 struct cgraph_edge *cs)
661e7330 4101{
e33c6cd6 4102 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4103 struct bitpack_d bp;
661e7330 4104
412288f1 4105 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4106 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4107 bp = streamer_read_bitpack (ib);
2465dcc2 4108 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4109 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4110 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4111 ii->by_ref = bp_unpack_value (&bp, 1);
b258210c
MJ
4112 if (ii->polymorphic)
4113 {
412288f1 4114 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4115 ii->otr_type = stream_read_tree (ib, data_in);
b258210c 4116 }
661e7330
MJ
4117}
4118
fb3f88cc
JH
4119/* Stream out NODE info to OB. */
4120
4121static void
4122ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4123{
4124 int node_ref;
7380e6ef 4125 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4126 struct ipa_node_params *info = IPA_NODE_REF (node);
4127 int j;
4128 struct cgraph_edge *e;
2465dcc2 4129 struct bitpack_d bp;
fb3f88cc 4130
7380e6ef 4131 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4132 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4133 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4134
0e8853ee
JH
4135 streamer_write_uhwi (ob, ipa_get_param_count (info));
4136 for (j = 0; j < ipa_get_param_count (info); j++)
4137 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4138 bp = bitpack_create (ob->main_stream);
062c604f 4139 gcc_assert (info->uses_analysis_done
661e7330 4140 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4141 gcc_assert (!info->node_enqueued);
4142 gcc_assert (!info->ipcp_orig_node);
4143 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4144 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4145 streamer_write_bitpack (&bp);
4502fe8d
MJ
4146 for (j = 0; j < ipa_get_param_count (info); j++)
4147 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4148 for (e = node->callees; e; e = e->next_callee)
4149 {
4150 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4151
412288f1 4152 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4153 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4154 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4155 }
e33c6cd6 4156 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4157 {
4158 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4159
412288f1 4160 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4161 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4162 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4163 ipa_write_indirect_edge_info (ob, e);
4164 }
fb3f88cc
JH
4165}
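
/* Layout of the per-node record written above and read back by
   ipa_read_node_info below: the encoded node reference, the parameter
   count, one move_cost uhwi per parameter, a bitpack of the "used"
   flags, one controlled-uses hwi per parameter, and then, for each
   direct and indirect call edge, the argument count followed by its
   jump functions (plus the indirect-edge info for indirect calls).  */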
4166
61502ca8 4167/* Stream in NODE info from IB. */
fb3f88cc
JH
4168
4169static void
4170ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4171 struct data_in *data_in)
4172{
4173 struct ipa_node_params *info = IPA_NODE_REF (node);
4174 int k;
4175 struct cgraph_edge *e;
2465dcc2 4176 struct bitpack_d bp;
fb3f88cc 4177
0e8853ee 4178 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4179
0e8853ee
JH
4180 for (k = 0; k < ipa_get_param_count (info); k++)
4181 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4182
412288f1 4183 bp = streamer_read_bitpack (ib);
fb3f88cc 4184 if (ipa_get_param_count (info) != 0)
062c604f 4185 info->uses_analysis_done = true;
fb3f88cc
JH
4186 info->node_enqueued = false;
4187 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4188 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4189 for (k = 0; k < ipa_get_param_count (info); k++)
4190 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4191 for (e = node->callees; e; e = e->next_callee)
4192 {
4193 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4194 int count = streamer_read_uhwi (ib);
fb3f88cc 4195
fb3f88cc
JH
4196 if (!count)
4197 continue;
9771b263 4198 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4199
fb3f88cc 4200 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4201 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4202 data_in);
fb3f88cc 4203 }
e33c6cd6 4204 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4205 {
4206 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4207 int count = streamer_read_uhwi (ib);
c8246dbe 4208
c8246dbe
JH
4209 if (count)
4210 {
9771b263 4211 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4212 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4213 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4214 data_in);
c8246dbe
JH
4215 }
4216 ipa_read_indirect_edge_info (ib, data_in, e);
4217 }
fb3f88cc
JH
4218}
4219
 4220/* Write jump functions for the nodes in the current LTO partition.  */
4221
4222void
f27c1867 4223ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4224{
4225 struct cgraph_node *node;
93536c97 4226 struct output_block *ob;
fb3f88cc 4227 unsigned int count = 0;
f27c1867
JH
4228 lto_symtab_encoder_iterator lsei;
4229 lto_symtab_encoder_t encoder;
4230
fb3f88cc 4231
9771b263 4232 if (!ipa_node_params_vector.exists ())
93536c97 4233 return;
fb3f88cc 4234
93536c97 4235 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4236 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4237 ob->cgraph_node = NULL;
f27c1867
JH
4238 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4239 lsei_next_function_in_partition (&lsei))
fb3f88cc 4240 {
f27c1867 4241 node = lsei_cgraph_node (lsei);
c47d0034
JH
4242 if (cgraph_function_with_gimple_body_p (node)
4243 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4244 count++;
4245 }
4246
412288f1 4247 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4248
4249 /* Process all of the functions. */
f27c1867
JH
4250 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4251 lsei_next_function_in_partition (&lsei))
fb3f88cc 4252 {
f27c1867 4253 node = lsei_cgraph_node (lsei);
c47d0034
JH
4254 if (cgraph_function_with_gimple_body_p (node)
4255 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4256 ipa_write_node_info (ob, node);
4257 }
412288f1 4258 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4259 produce_asm (ob, NULL);
4260 destroy_output_block (ob);
4261}
4262
4263/* Read section in file FILE_DATA of length LEN with data DATA. */
4264
4265static void
4266ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4267 size_t len)
4268{
4269 const struct lto_function_header *header =
4270 (const struct lto_function_header *) data;
4ad9a9de
EB
4271 const int cfg_offset = sizeof (struct lto_function_header);
4272 const int main_offset = cfg_offset + header->cfg_size;
4273 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4274 struct data_in *data_in;
4275 struct lto_input_block ib_main;
4276 unsigned int i;
4277 unsigned int count;
4278
4279 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4280 header->main_size);
4281
4282 data_in =
4283 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4284 header->string_size, vNULL);
412288f1 4285 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4286
4287 for (i = 0; i < count; i++)
4288 {
4289 unsigned int index;
4290 struct cgraph_node *node;
7380e6ef 4291 lto_symtab_encoder_t encoder;
fb3f88cc 4292
412288f1 4293 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4294 encoder = file_data->symtab_node_encoder;
4295 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4296 gcc_assert (node->definition);
fb3f88cc
JH
4297 ipa_read_node_info (&ib_main, node, data_in);
4298 }
4299 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4300 len);
4301 lto_data_in_delete (data_in);
4302}
4303
4304/* Read ipcp jump functions. */
4305
4306void
4307ipa_prop_read_jump_functions (void)
4308{
4309 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4310 struct lto_file_decl_data *file_data;
4311 unsigned int j = 0;
4312
4313 ipa_check_create_node_params ();
4314 ipa_check_create_edge_args ();
4315 ipa_register_cgraph_hooks ();
4316
4317 while ((file_data = file_data_vec[j++]))
4318 {
4319 size_t len;
4320 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4321
4322 if (data)
4323 ipa_prop_read_section (file_data, data, len);
4324 }
4325}
4326
b8698a0f 4327/* After merging units, we can get a mismatch in argument counts.
61502ca8 4328   Also, decl merging might have rendered parameter lists obsolete.
fb3f88cc
JH
 4329   Make sure the IPA node and edge data structures exist.  */
4330
4331void
4332ipa_update_after_lto_read (void)
4333{
05d3aa37
MJ
4334 ipa_check_create_node_params ();
4335 ipa_check_create_edge_args ();
fb3f88cc 4336}
2c9561b5
MJ
4337
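 4338/* Stream out the aggregate value replacement chain for NODE to OB.  */
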
4338void
4339write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4340{
4341 int node_ref;
4342 unsigned int count = 0;
4343 lto_symtab_encoder_t encoder;
4344 struct ipa_agg_replacement_value *aggvals, *av;
4345
4346 aggvals = ipa_get_agg_replacements_for_node (node);
4347 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4348 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4349 streamer_write_uhwi (ob, node_ref);
4350
4351 for (av = aggvals; av; av = av->next)
4352 count++;
4353 streamer_write_uhwi (ob, count);
4354
4355 for (av = aggvals; av; av = av->next)
4356 {
7b920a9a
MJ
4357 struct bitpack_d bp;
4358
2c9561b5
MJ
4359 streamer_write_uhwi (ob, av->offset);
4360 streamer_write_uhwi (ob, av->index);
4361 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4362
4363 bp = bitpack_create (ob->main_stream);
4364 bp_pack_value (&bp, av->by_ref, 1);
4365 streamer_write_bitpack (&bp);
2c9561b5
MJ
4366 }
4367}
4368
4369/* Stream in the aggregate value replacement chain for NODE from IB. */
4370
4371static void
4372read_agg_replacement_chain (struct lto_input_block *ib,
4373 struct cgraph_node *node,
4374 struct data_in *data_in)
4375{
4376 struct ipa_agg_replacement_value *aggvals = NULL;
4377 unsigned int count, i;
4378
4379 count = streamer_read_uhwi (ib);
 4380  for (i = 0; i < count; i++)
4381 {
4382 struct ipa_agg_replacement_value *av;
7b920a9a 4383 struct bitpack_d bp;
2c9561b5
MJ
4384
4385 av = ggc_alloc_ipa_agg_replacement_value ();
4386 av->offset = streamer_read_uhwi (ib);
4387 av->index = streamer_read_uhwi (ib);
4388 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4389 bp = streamer_read_bitpack (ib);
4390 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4391 av->next = aggvals;
4392 aggvals = av;
4393 }
4394 ipa_set_node_agg_value_chain (node, aggvals);
4395}
4396
 4397/* Write all aggregate replacements for nodes in the current LTO partition.  */
4398
4399void
4400ipa_prop_write_all_agg_replacement (void)
4401{
4402 struct cgraph_node *node;
4403 struct output_block *ob;
4404 unsigned int count = 0;
4405 lto_symtab_encoder_iterator lsei;
4406 lto_symtab_encoder_t encoder;
4407
4408 if (!ipa_node_agg_replacements)
4409 return;
4410
4411 ob = create_output_block (LTO_section_ipcp_transform);
4412 encoder = ob->decl_state->symtab_node_encoder;
4413 ob->cgraph_node = NULL;
4414 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4415 lsei_next_function_in_partition (&lsei))
4416 {
4417 node = lsei_cgraph_node (lsei);
4418 if (cgraph_function_with_gimple_body_p (node)
4419 && ipa_get_agg_replacements_for_node (node) != NULL)
4420 count++;
4421 }
4422
4423 streamer_write_uhwi (ob, count);
4424
4425 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4426 lsei_next_function_in_partition (&lsei))
4427 {
4428 node = lsei_cgraph_node (lsei);
4429 if (cgraph_function_with_gimple_body_p (node)
4430 && ipa_get_agg_replacements_for_node (node) != NULL)
4431 write_agg_replacement_chain (ob, node);
4432 }
4433 streamer_write_char_stream (ob->main_stream, 0);
4434 produce_asm (ob, NULL);
4435 destroy_output_block (ob);
4436}
4437
4438/* Read replacements section in file FILE_DATA of length LEN with data
4439 DATA. */
4440
4441static void
4442read_replacements_section (struct lto_file_decl_data *file_data,
4443 const char *data,
4444 size_t len)
4445{
4446 const struct lto_function_header *header =
4447 (const struct lto_function_header *) data;
4448 const int cfg_offset = sizeof (struct lto_function_header);
4449 const int main_offset = cfg_offset + header->cfg_size;
4450 const int string_offset = main_offset + header->main_size;
4451 struct data_in *data_in;
4452 struct lto_input_block ib_main;
4453 unsigned int i;
4454 unsigned int count;
4455
4456 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4457 header->main_size);
4458
4459 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4460 header->string_size, vNULL);
2c9561b5
MJ
4461 count = streamer_read_uhwi (&ib_main);
4462
4463 for (i = 0; i < count; i++)
4464 {
4465 unsigned int index;
4466 struct cgraph_node *node;
4467 lto_symtab_encoder_t encoder;
4468
4469 index = streamer_read_uhwi (&ib_main);
4470 encoder = file_data->symtab_node_encoder;
4471 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4472 gcc_assert (node->definition);
2c9561b5
MJ
4473 read_agg_replacement_chain (&ib_main, node, data_in);
4474 }
4475 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4476 len);
4477 lto_data_in_delete (data_in);
4478}
4479
4480/* Read IPA-CP aggregate replacements. */
4481
4482void
4483ipa_prop_read_all_agg_replacement (void)
4484{
4485 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4486 struct lto_file_decl_data *file_data;
4487 unsigned int j = 0;
4488
4489 while ((file_data = file_data_vec[j++]))
4490 {
4491 size_t len;
4492 const char *data = lto_get_section_data (file_data,
4493 LTO_section_ipcp_transform,
4494 NULL, &len);
4495 if (data)
4496 read_replacements_section (file_data, data, len);
4497 }
4498}
4499
4500/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4501 NODE. */
4502
4503static void
4504adjust_agg_replacement_values (struct cgraph_node *node,
4505 struct ipa_agg_replacement_value *aggval)
4506{
4507 struct ipa_agg_replacement_value *v;
4508 int i, c = 0, d = 0, *adj;
4509
4510 if (!node->clone.combined_args_to_skip)
4511 return;
4512
4513 for (v = aggval; v; v = v->next)
4514 {
4515 gcc_assert (v->index >= 0);
4516 if (c < v->index)
4517 c = v->index;
4518 }
4519 c++;
4520
4521 adj = XALLOCAVEC (int, c);
4522 for (i = 0; i < c; i++)
4523 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4524 {
4525 adj[i] = -1;
4526 d++;
4527 }
4528 else
4529 adj[i] = i - d;
4530
4531 for (v = aggval; v; v = v->next)
4532 v->index = adj[v->index];
4533}
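
/* For instance (illustrative): if NODE skips its second argument, so that
   combined_args_to_skip has bit 1 set, and AGGVAL contains entries with
   indices 0, 2 and 3, the loop above remaps those indices to 0, 1 and 2,
   matching the clone's shortened parameter list.  */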
4534
4535
4536/* Function body transformation phase. */
4537
4538unsigned int
4539ipcp_transform_function (struct cgraph_node *node)
4540{
6e1aa848 4541 vec<ipa_param_descriptor_t> descriptors = vNULL;
2c9561b5
MJ
4542 struct param_analysis_info *parms_ainfo;
4543 struct ipa_agg_replacement_value *aggval;
4544 gimple_stmt_iterator gsi;
4545 basic_block bb;
4546 int param_count;
4547 bool cfg_changed = false, something_changed = false;
4548
4549 gcc_checking_assert (cfun);
4550 gcc_checking_assert (current_function_decl);
4551
4552 if (dump_file)
4553 fprintf (dump_file, "Modification phase of node %s/%i\n",
67348ccc 4554 cgraph_node_name (node), node->order);
2c9561b5
MJ
4555
4556 aggval = ipa_get_agg_replacements_for_node (node);
4557 if (!aggval)
4558 return 0;
67348ccc 4559 param_count = count_formal_params (node->decl);
2c9561b5
MJ
4560 if (param_count == 0)
4561 return 0;
4562 adjust_agg_replacement_values (node, aggval);
4563 if (dump_file)
4564 ipa_dump_agg_replacement_values (dump_file, aggval);
4565 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4566 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4567 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4568 ipa_populate_param_decls (node, descriptors);
4569
4570 FOR_EACH_BB (bb)
4571 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4572 {
4573 struct ipa_agg_replacement_value *v;
4574 gimple stmt = gsi_stmt (gsi);
4575 tree rhs, val, t;
3ff2ca23 4576 HOST_WIDE_INT offset, size;
2c9561b5
MJ
4577 int index;
4578 bool by_ref, vce;
4579
4580 if (!gimple_assign_load_p (stmt))
4581 continue;
4582 rhs = gimple_assign_rhs1 (stmt);
4583 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4584 continue;
4585
4586 vce = false;
4587 t = rhs;
4588 while (handled_component_p (t))
4589 {
4590 /* V_C_E can do things like convert an array of integers to one
4591 bigger integer and similar things we do not handle below. */
4592 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4593 {
4594 vce = true;
4595 break;
4596 }
4597 t = TREE_OPERAND (t, 0);
4598 }
4599 if (vce)
4600 continue;
4601
4602 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3ff2ca23 4603 rhs, &index, &offset, &size, &by_ref))
2c9561b5
MJ
4604 continue;
4605 for (v = aggval; v; v = v->next)
4606 if (v->index == index
4607 && v->offset == offset)
4608 break;
3ff2ca23
JJ
4609 if (!v
4610 || v->by_ref != by_ref
4611 || tree_low_cst (TYPE_SIZE (TREE_TYPE (v->value)), 0) != size)
2c9561b5
MJ
4612 continue;
4613
4614 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4615 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4616 {
4617 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4618 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4619 else if (TYPE_SIZE (TREE_TYPE (rhs))
4620 == TYPE_SIZE (TREE_TYPE (v->value)))
4621 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4622 else
4623 {
4624 if (dump_file)
4625 {
4626 fprintf (dump_file, " const ");
4627 print_generic_expr (dump_file, v->value, 0);
4628 fprintf (dump_file, " can't be converted to type of ");
4629 print_generic_expr (dump_file, rhs, 0);
4630 fprintf (dump_file, "\n");
4631 }
4632 continue;
4633 }
4634 }
4635 else
4636 val = v->value;
4637
4638 if (dump_file && (dump_flags & TDF_DETAILS))
4639 {
4640 fprintf (dump_file, "Modifying stmt:\n ");
4641 print_gimple_stmt (dump_file, stmt, 0, 0);
4642 }
4643 gimple_assign_set_rhs_from_tree (&gsi, val);
4644 update_stmt (stmt);
4645
4646 if (dump_file && (dump_flags & TDF_DETAILS))
4647 {
4648 fprintf (dump_file, "into:\n ");
4649 print_gimple_stmt (dump_file, stmt, 0, 0);
4650 fprintf (dump_file, "\n");
4651 }
4652
4653 something_changed = true;
4654 if (maybe_clean_eh_stmt (stmt)
4655 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4656 cfg_changed = true;
4657 }
4658
9771b263 4659 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4660 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4661 descriptors.release ();
2c9561b5
MJ
4662
4663 if (!something_changed)
4664 return 0;
4665 else if (cfg_changed)
4666 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4667 else
4668 return TODO_update_ssa_only_virtuals;
4669}
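
/* Example of the transformation performed by ipcp_transform_function
   (illustrative, not from a real dump): if the aggregate replacement
   chain of this clone records the known constant 42 for parameter 0 at
   offset 64 bits with by_ref set, then a load such as

       x_3 = MEM[(const int *)p_1(D) + 8B];

   is rewritten by the loop above into

       x_3 = 42;

   provided the by_ref flag and the size of the replacement value match
   the load.  */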