/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

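/* Editorial illustration (not part of the original source): the check above
   typically fires for functions that carry their own optimization settings,
   for instance

     void __attribute__ ((optimize ("O0")))
     debug_helper (int *p);

   whose DECL_FUNCTION_SPECIFIC_OPTIMIZATION records x_optimize == 0 (or
   x_flag_ipa_cp == 0 under -fno-ipa-cp), so IPA-CP must neither analyze nor
   transform such a body.  */
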
/* Return index of the formal parameter whose tree is PTREE among the
   parameters described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

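/* Editorial illustration (not part of the original source): for a function
   declared as

     int foo (int a, struct S *s, double d);

   ipa_get_param_decl_index returns 0 for A, 1 for S and 2 for D, and -1 for
   any tree that is not one of foo's PARM_DECLs.  */
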
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO into
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

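/* Editorial sketch (an assumption, not part of the original source): callers
   such as ipa_analyze_node are expected to do roughly

     ipa_check_create_node_params ();
     ipa_initialize_node_params (node);
     ... compute jump functions for the node's call sites ...

   so that IPA_NODE_REF (node)->descriptors is built exactly once per node.  */
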
749aa96d
MJ
216/* Print the jump functions associated with call graph edge CS to file F. */
217
218static void
219ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
220{
221 int i, count;
222
223 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
224 for (i = 0; i < count; i++)
225 {
226 struct ipa_jump_func *jump_func;
227 enum jump_func_type type;
228
229 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
230 type = jump_func->type;
231
232 fprintf (f, " param %d: ", i);
233 if (type == IPA_JF_UNKNOWN)
234 fprintf (f, "UNKNOWN\n");
235 else if (type == IPA_JF_KNOWN_TYPE)
236 {
c7573249
MJ
237 fprintf (f, "KNOWN TYPE: base ");
238 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
239 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
240 jump_func->value.known_type.offset);
241 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
242 fprintf (f, "\n");
749aa96d
MJ
243 }
244 else if (type == IPA_JF_CONST)
245 {
4502fe8d 246 tree val = jump_func->value.constant.value;
749aa96d
MJ
247 fprintf (f, "CONST: ");
248 print_generic_expr (f, val, 0);
249 if (TREE_CODE (val) == ADDR_EXPR
250 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
251 {
252 fprintf (f, " -> ");
253 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
254 0);
255 }
256 fprintf (f, "\n");
257 }
749aa96d
MJ
258 else if (type == IPA_JF_PASS_THROUGH)
259 {
260 fprintf (f, "PASS THROUGH: ");
8b7773a4 261 fprintf (f, "%d, op %s",
749aa96d 262 jump_func->value.pass_through.formal_id,
5806f481 263 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 264 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
265 {
266 fprintf (f, " ");
267 print_generic_expr (f,
268 jump_func->value.pass_through.operand, 0);
269 }
270 if (jump_func->value.pass_through.agg_preserved)
271 fprintf (f, ", agg_preserved");
b8f6e610
MJ
272 if (jump_func->value.pass_through.type_preserved)
273 fprintf (f, ", type_preserved");
3ea6239f 274 fprintf (f, "\n");
749aa96d
MJ
275 }
276 else if (type == IPA_JF_ANCESTOR)
277 {
278 fprintf (f, "ANCESTOR: ");
279 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
280 jump_func->value.ancestor.formal_id,
281 jump_func->value.ancestor.offset);
282 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
283 if (jump_func->value.ancestor.agg_preserved)
284 fprintf (f, ", agg_preserved");
b8f6e610
MJ
285 if (jump_func->value.ancestor.type_preserved)
286 fprintf (f, ", type_preserved");
3ea6239f 287 fprintf (f, "\n");
749aa96d 288 }
8b7773a4
MJ
289
290 if (jump_func->agg.items)
291 {
292 struct ipa_agg_jf_item *item;
293 int j;
294
295 fprintf (f, " Aggregate passed by %s:\n",
296 jump_func->agg.by_ref ? "reference" : "value");
9771b263 297 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
298 {
299 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
300 item->offset);
301 if (TYPE_P (item->value))
302 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 303 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
304 else
305 {
306 fprintf (f, "cst: ");
307 print_generic_expr (f, item->value, 0);
308 }
309 fprintf (f, "\n");
310 }
311 }
749aa96d
MJ
312 }
313}
314
315
be95e2b9
MJ
316/* Print the jump functions of all arguments on all call graph edges going from
317 NODE to file F. */
318
518dc859 319void
3e293154 320ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 321{
3e293154 322 struct cgraph_edge *cs;
518dc859 323
fec39fa6 324 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 325 node->order);
3e293154
MJ
326 for (cs = node->callees; cs; cs = cs->next_callee)
327 {
328 if (!ipa_edge_args_info_available_for_edge_p (cs))
329 continue;
330
749aa96d 331 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
332 xstrdup (node->name ()), node->order,
333 xstrdup (cs->callee->name ()),
67348ccc 334 cs->callee->order);
749aa96d
MJ
335 ipa_print_node_jump_functions_for_edge (f, cs);
336 }
518dc859 337
9de04252 338 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 339 {
9de04252 340 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
341 if (!ipa_edge_args_info_available_for_edge_p (cs))
342 continue;
3e293154 343
9de04252
MJ
344 ii = cs->indirect_info;
345 if (ii->agg_contents)
c13bc3d9 346 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 347 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 348 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
349 ii->param_index, ii->offset,
350 ii->by_ref ? "by reference" : "by_value");
351 else
352 fprintf (f, " indirect %s callsite, calling param %i",
353 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
354
749aa96d
MJ
355 if (cs->call_stmt)
356 {
9de04252 357 fprintf (f, ", for stmt ");
749aa96d 358 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 359 }
749aa96d 360 else
9de04252 361 fprintf (f, "\n");
749aa96d 362 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
363 }
364}
365
366/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 367
3e293154
MJ
368void
369ipa_print_all_jump_functions (FILE *f)
370{
371 struct cgraph_node *node;
372
ca30a539 373 fprintf (f, "\nJump functions:\n");
65c70e6b 374 FOR_EACH_FUNCTION (node)
3e293154
MJ
375 {
376 ipa_print_node_jump_functions (f, node);
377 }
378}
379
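/* Editorial illustration (approximate, not part of the original source): with
   dumping enabled the two functions above emit blocks along the lines of

     Jump functions of caller  foo/12:
       callsite  foo/12 -> bar/5 :
          param 0: CONST: 4
          param 1: PASS THROUGH: 0, op nop_expr, agg_preserved

   i.e. one block per call graph edge and one line per actual argument of the
   call.  */
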
7b872d9e
MJ
380/* Set JFUNC to be a known type jump function. */
381
382static void
383ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
384 tree base_type, tree component_type)
385{
06d65050
JH
386 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
387 && TYPE_BINFO (component_type));
7b872d9e
MJ
388 jfunc->type = IPA_JF_KNOWN_TYPE;
389 jfunc->value.known_type.offset = offset,
390 jfunc->value.known_type.base_type = base_type;
391 jfunc->value.known_type.component_type = component_type;
68377e53 392 gcc_assert (component_type);
7b872d9e
MJ
393}
394
b8f6e610
MJ
395/* Set JFUNC to be a copy of another jump function (to be used by jump
396 function combination code). The two functions will share their rdesc. */
397
398static void
399ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
400 struct ipa_jump_func *src)
401
402{
403 gcc_checking_assert (src->type == IPA_JF_CONST);
404 dst->type = IPA_JF_CONST;
405 dst->value.constant = src->value.constant;
406}
407
7b872d9e
MJ
408/* Set JFUNC to be a constant jmp function. */
409
410static void
4502fe8d
MJ
411ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
412 struct cgraph_edge *cs)
7b872d9e 413{
5368224f
DC
414 constant = unshare_expr (constant);
415 if (constant && EXPR_P (constant))
416 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 417 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
418 jfunc->value.constant.value = unshare_expr_without_location (constant);
419
420 if (TREE_CODE (constant) == ADDR_EXPR
421 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
422 {
423 struct ipa_cst_ref_desc *rdesc;
424 if (!ipa_refdesc_pool)
425 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
426 sizeof (struct ipa_cst_ref_desc), 32);
427
428 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
429 rdesc->cs = cs;
430 rdesc->next_duplicate = NULL;
431 rdesc->refcount = 1;
432 jfunc->value.constant.rdesc = rdesc;
433 }
434 else
435 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
436}
437
438/* Set JFUNC to be a simple pass-through jump function. */
439static void
8b7773a4 440ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 441 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
442{
443 jfunc->type = IPA_JF_PASS_THROUGH;
444 jfunc->value.pass_through.operand = NULL_TREE;
445 jfunc->value.pass_through.formal_id = formal_id;
446 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 447 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 448 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
449}
450
451/* Set JFUNC to be an arithmetic pass through jump function. */
452
453static void
454ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
455 tree operand, enum tree_code operation)
456{
457 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 458 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
459 jfunc->value.pass_through.formal_id = formal_id;
460 jfunc->value.pass_through.operation = operation;
8b7773a4 461 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 462 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
463}
464
465/* Set JFUNC to be an ancestor jump function. */
466
467static void
468ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
469 tree type, int formal_id, bool agg_preserved,
470 bool type_preserved)
7b872d9e
MJ
471{
472 jfunc->type = IPA_JF_ANCESTOR;
473 jfunc->value.ancestor.formal_id = formal_id;
474 jfunc->value.ancestor.offset = offset;
475 jfunc->value.ancestor.type = type;
8b7773a4 476 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 477 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
478}
479
e248d83f
MJ
480/* Extract the actual BINFO being described by JFUNC which must be a known type
481 jump function. */
482
483tree
484ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
485{
486 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
487 if (!base_binfo)
488 return NULL_TREE;
489 return get_binfo_at_offset (base_binfo,
490 jfunc->value.known_type.offset,
491 jfunc->value.known_type.component_type);
492}
493
f65cf2b7
MJ
494/* Structure to be passed in between detect_type_change and
495 check_stmt_for_type_change. */
496
497struct type_change_info
498{
290ebcb7
MJ
499 /* Offset into the object where there is the virtual method pointer we are
500 looking for. */
501 HOST_WIDE_INT offset;
502 /* The declaration or SSA_NAME pointer of the base that we are checking for
503 type change. */
504 tree object;
505 /* If we actually can tell the type that the object has changed to, it is
506 stored in this field. Otherwise it remains NULL_TREE. */
507 tree known_current_type;
f65cf2b7
MJ
508 /* Set to true if dynamic type change has been detected. */
509 bool type_maybe_changed;
290ebcb7
MJ
510 /* Set to true if multiple types have been encountered. known_current_type
511 must be disregarded in that case. */
512 bool multiple_types_encountered;
f65cf2b7
MJ
513};
514
515/* Return true if STMT can modify a virtual method table pointer.
516
517 This function makes special assumptions about both constructors and
518 destructors which are all the functions that are allowed to alter the VMT
519 pointers. It assumes that destructors begin with assignment into all VMT
520 pointers and that constructors essentially look in the following way:
521
522 1) The very first thing they do is that they call constructors of ancestor
523 sub-objects that have them.
524
525 2) Then VMT pointers of this and all its ancestors are set to new values
526 corresponding to the type corresponding to the constructor.
527
528 3) Only afterwards, other stuff such as constructor of member sub-objects
529 and the code written by the user is run. Only this may include calling
530 virtual functions, directly or indirectly.
531
532 There is no way to call a constructor of an ancestor sub-object in any
533 other way.
534
535 This means that we do not have to care whether constructors get the correct
536 type information because they will always change it (in fact, if we define
537 the type to be given by the VMT pointer, it is undefined).
538
539 The most important fact to derive from the above is that if, for some
540 statement in the section 3, we try to detect whether the dynamic type has
541 changed, we can safely ignore all calls as we examine the function body
542 backwards until we reach statements in section 2 because these calls cannot
543 be ancestor constructors or destructors (if the input is not bogus) and so
544 do not change the dynamic type (this holds true only for automatically
545 allocated objects but at the moment we devirtualize only these). We then
546 must detect that statements in section 2 change the dynamic type and can try
547 to derive the new type. That is enough and we can stop, we will never see
548 the calls into constructors of sub-objects in this code. Therefore we can
549 safely ignore all call statements that we traverse.
550 */
551
552static bool
553stmt_may_be_vtbl_ptr_store (gimple stmt)
554{
555 if (is_gimple_call (stmt))
556 return false;
557 else if (is_gimple_assign (stmt))
558 {
559 tree lhs = gimple_assign_lhs (stmt);
560
0004f992
MJ
561 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
562 {
563 if (flag_strict_aliasing
564 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
565 return false;
566
567 if (TREE_CODE (lhs) == COMPONENT_REF
568 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 569 return false;
0004f992
MJ
570 /* In the future we might want to use get_base_ref_and_offset to find
571 if there is a field corresponding to the offset and if so, proceed
572 almost like if it was a component ref. */
573 }
f65cf2b7
MJ
574 }
575 return true;
576}
577
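/* Editorial illustration (not part of the original source): the virtual table
   pointer stores that the code below looks for are the assignments emitted in
   constructors and destructors, schematically

     this_1(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   i.e. an ADDR_EXPR of a virtual table VAR_DECL stored into a COMPONENT_REF
   whose FIELD_DECL has DECL_VIRTUAL_P set.  */
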
290ebcb7
MJ
578/* If STMT can be proved to be an assignment to the virtual method table
579 pointer of the object described by TCI and the type associated with the new
580 table identified, return that type. Otherwise return NULL_TREE. */
581
582static tree
583extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
584{
585 HOST_WIDE_INT offset, size, max_size;
586 tree lhs, rhs, base;
587
588 if (!gimple_assign_single_p (stmt))
589 return NULL_TREE;
590
591 lhs = gimple_assign_lhs (stmt);
592 rhs = gimple_assign_rhs1 (stmt);
593 if (TREE_CODE (lhs) != COMPONENT_REF
594 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
595 || TREE_CODE (rhs) != ADDR_EXPR)
596 return NULL_TREE;
597 rhs = get_base_address (TREE_OPERAND (rhs, 0));
598 if (!rhs
599 || TREE_CODE (rhs) != VAR_DECL
600 || !DECL_VIRTUAL_P (rhs))
601 return NULL_TREE;
602
603 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
604 if (offset != tci->offset
605 || size != POINTER_SIZE
606 || max_size != POINTER_SIZE)
607 return NULL_TREE;
608 if (TREE_CODE (base) == MEM_REF)
609 {
610 if (TREE_CODE (tci->object) != MEM_REF
611 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
612 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
613 TREE_OPERAND (base, 1)))
614 return NULL_TREE;
615 }
616 else if (tci->object != base)
617 return NULL_TREE;
618
619 return DECL_CONTEXT (rhs);
620}
621
61502ca8 622/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
623 detect_type_change to check whether a particular statement may modify
624 the virtual table pointer, and if possible also determine the new type of
625 the (sub-)object. It stores its result into DATA, which points to a
626 type_change_info structure. */
627
628static bool
629check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
630{
631 gimple stmt = SSA_NAME_DEF_STMT (vdef);
632 struct type_change_info *tci = (struct type_change_info *) data;
633
634 if (stmt_may_be_vtbl_ptr_store (stmt))
635 {
290ebcb7
MJ
636 tree type;
637 type = extr_type_from_vtbl_ptr_store (stmt, tci);
638 if (tci->type_maybe_changed
639 && type != tci->known_current_type)
640 tci->multiple_types_encountered = true;
641 tci->known_current_type = type;
f65cf2b7
MJ
642 tci->type_maybe_changed = true;
643 return true;
644 }
645 else
646 return false;
647}
648
290ebcb7
MJ
649
650
06d65050
JH
651/* Detect whether the dynamic type of ARG, whose static type is COMP_TYPE, has
652 changed before call site CALL by looking for assignments to its virtual table
653 pointer. If it has, return true and fill in the jump function JFUNC with relevant type
654 information or set it to unknown. ARG is the object itself (not a pointer
655 to it, unless dereferenced). BASE is the base of the memory access as
656 returned by get_ref_base_and_extent, as is the offset. */
f65cf2b7
MJ
657
658static bool
06d65050
JH
659detect_type_change (tree arg, tree base, tree comp_type, gimple call,
660 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
661{
662 struct type_change_info tci;
663 ao_ref ao;
664
665 gcc_checking_assert (DECL_P (arg)
666 || TREE_CODE (arg) == MEM_REF
667 || handled_component_p (arg));
668 /* Const calls cannot call virtual methods through VMT and so type changes do
669 not matter. */
06d65050
JH
670 if (!flag_devirtualize || !gimple_vuse (call)
671 /* Be sure expected_type is polymorphic. */
672 || !comp_type
673 || TREE_CODE (comp_type) != RECORD_TYPE
674 || !TYPE_BINFO (comp_type)
675 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
f65cf2b7
MJ
676 return false;
677
dd887943 678 ao_ref_init (&ao, arg);
f65cf2b7
MJ
679 ao.base = base;
680 ao.offset = offset;
681 ao.size = POINTER_SIZE;
682 ao.max_size = ao.size;
f65cf2b7 683
290ebcb7
MJ
684 tci.offset = offset;
685 tci.object = get_base_address (arg);
686 tci.known_current_type = NULL_TREE;
687 tci.type_maybe_changed = false;
688 tci.multiple_types_encountered = false;
689
f65cf2b7
MJ
690 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
691 &tci, NULL);
692 if (!tci.type_maybe_changed)
693 return false;
694
290ebcb7
MJ
695 if (!tci.known_current_type
696 || tci.multiple_types_encountered
697 || offset != 0)
698 jfunc->type = IPA_JF_UNKNOWN;
699 else
7b872d9e 700 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 701
f65cf2b7
MJ
702 return true;
703}
704
705/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
706 SSA name (its dereference will become the base and the offset is assumed to
707 be zero). */
708
709static bool
06d65050
JH
710detect_type_change_ssa (tree arg, tree comp_type,
711 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
712{
713 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 714 if (!flag_devirtualize
06d65050 715 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
716 return false;
717
718 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 719 build_int_cst (ptr_type_node, 0));
f65cf2b7 720
06d65050 721 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
722}
723
fdb0e1b4
MJ
724/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
725 boolean variable pointed to by DATA. */
726
727static bool
728mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
729 void *data)
730{
731 bool *b = (bool *) data;
732 *b = true;
733 return true;
734}
735
688010ba 736/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4
MJ
737 a value known not to be modified in this function before reaching the
738 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
739 information about the parameter. */
fdb0e1b4
MJ
740
741static bool
8b7773a4
MJ
742parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
743 gimple stmt, tree parm_load)
fdb0e1b4
MJ
744{
745 bool modified = false;
8b7773a4 746 bitmap *visited_stmts;
fdb0e1b4
MJ
747 ao_ref refd;
748
8b7773a4
MJ
749 if (parm_ainfo && parm_ainfo->parm_modified)
750 return false;
fdb0e1b4
MJ
751
752 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
753 ao_ref_init (&refd, parm_load);
754 /* We can cache visited statements only when parm_ainfo is available and when
755 we are looking at a naked load of the whole parameter. */
756 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
757 visited_stmts = NULL;
758 else
759 visited_stmts = &parm_ainfo->parm_visited_statements;
760 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
761 visited_stmts);
762 if (parm_ainfo && modified)
763 parm_ainfo->parm_modified = true;
764 return !modified;
fdb0e1b4
MJ
765}
766
767/* If STMT is an assignment that loads a value from a parameter declaration,
768 return the index of the parameter in ipa_node_params which has not been
769 modified. Otherwise return -1. */
770
771static int
9771b263 772load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
fdb0e1b4
MJ
773 struct param_analysis_info *parms_ainfo,
774 gimple stmt)
775{
776 int index;
777 tree op1;
778
779 if (!gimple_assign_single_p (stmt))
780 return -1;
781
782 op1 = gimple_assign_rhs1 (stmt);
783 if (TREE_CODE (op1) != PARM_DECL)
784 return -1;
785
d044dd17 786 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 787 if (index < 0
8b7773a4
MJ
788 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
789 : NULL, stmt, op1))
fdb0e1b4
MJ
790 return -1;
791
792 return index;
793}
f65cf2b7 794
8b7773a4
MJ
795/* Return true if memory reference REF loads data that are known to be
796 unmodified in this function before reaching statement STMT. PARM_AINFO, if
797 non-NULL, is a pointer to a structure containing temporary information about
798 PARM. */
799
800static bool
801parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
802 gimple stmt, tree ref)
803{
804 bool modified = false;
805 ao_ref refd;
806
807 gcc_checking_assert (gimple_vuse (stmt));
808 if (parm_ainfo && parm_ainfo->ref_modified)
809 return false;
810
811 ao_ref_init (&refd, ref);
812 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
813 NULL);
814 if (parm_ainfo && modified)
815 parm_ainfo->ref_modified = true;
816 return !modified;
817}
818
819/* Return true if the data pointed to by PARM is known to be unmodified in this
820 function before reaching call statement CALL into which it is passed.
821 PARM_AINFO is a pointer to a structure containing temporary information
822 about PARM. */
823
824static bool
825parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
826 gimple call, tree parm)
827{
828 bool modified = false;
829 ao_ref refd;
830
831 /* It's unnecessary to calculate anything about memory contents for a const
832 function because it is not going to use it. But do not cache the result
833 either. Also, no such calculations for non-pointers. */
834 if (!gimple_vuse (call)
835 || !POINTER_TYPE_P (TREE_TYPE (parm)))
836 return false;
837
838 if (parm_ainfo->pt_modified)
839 return false;
840
841 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
842 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
843 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
844 if (modified)
845 parm_ainfo->pt_modified = true;
846 return !modified;
847}
848
849/* Return true if we can prove that OP is a memory reference loading unmodified
850 data from an aggregate passed as a parameter and if the aggregate is passed
851 by reference, that the alias type of the load corresponds to the type of the
852 formal parameter (so that we can rely on this type for TBAA in callers).
853 INFO and PARMS_AINFO describe parameters of the current function (but the
854 latter can be NULL), STMT is the load statement. If function returns true,
855 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
856 within the aggregate and whether it is a load from a value passed by
857 reference respectively. */
858
859static bool
9771b263 860ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
8b7773a4
MJ
861 struct param_analysis_info *parms_ainfo, gimple stmt,
862 tree op, int *index_p, HOST_WIDE_INT *offset_p,
3ff2ca23 863 HOST_WIDE_INT *size_p, bool *by_ref_p)
8b7773a4
MJ
864{
865 int index;
866 HOST_WIDE_INT size, max_size;
867 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
868
869 if (max_size == -1 || max_size != size || *offset_p < 0)
870 return false;
871
872 if (DECL_P (base))
873 {
d044dd17 874 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
875 if (index >= 0
876 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
877 : NULL, stmt, op))
878 {
879 *index_p = index;
880 *by_ref_p = false;
3ff2ca23
JJ
881 if (size_p)
882 *size_p = size;
8b7773a4
MJ
883 return true;
884 }
885 return false;
886 }
887
888 if (TREE_CODE (base) != MEM_REF
889 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
890 || !integer_zerop (TREE_OPERAND (base, 1)))
891 return false;
892
893 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
894 {
895 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 896 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
897 }
898 else
899 {
900 /* This branch catches situations where a pointer parameter is not a
901 gimple register, for example:
902
903 void hip7(S*) (struct S * p)
904 {
905 void (*<T2e4>) (struct S *) D.1867;
906 struct S * p.1;
907
908 <bb 2>:
909 p.1_1 = p;
910 D.1867_2 = p.1_1->f;
911 D.1867_2 ();
912 gdp = &p;
913 */
914
915 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 916 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
917 }
918
919 if (index >= 0
920 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
921 stmt, op))
922 {
923 *index_p = index;
924 *by_ref_p = true;
3ff2ca23
JJ
925 if (size_p)
926 *size_p = size;
8b7773a4
MJ
927 return true;
928 }
929 return false;
930}
931
932/* Just like the previous function, but without the param_analysis_info
933 pointer, for users outside of this file. */
934
935bool
936ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
937 tree op, int *index_p, HOST_WIDE_INT *offset_p,
938 bool *by_ref_p)
939{
d044dd17 940 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
3ff2ca23 941 offset_p, NULL, by_ref_p);
8b7773a4
MJ
942}
943
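/* Editorial illustration (not part of the original source): given a caller
   such as

     void foo (struct S *p, struct S v)
     {
       use (p->a);
       use (v.b);
     }

   ipa_load_from_parm_agg is expected to recognize p->a as a by-reference load
   from parameter P and v.b as a by-value load from parameter V, filling in the
   parameter index and the bit offset of the field, provided neither *P nor V
   has been modified before the respective statement.  */
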
b258210c 944/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
945 of an assignment statement STMT, try to determine whether we are actually
946 handling any of the following cases and construct an appropriate jump
947 function into JFUNC if so:
948
949 1) The passed value is loaded from a formal parameter which is not a gimple
950 register (most probably because it is addressable, the value has to be
951 scalar) and we can guarantee the value has not changed. This case can
952 therefore be described by a simple pass-through jump function. For example:
953
954 foo (int a)
955 {
956 int a.0;
957
958 a.0_2 = a;
959 bar (a.0_2);
960
961 2) The passed value can be described by a simple arithmetic pass-through
962 jump function. E.g.
963
964 foo (int a)
965 {
966 int D.2064;
967
968 D.2064_4 = a.1(D) + 4;
969 bar (D.2064_4);
970
971 This case can also occur in combination of the previous one, e.g.:
972
973 foo (int a, int z)
974 {
975 int a.0;
976 int D.2064;
977
978 a.0_3 = a;
979 D.2064_4 = a.0_3 + 4;
980 foo (D.2064_4);
981
982 3) The passed value is an address of an object within another one (which
983 also passed by reference). Such situations are described by an ancestor
984 jump function and describe situations such as:
985
986 B::foo() (struct B * const this)
987 {
988 struct A * D.1845;
989
990 D.1845_2 = &this_1(D)->D.1748;
991 A::bar (D.1845_2);
992
993 INFO is the structure describing individual parameters, used across different
994 stages of IPA optimizations. PARMS_AINFO contains the information that is
995 only needed for intraprocedural analysis. */
685b0d13
MJ
996
997static void
b258210c 998compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 999 struct param_analysis_info *parms_ainfo,
b258210c 1000 struct ipa_jump_func *jfunc,
06d65050
JH
1001 gimple call, gimple stmt, tree name,
1002 tree param_type)
685b0d13
MJ
1003{
1004 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1005 tree op1, tc_ssa, base, ssa;
685b0d13 1006 int index;
685b0d13 1007
685b0d13 1008 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1009
fdb0e1b4 1010 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1011 {
fdb0e1b4
MJ
1012 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1013 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1014 else
d044dd17 1015 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
1016 SSA_NAME_DEF_STMT (op1));
1017 tc_ssa = op1;
1018 }
1019 else
1020 {
d044dd17 1021 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
1022 tc_ssa = gimple_assign_lhs (stmt);
1023 }
1024
1025 if (index >= 0)
1026 {
1027 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1028
b258210c 1029 if (op2)
685b0d13 1030 {
b258210c
MJ
1031 if (!is_gimple_ip_invariant (op2)
1032 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1033 && !useless_type_conversion_p (TREE_TYPE (name),
1034 TREE_TYPE (op1))))
1035 return;
1036
7b872d9e
MJ
1037 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1038 gimple_assign_rhs_code (stmt));
685b0d13 1039 }
b8f6e610 1040 else if (gimple_assign_single_p (stmt))
8b7773a4
MJ
1041 {
1042 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1043 call, tc_ssa);
06d65050
JH
1044 bool type_p = false;
1045
1046 if (param_type && POINTER_TYPE_P (param_type))
1047 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1048 call, jfunc);
b8f6e610
MJ
1049 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1050 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1051 }
685b0d13
MJ
1052 return;
1053 }
1054
1055 if (TREE_CODE (op1) != ADDR_EXPR)
1056 return;
1057 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1058 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1059 return;
32aa622c
MJ
1060 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1061 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1062 /* If this is a varying address, punt. */
1063 || max_size == -1
1064 || max_size != size)
685b0d13 1065 return;
32aa622c 1066 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
1067 ssa = TREE_OPERAND (base, 0);
1068 if (TREE_CODE (ssa) != SSA_NAME
1069 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1070 || offset < 0)
685b0d13
MJ
1071 return;
1072
b8f6e610 1073 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1074 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1075 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1076 {
06d65050
JH
1077 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1078 call, jfunc, offset);
b8f6e610
MJ
1079 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1080 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1081 parm_ref_data_pass_through_p (&parms_ainfo[index],
1082 call, ssa), type_p);
1083 }
685b0d13
MJ
1084}
1085
40591473
MJ
1086/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1087 it looks like:
1088
1089 iftmp.1_3 = &obj_2(D)->D.1762;
1090
1091 The base of the MEM_REF must be a default definition SSA NAME of a
1092 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1093 whole MEM_REF expression is returned and the offset calculated from any
1094 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1095 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1096
1097static tree
1098get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1099{
1100 HOST_WIDE_INT size, max_size;
1101 tree expr, parm, obj;
1102
1103 if (!gimple_assign_single_p (assign))
1104 return NULL_TREE;
1105 expr = gimple_assign_rhs1 (assign);
1106
1107 if (TREE_CODE (expr) != ADDR_EXPR)
1108 return NULL_TREE;
1109 expr = TREE_OPERAND (expr, 0);
1110 obj = expr;
1111 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1112
1113 if (TREE_CODE (expr) != MEM_REF
1114 /* If this is a varying address, punt. */
1115 || max_size == -1
1116 || max_size != size
1117 || *offset < 0)
1118 return NULL_TREE;
1119 parm = TREE_OPERAND (expr, 0);
1120 if (TREE_CODE (parm) != SSA_NAME
1121 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1122 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1123 return NULL_TREE;
1124
1125 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1126 *obj_p = obj;
1127 return expr;
1128}
1129
685b0d13 1130
b258210c
MJ
1131/* Given that an actual argument is an SSA_NAME that is a result of a phi
1132 statement PHI, try to find out whether NAME is in fact a
1133 multiple-inheritance typecast from a descendant into an ancestor of a formal
1134 parameter and thus can be described by an ancestor jump function and if so,
1135 write the appropriate function into JFUNC.
1136
1137 Essentially we want to match the following pattern:
1138
1139 if (obj_2(D) != 0B)
1140 goto <bb 3>;
1141 else
1142 goto <bb 4>;
1143
1144 <bb 3>:
1145 iftmp.1_3 = &obj_2(D)->D.1762;
1146
1147 <bb 4>:
1148 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1149 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1150 return D.1879_6; */
1151
1152static void
1153compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1154 struct param_analysis_info *parms_ainfo,
b258210c 1155 struct ipa_jump_func *jfunc,
06d65050 1156 gimple call, gimple phi, tree param_type)
b258210c 1157{
40591473 1158 HOST_WIDE_INT offset;
b258210c
MJ
1159 gimple assign, cond;
1160 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1161 tree tmp, parm, expr, obj;
b258210c
MJ
1162 int index, i;
1163
54e348cb 1164 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1165 return;
1166
54e348cb
MJ
1167 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1168 tmp = PHI_ARG_DEF (phi, 0);
1169 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1170 tmp = PHI_ARG_DEF (phi, 1);
1171 else
1172 return;
b258210c
MJ
1173 if (TREE_CODE (tmp) != SSA_NAME
1174 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1175 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1176 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1177 return;
1178
1179 assign = SSA_NAME_DEF_STMT (tmp);
1180 assign_bb = gimple_bb (assign);
40591473 1181 if (!single_pred_p (assign_bb))
b258210c 1182 return;
40591473
MJ
1183 expr = get_ancestor_addr_info (assign, &obj, &offset);
1184 if (!expr)
b258210c
MJ
1185 return;
1186 parm = TREE_OPERAND (expr, 0);
b258210c 1187 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1188 gcc_assert (index >= 0);
b258210c
MJ
1189
1190 cond_bb = single_pred (assign_bb);
1191 cond = last_stmt (cond_bb);
69610617
SB
1192 if (!cond
1193 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1194 || gimple_cond_code (cond) != NE_EXPR
1195 || gimple_cond_lhs (cond) != parm
1196 || !integer_zerop (gimple_cond_rhs (cond)))
1197 return;
1198
b258210c
MJ
1199 phi_bb = gimple_bb (phi);
1200 for (i = 0; i < 2; i++)
1201 {
1202 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1203 if (pred != assign_bb && pred != cond_bb)
1204 return;
1205 }
1206
06d65050
JH
1207 bool type_p = false;
1208 if (param_type && POINTER_TYPE_P (param_type))
1209 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1210 call, jfunc, offset);
b8f6e610 1211 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8b7773a4
MJ
1212 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1213 parm_ref_data_pass_through_p (&parms_ainfo[index],
b8f6e610 1214 call, parm), type_p);
b258210c
MJ
1215}
1216
61502ca8 1217/* Given OP which is passed as an actual argument to a called function,
b258210c 1218 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1219 and if so, create one and store it to JFUNC.
1220 EXPECTED_TYPE represents a type the argument should be in. */
b258210c
MJ
1221
1222static void
f65cf2b7 1223compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1224 gimple call, tree expected_type)
b258210c 1225{
32aa622c 1226 HOST_WIDE_INT offset, size, max_size;
c7573249 1227 tree base;
b258210c 1228
05842ff5
MJ
1229 if (!flag_devirtualize
1230 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1231 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1232 /* Be sure expected_type is polymorphic. */
1233 || !expected_type
1234 || TREE_CODE (expected_type) != RECORD_TYPE
1235 || !TYPE_BINFO (expected_type)
1236 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1237 return;
1238
1239 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1240 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1241 if (!DECL_P (base)
1242 || max_size == -1
1243 || max_size != size
1244 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1245 || is_global_var (base))
1246 return;
1247
06d65050 1248 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1249 return;
1250
06d65050
JH
1251 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1252 expected_type);
b258210c
MJ
1253}
1254
be95e2b9
MJ
1255/* Inspect the given TYPE and return true iff it has the same structure (the
1256 same number of fields of the same types) as a C++ member pointer. If
1257 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1258 corresponding fields there. */
1259
3e293154
MJ
1260static bool
1261type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1262{
1263 tree fld;
1264
1265 if (TREE_CODE (type) != RECORD_TYPE)
1266 return false;
1267
1268 fld = TYPE_FIELDS (type);
1269 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1270 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1271 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1272 return false;
1273
1274 if (method_ptr)
1275 *method_ptr = fld;
1276
910ad8de 1277 fld = DECL_CHAIN (fld);
8b7773a4 1278 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1280 return false;
1281 if (delta)
1282 *delta = fld;
1283
910ad8de 1284 if (DECL_CHAIN (fld))
3e293154
MJ
1285 return false;
1286
1287 return true;
1288}
1289
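/* Editorial illustration (not part of the original source): under the Itanium
   C++ ABI a pointer to member function is lowered to a two-field record
   roughly equivalent to

     struct
     {
       void (*__pfn) (void);
       ptrdiff_t __delta;
     };

   which is the shape type_like_member_ptr_p checks for: a pointer to a
   METHOD_TYPE followed by exactly one more field.  */
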
61502ca8 1290/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1291 return the rhs of its defining statement. Otherwise return RHS as it
1292 is. */
7ec49257
MJ
1293
1294static inline tree
1295get_ssa_def_if_simple_copy (tree rhs)
1296{
1297 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1298 {
1299 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1300
1301 if (gimple_assign_single_p (def_stmt))
1302 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1303 else
1304 break;
7ec49257
MJ
1305 }
1306 return rhs;
1307}
1308
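/* Editorial illustration (not part of the original source): given

     a_2 = 16;
     b_3 = a_2;

   get_ssa_def_if_simple_copy (b_3) follows the single-rhs definitions and
   returns the constant 16, which lets the aggregate scan below treat a store
   of b_3 as a store of a known constant.  */
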
8b7773a4
MJ
1309/* Simple linked list, describing known contents of an aggregate before a
1310 call. */
1311
1312struct ipa_known_agg_contents_list
1313{
1314 /* Offset and size of the described part of the aggregate. */
1315 HOST_WIDE_INT offset, size;
1316 /* Known constant value or NULL if the contents are known to be unknown. */
1317 tree constant;
1318 /* Pointer to the next structure in the list. */
1319 struct ipa_known_agg_contents_list *next;
1320};
3e293154 1321
8b7773a4
MJ
1322/* Traverse statements from CALL backwards, scanning whether an aggregate given
1323 in ARG is filled in with constant values. ARG can either be an aggregate
1324 expression or a pointer to an aggregate. JFUNC is the jump function into
1325 which the constants are subsequently stored. */
be95e2b9 1326
3e293154 1327static void
8b7773a4
MJ
1328determine_known_aggregate_parts (gimple call, tree arg,
1329 struct ipa_jump_func *jfunc)
3e293154 1330{
8b7773a4
MJ
1331 struct ipa_known_agg_contents_list *list = NULL;
1332 int item_count = 0, const_count = 0;
1333 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1334 gimple_stmt_iterator gsi;
8b7773a4
MJ
1335 tree arg_base;
1336 bool check_ref, by_ref;
1337 ao_ref r;
3e293154 1338
8b7773a4
MJ
1339 /* The function operates in three stages. First, we prepare check_ref, r,
1340 arg_base and arg_offset based on what is actually passed as an actual
1341 argument. */
3e293154 1342
8b7773a4
MJ
1343 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1344 {
1345 by_ref = true;
1346 if (TREE_CODE (arg) == SSA_NAME)
1347 {
1348 tree type_size;
cc269bb6 1349 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
8b7773a4
MJ
1350 return;
1351 check_ref = true;
1352 arg_base = arg;
1353 arg_offset = 0;
1354 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
ae7e9ddd 1355 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1356 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1357 }
1358 else if (TREE_CODE (arg) == ADDR_EXPR)
1359 {
1360 HOST_WIDE_INT arg_max_size;
1361
1362 arg = TREE_OPERAND (arg, 0);
1363 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1364 &arg_max_size);
1365 if (arg_max_size == -1
1366 || arg_max_size != arg_size
1367 || arg_offset < 0)
1368 return;
1369 if (DECL_P (arg_base))
1370 {
1371 tree size;
1372 check_ref = false;
1373 size = build_int_cst (integer_type_node, arg_size);
1374 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1375 }
1376 else
1377 return;
1378 }
1379 else
1380 return;
1381 }
1382 else
1383 {
1384 HOST_WIDE_INT arg_max_size;
1385
1386 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1387
1388 by_ref = false;
1389 check_ref = false;
1390 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1391 &arg_max_size);
1392 if (arg_max_size == -1
1393 || arg_max_size != arg_size
1394 || arg_offset < 0)
1395 return;
1396
1397 ao_ref_init (&r, arg);
1398 }
1399
1400 /* Second stage walks back the BB, looks at individual statements and as long
1401 as it is confident of how the statements affect contents of the
1402 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1403 describing it. */
1404 gsi = gsi_for_stmt (call);
726a989a
RB
1405 gsi_prev (&gsi);
1406 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1407 {
8b7773a4 1408 struct ipa_known_agg_contents_list *n, **p;
726a989a 1409 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1410 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1411 tree lhs, rhs, lhs_base;
1412 bool partial_overlap;
3e293154 1413
8b7773a4 1414 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1415 continue;
8b75fc9b 1416 if (!gimple_assign_single_p (stmt))
8b7773a4 1417 break;
3e293154 1418
726a989a
RB
1419 lhs = gimple_assign_lhs (stmt);
1420 rhs = gimple_assign_rhs1 (stmt);
7d2fb524
MJ
1421 if (!is_gimple_reg_type (rhs)
1422 || TREE_CODE (lhs) == BIT_FIELD_REF
1423 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1424 break;
3e293154 1425
8b7773a4
MJ
1426 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1427 &lhs_max_size);
1428 if (lhs_max_size == -1
1429 || lhs_max_size != lhs_size
1430 || (lhs_offset < arg_offset
1431 && lhs_offset + lhs_size > arg_offset)
1432 || (lhs_offset < arg_offset + arg_size
1433 && lhs_offset + lhs_size > arg_offset + arg_size))
1434 break;
3e293154 1435
8b7773a4 1436 if (check_ref)
518dc859 1437 {
8b7773a4
MJ
1438 if (TREE_CODE (lhs_base) != MEM_REF
1439 || TREE_OPERAND (lhs_base, 0) != arg_base
1440 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1441 break;
3e293154 1442 }
8b7773a4 1443 else if (lhs_base != arg_base)
774b8a55
MJ
1444 {
1445 if (DECL_P (lhs_base))
1446 continue;
1447 else
1448 break;
1449 }
3e293154 1450
8b7773a4
MJ
1451 if (lhs_offset + lhs_size < arg_offset
1452 || lhs_offset >= (arg_offset + arg_size))
1453 continue;
1454
1455 partial_overlap = false;
1456 p = &list;
1457 while (*p && (*p)->offset < lhs_offset)
3e293154 1458 {
8b7773a4 1459 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1460 {
8b7773a4
MJ
1461 partial_overlap = true;
1462 break;
3e293154 1463 }
8b7773a4
MJ
1464 p = &(*p)->next;
1465 }
1466 if (partial_overlap)
1467 break;
1468 if (*p && (*p)->offset < lhs_offset + lhs_size)
1469 {
1470 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1471 /* We already know this value is subsequently overwritten with
1472 something else. */
1473 continue;
3e293154 1474 else
8b7773a4
MJ
1475 /* Otherwise this is a partial overlap which we cannot
1476 represent. */
1477 break;
3e293154 1478 }
3e293154 1479
8b7773a4
MJ
1480 rhs = get_ssa_def_if_simple_copy (rhs);
1481 n = XALLOCA (struct ipa_known_agg_contents_list);
1482 n->size = lhs_size;
1483 n->offset = lhs_offset;
1484 if (is_gimple_ip_invariant (rhs))
1485 {
1486 n->constant = rhs;
1487 const_count++;
1488 }
1489 else
1490 n->constant = NULL_TREE;
1491 n->next = *p;
1492 *p = n;
3e293154 1493
8b7773a4 1494 item_count++;
dfea20f1
MJ
1495 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1496 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1497 break;
1498 }
be95e2b9 1499
8b7773a4
MJ
1500 /* Third stage just goes over the list and creates an appropriate vector of
1501 ipa_agg_jf_item structures out of it, of course only if there are
1502 any known constants to begin with. */
3e293154 1503
8b7773a4 1504 if (const_count)
3e293154 1505 {
8b7773a4 1506 jfunc->agg.by_ref = by_ref;
9771b263 1507 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1508 while (list)
1509 {
1510 if (list->constant)
1511 {
f32682ca
DN
1512 struct ipa_agg_jf_item item;
1513 item.offset = list->offset - arg_offset;
7d2fb524 1514 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1515 item.value = unshare_expr_without_location (list->constant);
9771b263 1516 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1517 }
1518 list = list->next;
1519 }
3e293154
MJ
1520 }
1521}
1522
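/* Editorial illustration (not part of the original source): the walk above is
   meant to capture callers that initialize an aggregate just before passing
   it, e.g.

     struct S s;
     s.a = 1;
     s.b = 16;
     bar (&s);

   in which case the jump function for the argument records the constants 1
   and 16 together with their offsets within S, so that IPA-CP can later
   substitute the corresponding loads in bar.  */
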
06d65050
JH
1523static tree
1524ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1525{
1526 int n;
1527 tree type = (e->callee
67348ccc 1528 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1529 : gimple_call_fntype (e->call_stmt));
1530 tree t = TYPE_ARG_TYPES (type);
1531
1532 for (n = 0; n < i; n++)
1533 {
1534 if (!t)
1535 break;
1536 t = TREE_CHAIN (t);
1537 }
1538 if (t)
1539 return TREE_VALUE (t);
1540 if (!e->callee)
1541 return NULL;
67348ccc 1542 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1543 for (n = 0; n < i; n++)
1544 {
1545 if (!t)
1546 return NULL;
1547 t = TREE_CHAIN (t);
1548 }
1549 if (t)
1550 return TREE_TYPE (t);
1551 return NULL;
1552}
1553
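/* Editorial note (not part of the original source): the lookup above prefers
   TYPE_ARG_TYPES of the callee's (or the call statement's) function type and
   only falls back to DECL_ARGUMENTS, so for a prototype such as

     void bar (int, double, ...);

   it returns int for I == 0, double for I == 1 and NULL for any further,
   variadic argument whose declared type is unknown.  */
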
3e293154
MJ
1554/* Compute jump function for all arguments of callsite CS and insert the
1555 information in the jump_functions array in the ipa_edge_args corresponding
1556 to this callsite. */
be95e2b9 1557
749aa96d 1558static void
c419671c 1559ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1560 struct cgraph_edge *cs)
3e293154
MJ
1561{
1562 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1563 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1564 gimple call = cs->call_stmt;
8b7773a4 1565 int n, arg_num = gimple_call_num_args (call);
3e293154 1566
606d9a09 1567 if (arg_num == 0 || args->jump_functions)
3e293154 1568 return;
9771b263 1569 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1570
96e24d49
JJ
1571 if (gimple_call_internal_p (call))
1572 return;
5fe8e757
MJ
1573 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1574 return;
1575
8b7773a4
MJ
1576 for (n = 0; n < arg_num; n++)
1577 {
1578 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1579 tree arg = gimple_call_arg (call, n);
06d65050 1580 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1581
8b7773a4 1582 if (is_gimple_ip_invariant (arg))
4502fe8d 1583 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1584 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1585 && TREE_CODE (arg) == PARM_DECL)
1586 {
1587 int index = ipa_get_param_decl_index (info, arg);
1588
1589 gcc_assert (index >=0);
1590 /* Aggregate passed by value, check for pass-through, otherwise we
1591 will attempt to fill in aggregate contents later in this
1592 for cycle. */
1593 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1594 {
b8f6e610 1595 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1596 continue;
1597 }
1598 }
1599 else if (TREE_CODE (arg) == SSA_NAME)
1600 {
1601 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1602 {
1603 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1604 if (index >= 0)
8b7773a4 1605 {
b8f6e610 1606 bool agg_p, type_p;
8b7773a4
MJ
1607 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1608 call, arg);
06d65050
JH
1609 if (param_type && POINTER_TYPE_P (param_type))
1610 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1611 call, jfunc);
1612 else
1613 type_p = false;
b8f6e610 1614 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1615 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1616 type_p);
8b7773a4
MJ
1617 }
1618 }
1619 else
1620 {
1621 gimple stmt = SSA_NAME_DEF_STMT (arg);
1622 if (is_gimple_assign (stmt))
1623 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1624 call, stmt, arg, param_type);
8b7773a4
MJ
1625 else if (gimple_code (stmt) == GIMPLE_PHI)
1626 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1627 call, stmt, param_type);
8b7773a4
MJ
1628 }
1629 }
1630 else
06d65050
JH
1631 compute_known_type_jump_func (arg, jfunc, call,
1632 param_type
1633 && POINTER_TYPE_P (param_type)
1634 ? TREE_TYPE (param_type)
1635 : NULL);
3e293154 1636
8b7773a4
MJ
1637 if ((jfunc->type != IPA_JF_PASS_THROUGH
1638 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1639 && (jfunc->type != IPA_JF_ANCESTOR
1640 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1641 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1642 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1643 determine_known_aggregate_parts (call, arg, jfunc);
1644 }
3e293154
MJ
1645}
1646
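/* Editorial illustration (not part of the original source): for a call such as

     bar (7, a_1(D), &this_2(D)->D.2354, s);

   the loop above would typically produce a CONST jump function for 7, a
   PASS_THROUGH function for the unmodified default-definition argument a_1, an
   ANCESTOR function for the address of a sub-object of THIS, and for the
   aggregate S either a pass-through or a set of known aggregate parts,
   depending on what determine_known_aggregate_parts can prove.  */
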
749aa96d
MJ
1647/* Compute jump functions for all edges - both direct and indirect - outgoing
1648 from NODE. Also count the actual arguments in the process. */
1649
062c604f
MJ
1650static void
1651ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1652 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1653{
1654 struct cgraph_edge *cs;
1655
1656 for (cs = node->callees; cs; cs = cs->next_callee)
1657 {
d7da5cc8
MJ
1658 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1659 NULL);
749aa96d
MJ
1660 /* We do not need to bother analyzing calls to unknown
1661 functions unless they may become known during lto/whopr. */
67348ccc 1662 if (!callee->definition && !flag_lto)
749aa96d 1663 continue;
c419671c 1664 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1665 }
1666
1667 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1668 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1669}
1670
8b7773a4
MJ
1671/* If STMT looks like a statement loading a value from a member pointer formal
1672 parameter, return that parameter and store the offset of the field to
1673 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1674 might be clobbered). If USE_DELTA, then we look for a use of the delta
1675 field rather than the pfn. */
be95e2b9 1676
3e293154 1677static tree
8b7773a4
MJ
1678ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1679 HOST_WIDE_INT *offset_p)
3e293154 1680{
8b7773a4
MJ
1681 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1682
1683 if (!gimple_assign_single_p (stmt))
1684 return NULL_TREE;
3e293154 1685
8b7773a4 1686 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1687 if (TREE_CODE (rhs) == COMPONENT_REF)
1688 {
1689 ref_field = TREE_OPERAND (rhs, 1);
1690 rhs = TREE_OPERAND (rhs, 0);
1691 }
1692 else
1693 ref_field = NULL_TREE;
d242d063 1694 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1695 return NULL_TREE;
3e293154 1696 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1697 if (TREE_CODE (rec) != ADDR_EXPR)
1698 return NULL_TREE;
1699 rec = TREE_OPERAND (rec, 0);
3e293154 1700 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1701 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1702 return NULL_TREE;
d242d063 1703 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1704
8b7773a4
MJ
1705 if (use_delta)
1706 fld = delta_field;
1707 else
1708 fld = ptr_field;
1709 if (offset_p)
1710 *offset_p = int_bit_position (fld);
1711
ae788515
EB
1712 if (ref_field)
1713 {
1714 if (integer_nonzerop (ref_offset))
1715 return NULL_TREE;
ae788515
EB
1716 return ref_field == fld ? rec : NULL_TREE;
1717 }
3e293154 1718 else
8b7773a4
MJ
1719 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1720 : NULL_TREE;
3e293154
MJ
1721}
1722
1723/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1724
3e293154
MJ
1725static bool
1726ipa_is_ssa_with_stmt_def (tree t)
1727{
1728 if (TREE_CODE (t) == SSA_NAME
1729 && !SSA_NAME_IS_DEFAULT_DEF (t))
1730 return true;
1731 else
1732 return false;
1733}
1734
40591473
MJ
1735/* Find the indirect call graph edge corresponding to STMT and mark it as a
1736 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1737 indirect call graph edge. */
be95e2b9 1738
40591473
MJ
1739static struct cgraph_edge *
1740ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1741{
e33c6cd6 1742 struct cgraph_edge *cs;
3e293154 1743
5f902d76 1744 cs = cgraph_edge (node, stmt);
b258210c 1745 cs->indirect_info->param_index = param_index;
8b7773a4 1746 cs->indirect_info->agg_contents = 0;
c13bc3d9 1747 cs->indirect_info->member_ptr = 0;
40591473 1748 return cs;
3e293154
MJ
1749}
1750
e33c6cd6 1751/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1752 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1753 intermediate information about each formal parameter. Currently it checks
1754 whether the call calls a pointer that is a formal parameter and if so, the
1755 parameter is marked with the called flag and an indirect call graph edge
1756 describing the call is created. This is very simple for ordinary pointers
1757 represented in SSA but not-so-nice when it comes to member pointers. The
1758 ugly part of this function does nothing more than trying to match the
1759 pattern of such a call. An example of such a pattern is the gimple dump
1760 below, the call is on the last line:
3e293154 1761
ae788515
EB
1762 <bb 2>:
1763 f$__delta_5 = f.__delta;
1764 f$__pfn_24 = f.__pfn;
1765
1766 or
3e293154 1767 <bb 2>:
d242d063
MJ
1768 f$__delta_5 = MEM[(struct *)&f];
1769 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1770
ae788515 1771 and a few lines below:
8aa29647
MJ
1772
1773 <bb 5>
3e293154
MJ
1774 D.2496_3 = (int) f$__pfn_24;
1775 D.2497_4 = D.2496_3 & 1;
1776 if (D.2497_4 != 0)
1777 goto <bb 3>;
1778 else
1779 goto <bb 4>;
1780
8aa29647 1781 <bb 6>:
3e293154
MJ
1782 D.2500_7 = (unsigned int) f$__delta_5;
1783 D.2501_8 = &S + D.2500_7;
1784 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1785 D.2503_10 = *D.2502_9;
1786 D.2504_12 = f$__pfn_24 + -1;
1787 D.2505_13 = (unsigned int) D.2504_12;
1788 D.2506_14 = D.2503_10 + D.2505_13;
1789 D.2507_15 = *D.2506_14;
1790 iftmp.11_16 = (String:: *) D.2507_15;
1791
8aa29647 1792 <bb 7>:
3e293154
MJ
1793 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1794 D.2500_19 = (unsigned int) f$__delta_5;
1795 D.2508_20 = &S + D.2500_19;
1796 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1797
1798 Such patterns are results of simple calls to a member pointer:
1799
1800 int doprinting (int (MyString::* f)(int) const)
1801 {
1802 MyString S ("somestring");
1803
1804 return (S.*f)(4);
1805 }
8b7773a4
MJ
1806
1807 Moreover, the function also looks for called pointers loaded from aggregates
1808 passed by value or reference. */
3e293154
MJ
1809
1810static void
b258210c
MJ
1811ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1812 struct ipa_node_params *info,
c419671c 1813 struct param_analysis_info *parms_ainfo,
b258210c 1814 gimple call, tree target)
3e293154 1815{
726a989a 1816 gimple def;
3e293154 1817 tree n1, n2;
726a989a
RB
1818 gimple d1, d2;
1819 tree rec, rec2, cond;
1820 gimple branch;
3e293154 1821 int index;
3e293154 1822 basic_block bb, virt_bb, join;
8b7773a4
MJ
1823 HOST_WIDE_INT offset;
1824 bool by_ref;
3e293154 1825
3e293154
MJ
1826 if (SSA_NAME_IS_DEFAULT_DEF (target))
1827 {
b258210c 1828 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1829 index = ipa_get_param_decl_index (info, var);
1830 if (index >= 0)
40591473 1831 ipa_note_param_call (node, index, call);
3e293154
MJ
1832 return;
1833 }
1834
8b7773a4
MJ
1835 def = SSA_NAME_DEF_STMT (target);
1836 if (gimple_assign_single_p (def)
d044dd17 1837 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4 1838 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 1839 NULL, &by_ref))
8b7773a4
MJ
1840 {
1841 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1842 if (cs->indirect_info->offset != offset)
1843 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1844 cs->indirect_info->offset = offset;
1845 cs->indirect_info->agg_contents = 1;
1846 cs->indirect_info->by_ref = by_ref;
1847 return;
1848 }
1849
3e293154
MJ
1850 /* Now we need to try to match the complex pattern of calling a member
1851 pointer. */
8b7773a4
MJ
1852 if (gimple_code (def) != GIMPLE_PHI
1853 || gimple_phi_num_args (def) != 2
1854 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1855 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1856 return;
1857
3e293154
MJ
1858 /* First, we need to check whether one of these is a load from a member
1859 pointer that is a parameter to this function. */
1860 n1 = PHI_ARG_DEF (def, 0);
1861 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1862 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1863 return;
1864 d1 = SSA_NAME_DEF_STMT (n1);
1865 d2 = SSA_NAME_DEF_STMT (n2);
1866
8aa29647 1867 join = gimple_bb (def);
8b7773a4 1868 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1869 {
8b7773a4 1870 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1871 return;
1872
8aa29647 1873 bb = EDGE_PRED (join, 0)->src;
726a989a 1874 virt_bb = gimple_bb (d2);
3e293154 1875 }
8b7773a4 1876 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1877 {
8aa29647 1878 bb = EDGE_PRED (join, 1)->src;
726a989a 1879 virt_bb = gimple_bb (d1);
3e293154
MJ
1880 }
1881 else
1882 return;
1883
1884 /* Second, we need to check that the basic blocks are laid out in the way
1885 corresponding to the pattern. */
1886
3e293154
MJ
1887 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1888 || single_pred (virt_bb) != bb
1889 || single_succ (virt_bb) != join)
1890 return;
1891
1892 /* Third, let's see that the branching is done depending on the least
1893 significant bit of the pfn. */
1894
1895 branch = last_stmt (bb);
8aa29647 1896 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1897 return;
1898
12430896
RG
1899 if ((gimple_cond_code (branch) != NE_EXPR
1900 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1901 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1902 return;
3e293154 1903
726a989a 1904 cond = gimple_cond_lhs (branch);
3e293154
MJ
1905 if (!ipa_is_ssa_with_stmt_def (cond))
1906 return;
1907
726a989a 1908 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1909 if (!is_gimple_assign (def)
726a989a
RB
1910 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1911 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1912 return;
726a989a
RB
1913
1914 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1915 if (!ipa_is_ssa_with_stmt_def (cond))
1916 return;
1917
726a989a 1918 def = SSA_NAME_DEF_STMT (cond);
3e293154 1919
8b75fc9b
MJ
1920 if (is_gimple_assign (def)
1921 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1922 {
726a989a 1923 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1924 if (!ipa_is_ssa_with_stmt_def (cond))
1925 return;
726a989a 1926 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1927 }
1928
6f7b8b70
RE
1929 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1930 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1931 == ptrmemfunc_vbit_in_delta),
1932 NULL);
3e293154
MJ
1933 if (rec != rec2)
1934 return;
1935
1936 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1937 if (index >= 0
1938 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1939 {
1940 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1941 if (cs->indirect_info->offset != offset)
1942 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1943 cs->indirect_info->offset = offset;
1944 cs->indirect_info->agg_contents = 1;
c13bc3d9 1945 cs->indirect_info->member_ptr = 1;
8b7773a4 1946 }
3e293154
MJ
1947
1948 return;
1949}
1950
b258210c
MJ
1951/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1952 object referenced in the expression is a formal parameter of the caller
1953 (described by INFO), create a call note for the statement. */
1954
1955static void
1956ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1957 struct ipa_node_params *info, gimple call,
1958 tree target)
1959{
40591473
MJ
1960 struct cgraph_edge *cs;
1961 struct cgraph_indirect_call_info *ii;
f65cf2b7 1962 struct ipa_jump_func jfunc;
b258210c 1963 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1964 int index;
40591473 1965 HOST_WIDE_INT anc_offset;
b258210c 1966
05842ff5
MJ
1967 if (!flag_devirtualize)
1968 return;
1969
40591473 1970 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1971 return;
1972
40591473
MJ
1973 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1974 {
1975 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1976 return;
b258210c 1977
40591473
MJ
1978 anc_offset = 0;
1979 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1980 gcc_assert (index >= 0);
06d65050
JH
1981 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1982 call, &jfunc))
40591473
MJ
1983 return;
1984 }
1985 else
1986 {
1987 gimple stmt = SSA_NAME_DEF_STMT (obj);
1988 tree expr;
1989
1990 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1991 if (!expr)
1992 return;
1993 index = ipa_get_param_decl_index (info,
1994 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1995 gcc_assert (index >= 0);
06d65050
JH
1996 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1997 call, &jfunc, anc_offset))
40591473
MJ
1998 return;
1999 }
2000
2001 cs = ipa_note_param_call (node, index, call);
2002 ii = cs->indirect_info;
8b7773a4 2003 ii->offset = anc_offset;
ae7e9ddd 2004 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2005 ii->otr_type = obj_type_ref_class (target);
40591473 2006 ii->polymorphic = 1;
b258210c
MJ
2007}
2008
2009/* Analyze whether and how the call statement CALL utilizes formal parameters
c419671c 2010 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2011 containing intermediate information about each formal parameter. */
b258210c
MJ
2012
2013static void
2014ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2015 struct ipa_node_params *info,
c419671c 2016 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2017{
2018 tree target = gimple_call_fn (call);
2019
25583c4f
RS
2020 if (!target)
2021 return;
b258210c 2022 if (TREE_CODE (target) == SSA_NAME)
c419671c 2023 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2024 else if (virtual_method_call_p (target))
b258210c
MJ
2025 ipa_analyze_virtual_call_uses (node, info, call, target);
2026}
2027
2028
e33c6cd6
MJ
2029/* Analyze the call statement STMT with respect to formal parameters (described
2030 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2031 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2032 intermediate information about each formal parameter. */
be95e2b9 2033
3e293154 2034static void
e33c6cd6 2035ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2036 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2037{
726a989a 2038 if (is_gimple_call (stmt))
c419671c 2039 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2040}
2041
2042/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2043 If OP is a parameter declaration, mark it as used in the info structure
2044 passed in DATA. */
2045
2046static bool
2047visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2048 tree op, void *data)
2049{
2050 struct ipa_node_params *info = (struct ipa_node_params *) data;
2051
2052 op = get_base_address (op);
2053 if (op
2054 && TREE_CODE (op) == PARM_DECL)
2055 {
2056 int index = ipa_get_param_decl_index (info, op);
2057 gcc_assert (index >= 0);
310bc633 2058 ipa_set_param_used (info, index, true);
062c604f
MJ
2059 }
2060
2061 return false;
3e293154
MJ
2062}
2063
2064/* Scan the function body of NODE and inspect the uses of formal parameters.
2065 Store the findings in various structures of the associated ipa_node_params
c419671c 2066 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2067 vector containing intermediate information about each formal parameter. */
be95e2b9 2068
062c604f
MJ
2069static void
2070ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2071 struct param_analysis_info *parms_ainfo)
3e293154 2072{
67348ccc 2073 tree decl = node->decl;
3e293154
MJ
2074 basic_block bb;
2075 struct function *func;
726a989a 2076 gimple_stmt_iterator gsi;
3e293154 2077 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2078 int i;
3e293154 2079
726a989a 2080 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2081 return;
3e293154 2082
5fe8e757
MJ
2083 info->uses_analysis_done = 1;
2084 if (ipa_func_spec_opts_forbid_analysis_p (node))
2085 {
2086 for (i = 0; i < ipa_get_param_count (info); i++)
2087 {
2088 ipa_set_param_used (info, i, true);
2089 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2090 }
2091 return;
2092 }
2093
062c604f
MJ
2094 for (i = 0; i < ipa_get_param_count (info); i++)
2095 {
2096 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2097 int controlled_uses = 0;
2098
062c604f
MJ
2099 /* For SSA regs see if parameter is used. For non-SSA we compute
2100 the flag during modification analysis. */
4502fe8d
MJ
2101 if (is_gimple_reg (parm))
2102 {
67348ccc 2103 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2104 parm);
2105 if (ddef && !has_zero_uses (ddef))
2106 {
2107 imm_use_iterator imm_iter;
2108 use_operand_p use_p;
2109
2110 ipa_set_param_used (info, i, true);
2111 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2112 if (!is_gimple_call (USE_STMT (use_p)))
2113 {
2114 controlled_uses = IPA_UNDESCRIBED_USE;
2115 break;
2116 }
2117 else
2118 controlled_uses++;
2119 }
2120 else
2121 controlled_uses = 0;
2122 }
2123 else
2124 controlled_uses = IPA_UNDESCRIBED_USE;
2125 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2126 }
2127
3e293154
MJ
2128 func = DECL_STRUCT_FUNCTION (decl);
2129 FOR_EACH_BB_FN (bb, func)
2130 {
726a989a 2131 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2132 {
726a989a 2133 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2134
2135 if (is_gimple_debug (stmt))
2136 continue;
2137
c419671c 2138 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2139 walk_stmt_load_store_addr_ops (stmt, info,
2140 visit_ref_for_mod_analysis,
2141 visit_ref_for_mod_analysis,
2142 visit_ref_for_mod_analysis);
518dc859 2143 }
355a7673 2144 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2145 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2146 visit_ref_for_mod_analysis,
2147 visit_ref_for_mod_analysis,
2148 visit_ref_for_mod_analysis);
518dc859 2149 }
3e293154
MJ
2150}
2151
2c9561b5
MJ
2152/* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2153
2154static void
2155free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2156{
2157 int i;
2158
2159 for (i = 0; i < param_count; i++)
2160 {
2161 if (parms_ainfo[i].parm_visited_statements)
2162 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2163 if (parms_ainfo[i].pt_visited_statements)
2164 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2165 }
2166}
2167
dd5a833e
MS
2168/* Initialize the array describing properties of formal parameters
2169 of NODE, analyze their uses and compute jump functions associated
2170 with actual arguments of calls from within NODE. */
062c604f
MJ
2171
2172void
2173ipa_analyze_node (struct cgraph_node *node)
2174{
57dbdc5a 2175 struct ipa_node_params *info;
c419671c 2176 struct param_analysis_info *parms_ainfo;
2c9561b5 2177 int param_count;
062c604f 2178
57dbdc5a
MJ
2179 ipa_check_create_node_params ();
2180 ipa_check_create_edge_args ();
2181 info = IPA_NODE_REF (node);
67348ccc 2182 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2183 ipa_initialize_node_params (node);
2184
2185 param_count = ipa_get_param_count (info);
c419671c
MJ
2186 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2187 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2188
c419671c
MJ
2189 ipa_analyze_params_uses (node, parms_ainfo);
2190 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2191
2c9561b5 2192 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2193 pop_cfun ();
062c604f
MJ
2194}
2195
e248d83f
MJ
2196/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2197 attempt a type-based devirtualization. If successful, return the
2198 target function declaration, otherwise return NULL. */
2199
2200tree
2201ipa_intraprocedural_devirtualization (gimple call)
2202{
2203 tree binfo, token, fndecl;
2204 struct ipa_jump_func jfunc;
2205 tree otr = gimple_call_fn (call);
2206
2207 jfunc.type = IPA_JF_UNKNOWN;
2208 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2209 call, obj_type_ref_class (otr));
e248d83f
MJ
2210 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2211 return NULL_TREE;
2212 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2213 if (!binfo)
2214 return NULL_TREE;
2215 token = OBJ_TYPE_REF_TOKEN (otr);
ae7e9ddd 2216 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
e248d83f 2217 binfo);
450ad0cd
JH
2218#ifdef ENABLE_CHECKING
2219 if (fndecl)
2220 gcc_assert (possible_polymorphic_call_target_p
2221 (otr, cgraph_get_node (fndecl)));
2222#endif
e248d83f
MJ
2223 return fndecl;
2224}
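/* Illustrative use (a hedged sketch, not part of the original file): a GIMPLE
   pass that encounters an OBJ_TYPE_REF call may attempt

     tree fndecl = ipa_intraprocedural_devirtualization (call_stmt);
     if (fndecl)
       gimple_call_set_fndecl (call_stmt, fndecl);

   assuming gimple_call_set_fndecl is the appropriate way to redirect the call
   in that context; actual callers may perform additional folding and
   statement updating.  */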
062c604f 2225
61502ca8 2226/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
 2227 is being inlined, knowing that DST is of type ancestor and SRC of known
2228 type. */
2229
2230static void
2231combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2232 struct ipa_jump_func *dst)
2233{
c7573249
MJ
2234 HOST_WIDE_INT combined_offset;
2235 tree combined_type;
b258210c 2236
b8f6e610
MJ
2237 if (!ipa_get_jf_ancestor_type_preserved (dst))
2238 {
2239 dst->type = IPA_JF_UNKNOWN;
2240 return;
2241 }
2242
7b872d9e
MJ
2243 combined_offset = ipa_get_jf_known_type_offset (src)
2244 + ipa_get_jf_ancestor_offset (dst);
2245 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2246
7b872d9e
MJ
2247 ipa_set_jf_known_type (dst, combined_offset,
2248 ipa_get_jf_known_type_base_type (src),
2249 combined_type);
b258210c
MJ
2250}
2251
be95e2b9 2252/* Update the jump functions associated with call graph edge E when the call
3e293154 2253 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2254 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2255
3e293154
MJ
2256static void
2257update_jump_functions_after_inlining (struct cgraph_edge *cs,
2258 struct cgraph_edge *e)
2259{
2260 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2261 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2262 int count = ipa_get_cs_argument_count (args);
2263 int i;
2264
2265 for (i = 0; i < count; i++)
2266 {
b258210c 2267 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2268
685b0d13
MJ
2269 if (dst->type == IPA_JF_ANCESTOR)
2270 {
b258210c 2271 struct ipa_jump_func *src;
8b7773a4 2272 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2273
b258210c
MJ
2274 /* Variable number of arguments can cause havoc if we try to access
2275 one that does not exist in the inlined edge. So make sure we
2276 don't. */
8b7773a4 2277 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2278 {
2279 dst->type = IPA_JF_UNKNOWN;
2280 continue;
2281 }
2282
8b7773a4
MJ
2283 src = ipa_get_ith_jump_func (top, dst_fid);
2284
2285 if (src->agg.items
2286 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2287 {
2288 struct ipa_agg_jf_item *item;
2289 int j;
2290
2291 /* Currently we do not produce clobber aggregate jump functions,
2292 replace with merging when we do. */
2293 gcc_assert (!dst->agg.items);
2294
9771b263 2295 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2296 dst->agg.by_ref = src->agg.by_ref;
9771b263 2297 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2298 item->offset -= dst->value.ancestor.offset;
2299 }
2300
b258210c
MJ
2301 if (src->type == IPA_JF_KNOWN_TYPE)
2302 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2303 else if (src->type == IPA_JF_PASS_THROUGH
2304 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2305 {
2306 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2307 dst->value.ancestor.agg_preserved &=
2308 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2309 dst->value.ancestor.type_preserved &=
2310 src->value.pass_through.type_preserved;
8b7773a4 2311 }
b258210c
MJ
2312 else if (src->type == IPA_JF_ANCESTOR)
2313 {
2314 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2315 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2316 dst->value.ancestor.agg_preserved &=
2317 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2318 dst->value.ancestor.type_preserved &=
2319 src->value.ancestor.type_preserved;
b258210c
MJ
2320 }
2321 else
2322 dst->type = IPA_JF_UNKNOWN;
2323 }
2324 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2325 {
b258210c
MJ
2326 struct ipa_jump_func *src;
2327 /* We must check range due to calls with variable number of arguments
2328 and we cannot combine jump functions with operations. */
2329 if (dst->value.pass_through.operation == NOP_EXPR
2330 && (dst->value.pass_through.formal_id
2331 < ipa_get_cs_argument_count (top)))
2332 {
8b7773a4
MJ
2333 int dst_fid = dst->value.pass_through.formal_id;
2334 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2335 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2336
b8f6e610
MJ
2337 switch (src->type)
2338 {
2339 case IPA_JF_UNKNOWN:
2340 dst->type = IPA_JF_UNKNOWN;
2341 break;
2342 case IPA_JF_KNOWN_TYPE:
2343 ipa_set_jf_known_type (dst,
2344 ipa_get_jf_known_type_offset (src),
2345 ipa_get_jf_known_type_base_type (src),
2346 ipa_get_jf_known_type_base_type (src));
2347 break;
2348 case IPA_JF_CONST:
2349 ipa_set_jf_cst_copy (dst, src);
2350 break;
2351
2352 case IPA_JF_PASS_THROUGH:
2353 {
2354 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2355 enum tree_code operation;
2356 operation = ipa_get_jf_pass_through_operation (src);
2357
2358 if (operation == NOP_EXPR)
2359 {
2360 bool agg_p, type_p;
2361 agg_p = dst_agg_p
2362 && ipa_get_jf_pass_through_agg_preserved (src);
2363 type_p = ipa_get_jf_pass_through_type_preserved (src)
2364 && ipa_get_jf_pass_through_type_preserved (dst);
2365 ipa_set_jf_simple_pass_through (dst, formal_id,
2366 agg_p, type_p);
2367 }
2368 else
2369 {
2370 tree operand = ipa_get_jf_pass_through_operand (src);
2371 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2372 operation);
2373 }
2374 break;
2375 }
2376 case IPA_JF_ANCESTOR:
2377 {
2378 bool agg_p, type_p;
2379 agg_p = dst_agg_p
2380 && ipa_get_jf_ancestor_agg_preserved (src);
2381 type_p = ipa_get_jf_ancestor_type_preserved (src)
2382 && ipa_get_jf_pass_through_type_preserved (dst);
2383 ipa_set_ancestor_jf (dst,
2384 ipa_get_jf_ancestor_offset (src),
2385 ipa_get_jf_ancestor_type (src),
2386 ipa_get_jf_ancestor_formal_id (src),
2387 agg_p, type_p);
2388 break;
2389 }
2390 default:
2391 gcc_unreachable ();
2392 }
8b7773a4
MJ
2393
2394 if (src->agg.items
b8f6e610 2395 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2396 {
2397 /* Currently we do not produce clobber aggregate jump
2398 functions, replace with merging when we do. */
2399 gcc_assert (!dst->agg.items);
2400
2401 dst->agg.by_ref = src->agg.by_ref;
9771b263 2402 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2403 }
b258210c
MJ
2404 }
2405 else
2406 dst->type = IPA_JF_UNKNOWN;
3e293154 2407 }
b258210c
MJ
2408 }
2409}
2410
2411/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2412 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2413
3949c4a7 2414struct cgraph_edge *
81fa35bd 2415ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2416{
2417 struct cgraph_node *callee;
0f378cb5 2418 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2419 bool unreachable = false;
b258210c 2420
ceeffab0
MJ
2421 if (TREE_CODE (target) == ADDR_EXPR)
2422 target = TREE_OPERAND (target, 0);
b258210c 2423 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2424 {
2425 target = canonicalize_constructor_val (target, NULL);
2426 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2427 {
c13bc3d9
MJ
2428 if (ie->indirect_info->member_ptr)
2429 /* Member pointer call that goes through a VMT lookup. */
2430 return NULL;
2431
a0a7b611
JH
2432 if (dump_file)
2433 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2434 " in %s/%i, making it unreachable.\n",
fec39fa6 2435 ie->caller->name (), ie->caller->order);
48b1474e
MJ
2436 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2437 callee = cgraph_get_create_node (target);
2438 unreachable = true;
a0a7b611 2439 }
48b1474e
MJ
2440 else
2441 callee = cgraph_get_node (target);
a0a7b611 2442 }
48b1474e
MJ
2443 else
2444 callee = cgraph_get_node (target);
a0a7b611
JH
2445
2446 /* Because may-edges are not explicitly represented and the vtable may be external,
2447 we may create the first reference to the object in the unit. */
2448 if (!callee || callee->global.inlined_to)
2449 {
a0a7b611
JH
2450
2451 /* We had better ensure that we can refer to it.
2452 In the case of static functions we are out of luck, since we already
2453 removed its body. In the case of public functions we may or may
2454 not introduce the reference. */
2455 if (!canonicalize_constructor_val (target, NULL)
2456 || !TREE_PUBLIC (target))
2457 {
2458 if (dump_file)
2459 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2460 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
fec39fa6 2461 xstrdup (ie->caller->name ()),
67348ccc 2462 ie->caller->order,
fec39fa6 2463 xstrdup (ie->callee->name ()),
67348ccc 2464 ie->callee->order);
a0a7b611
JH
2465 return NULL;
2466 }
6f99e449 2467 callee = cgraph_get_create_node (target);
a0a7b611 2468 }
1dbee8c9 2469 ipa_check_create_node_params ();
ceeffab0 2470
81fa35bd
MJ
2471 /* We can not make edges to inline clones. It is a bug that someone removed
2472 the cgraph node too early. */
17afc0fe
JH
2473 gcc_assert (!callee->global.inlined_to);
2474
48b1474e 2475 if (dump_file && !unreachable)
b258210c
MJ
2476 {
2477 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2478 "(%s/%i -> %s/%i), for stmt ",
b258210c 2479 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
fec39fa6 2480 xstrdup (ie->caller->name ()),
67348ccc 2481 ie->caller->order,
fec39fa6 2482 xstrdup (callee->name ()),
67348ccc 2483 callee->order);
b258210c
MJ
2484 if (ie->call_stmt)
2485 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2486 else
2487 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2488 }
2489 ie = cgraph_make_edge_direct (ie, callee);
2490 es = inline_edge_summary (ie);
2491 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2492 - eni_size_weights.call_cost);
2493 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2494 - eni_time_weights.call_cost);
749aa96d 2495
b258210c 2496 return ie;
3e293154
MJ
2497}
2498
8b7773a4
MJ
2499/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2500 return NULL if there is not any. BY_REF specifies whether the value has to
2501 be passed by reference or by value. */
2502
2503tree
2504ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2505 HOST_WIDE_INT offset, bool by_ref)
2506{
2507 struct ipa_agg_jf_item *item;
2508 int i;
2509
2510 if (by_ref != agg->by_ref)
2511 return NULL;
2512
9771b263 2513 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2514 if (item->offset == offset)
2515 {
2516 /* Currently we do not have clobber values, return NULL for them once
2517 we do. */
2518 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2519 return item->value;
2520 }
8b7773a4
MJ
2521 return NULL;
2522}
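/* Illustrative use (added for exposition): indirect-call resolution later in
   this file queries the aggregate part of a jump function roughly like

     target = ipa_find_agg_cst_for_param (&jfunc->agg,
                                          ie->indirect_info->offset,
                                          ie->indirect_info->by_ref);

   i.e. it asks whether an interprocedurally invariant value is known to be
   stored at the given offset, with BY_REF matching how the aggregate itself
   is passed.  */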
2523
4502fe8d 2524/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2525 reference description RDESC. Return true if the reference has been
2526 successfully found and removed. */
4502fe8d 2527
568cda29 2528static bool
5e20cdc9 2529remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2530{
2531 struct ipa_ref *to_del;
2532 struct cgraph_edge *origin;
2533
2534 origin = rdesc->cs;
a854f856
MJ
2535 if (!origin)
2536 return false;
67348ccc 2537 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2538 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2539 if (!to_del)
2540 return false;
2541
4502fe8d
MJ
2542 ipa_remove_reference (to_del);
2543 if (dump_file)
2544 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
2545 xstrdup (origin->caller->name ()),
2546 origin->caller->order, xstrdup (symbol->name ()));
568cda29 2547 return true;
4502fe8d
MJ
2548}
2549
2550/* If JFUNC has a reference description with refcount different from
2551 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2552 NULL. JFUNC must be a constant jump function. */
2553
2554static struct ipa_cst_ref_desc *
2555jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2556{
2557 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2558 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2559 return rdesc;
2560 else
2561 return NULL;
2562}
2563
568cda29
MJ
2564/* If the value of constant jump function JFUNC is an address of a function
2565 declaration, return the associated call graph node. Otherwise return
2566 NULL. */
2567
2568static cgraph_node *
2569cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2570{
2571 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2572 tree cst = ipa_get_jf_constant (jfunc);
2573 if (TREE_CODE (cst) != ADDR_EXPR
2574 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2575 return NULL;
2576
2577 return cgraph_get_node (TREE_OPERAND (cst, 0));
2578}
2579
2580
2581/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2582 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2583 the edge specified in the rdesc. Return false if either the symbol or the
2584 reference could not be found, otherwise return true. */
2585
2586static bool
2587try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2588{
2589 struct ipa_cst_ref_desc *rdesc;
2590 if (jfunc->type == IPA_JF_CONST
2591 && (rdesc = jfunc_rdesc_usable (jfunc))
2592 && --rdesc->refcount == 0)
2593 {
5e20cdc9 2594 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2595 if (!symbol)
2596 return false;
2597
2598 return remove_described_reference (symbol, rdesc);
2599 }
2600 return true;
2601}
2602
b258210c
MJ
2603/* Try to find a destination for indirect edge IE that corresponds to a simple
2604 call or a call of a member function pointer and where the destination is a
2605 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2606 determined, return the newly direct edge, otherwise return NULL.
2607 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2608
b258210c
MJ
2609static struct cgraph_edge *
2610try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2611 struct ipa_jump_func *jfunc,
2612 struct ipa_node_params *new_root_info)
b258210c 2613{
4502fe8d 2614 struct cgraph_edge *cs;
b258210c 2615 tree target;
042ae7d2 2616 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2617
8b7773a4 2618 if (ie->indirect_info->agg_contents)
d250540a
MJ
2619 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2620 ie->indirect_info->offset,
2621 ie->indirect_info->by_ref);
b258210c 2622 else
d250540a
MJ
2623 target = ipa_value_from_jfunc (new_root_info, jfunc);
2624 if (!target)
2625 return NULL;
4502fe8d
MJ
2626 cs = ipa_make_edge_direct_to_target (ie, target);
2627
a12cd2db 2628 if (cs && !agg_contents)
568cda29
MJ
2629 {
2630 bool ok;
2631 gcc_checking_assert (cs->callee
ae6d0907
MJ
2632 && (cs != ie
2633 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2634 || !cgraph_node_for_jfunc (jfunc)
2635 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2636 ok = try_decrement_rdesc_refcount (jfunc);
2637 gcc_checking_assert (ok);
2638 }
4502fe8d
MJ
2639
2640 return cs;
b258210c
MJ
2641}
2642
d250540a
MJ
2643/* Try to find a destination for indirect edge IE that corresponds to a virtual
2644 call based on a formal parameter which is described by jump function JFUNC
2645 and if it can be determined, make it direct and return the direct edge.
2646 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2647 are relative to. */
b258210c
MJ
2648
2649static struct cgraph_edge *
2650try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2651 struct ipa_jump_func *jfunc,
2652 struct ipa_node_params *new_root_info)
3e293154 2653{
c7573249 2654 tree binfo, target;
b258210c 2655
d250540a
MJ
2656 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2657
da942ca0 2658 if (!binfo)
b258210c 2659 return NULL;
3e293154 2660
da942ca0
JH
2661 if (TREE_CODE (binfo) != TREE_BINFO)
2662 {
c49bdb2e
JH
2663 binfo = gimple_extract_devirt_binfo_from_cst
2664 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2665 if (!binfo)
2666 return NULL;
2667 }
2668
d250540a 2669 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2670 ie->indirect_info->otr_type);
b258210c 2671 if (binfo)
c7573249
MJ
2672 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2673 binfo);
b258210c
MJ
2674 else
2675 return NULL;
2676
2677 if (target)
450ad0cd
JH
2678 {
2679#ifdef ENABLE_CHECKING
2680 gcc_assert (possible_polymorphic_call_target_p
2681 (ie, cgraph_get_node (target)));
2682#endif
2683 return ipa_make_edge_direct_to_target (ie, target);
2684 }
b258210c
MJ
2685 else
2686 return NULL;
3e293154
MJ
2687}
2688
2689/* Update the param called notes associated with NODE when CS is being inlined,
2690 assuming NODE is (potentially indirectly) inlined into CS->callee.
2691 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2692 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2693 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 2694
f8e2a1ed 2695static bool
e33c6cd6
MJ
2696update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2697 struct cgraph_node *node,
9771b263 2698 vec<cgraph_edge_p> *new_edges)
3e293154 2699{
9e97ff61 2700 struct ipa_edge_args *top;
b258210c 2701 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2702 struct ipa_node_params *new_root_info;
f8e2a1ed 2703 bool res = false;
3e293154 2704
e33c6cd6 2705 ipa_check_create_edge_args ();
9e97ff61 2706 top = IPA_EDGE_REF (cs);
d250540a
MJ
2707 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2708 ? cs->caller->global.inlined_to
2709 : cs->caller);
e33c6cd6
MJ
2710
2711 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2712 {
e33c6cd6 2713 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2714 struct ipa_jump_func *jfunc;
8b7773a4 2715 int param_index;
3e293154 2716
e33c6cd6 2717 next_ie = ie->next_callee;
3e293154 2718
5f902d76
JH
2719 if (ici->param_index == -1)
2720 continue;
e33c6cd6 2721
3e293154 2722 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2723 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2724 {
5ee53a06 2725 ici->param_index = -1;
3e293154
MJ
2726 continue;
2727 }
2728
8b7773a4
MJ
2729 param_index = ici->param_index;
2730 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2731
2732 if (!flag_indirect_inlining)
36b72910
JH
2733 new_direct_edge = NULL;
2734 else if (ici->polymorphic)
d250540a
MJ
2735 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2736 new_root_info);
b258210c 2737 else
d250540a
MJ
2738 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2739 new_root_info);
042ae7d2
JH
2740 /* If speculation was removed, then we need to do nothing. */
2741 if (new_direct_edge && new_direct_edge != ie)
2742 {
2743 new_direct_edge->indirect_inlining_edge = 1;
2744 top = IPA_EDGE_REF (cs);
2745 res = true;
2746 }
2747 else if (new_direct_edge)
685b0d13 2748 {
b258210c 2749 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2750 if (new_direct_edge->call_stmt)
2751 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2752 = !gimple_check_call_matching_types (
2753 new_direct_edge->call_stmt,
67348ccc 2754 new_direct_edge->callee->decl, false);
b258210c
MJ
2755 if (new_edges)
2756 {
9771b263 2757 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2758 res = true;
2759 }
042ae7d2 2760 top = IPA_EDGE_REF (cs);
685b0d13 2761 }
36b72910
JH
2762 else if (jfunc->type == IPA_JF_PASS_THROUGH
2763 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2764 {
2765 if (ici->agg_contents
2766 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2767 ici->param_index = -1;
2768 else
2769 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2770 }
2771 else if (jfunc->type == IPA_JF_ANCESTOR)
2772 {
2773 if (ici->agg_contents
2774 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2775 ici->param_index = -1;
2776 else
2777 {
2778 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
68377e53
JH
2779 if (ipa_get_jf_ancestor_offset (jfunc))
2780 ici->outer_type = NULL;
36b72910
JH
2781 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2782 }
2783 }
2784 else
2785 /* Either we can find a destination for this edge now or never. */
2786 ici->param_index = -1;
3e293154 2787 }
e33c6cd6 2788
f8e2a1ed 2789 return res;
3e293154
MJ
2790}
2791
2792/* Recursively traverse subtree of NODE (including node) made of inlined
2793 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2794 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2795 update_jump_functions_after_inlining on all non-inlined edges that lead out
2796 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2797 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2798 created. */
be95e2b9 2799
f8e2a1ed 2800static bool
3e293154
MJ
2801propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2802 struct cgraph_node *node,
9771b263 2803 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2804{
2805 struct cgraph_edge *e;
f8e2a1ed 2806 bool res;
3e293154 2807
e33c6cd6 2808 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2809
2810 for (e = node->callees; e; e = e->next_callee)
2811 if (!e->inline_failed)
f8e2a1ed 2812 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2813 else
2814 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2815 for (e = node->indirect_calls; e; e = e->next_callee)
2816 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2817
2818 return res;
3e293154
MJ
2819}
2820
4502fe8d
MJ
2821/* Combine two controlled uses counts as done during inlining. */
2822
2823static int
2824combine_controlled_uses_counters (int c, int d)
2825{
2826 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2827 return IPA_UNDESCRIBED_USE;
2828 else
2829 return c + d - 1;
2830}
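/* Worked example (added for exposition): if the new root describes C == 3
   controlled uses of its parameter and the inlined callee described D == 2
   controlled uses of the corresponding formal, the combined count is
   3 + 2 - 1 == 4; the inlined call site was one of the caller's three uses
   and disappears, while the callee's two uses now apply directly to the
   caller's parameter.  Any IPA_UNDESCRIBED_USE operand makes the result
   IPA_UNDESCRIBED_USE.  */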
2831
2832/* Propagate the number of controlled users from CS->callee to the new root of the
2833 tree of inlined nodes. */
2834
2835static void
2836propagate_controlled_uses (struct cgraph_edge *cs)
2837{
2838 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2839 struct cgraph_node *new_root = cs->caller->global.inlined_to
2840 ? cs->caller->global.inlined_to : cs->caller;
2841 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2842 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2843 int count, i;
2844
2845 count = MIN (ipa_get_cs_argument_count (args),
2846 ipa_get_param_count (old_root_info));
2847 for (i = 0; i < count; i++)
2848 {
2849 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2850 struct ipa_cst_ref_desc *rdesc;
2851
2852 if (jf->type == IPA_JF_PASS_THROUGH)
2853 {
2854 int src_idx, c, d;
2855 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2856 c = ipa_get_controlled_uses (new_root_info, src_idx);
2857 d = ipa_get_controlled_uses (old_root_info, i);
2858
2859 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2860 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2861 c = combine_controlled_uses_counters (c, d);
2862 ipa_set_controlled_uses (new_root_info, src_idx, c);
2863 if (c == 0 && new_root_info->ipcp_orig_node)
2864 {
2865 struct cgraph_node *n;
2866 struct ipa_ref *ref;
2867 tree t = new_root_info->known_vals[src_idx];
2868
2869 if (t && TREE_CODE (t) == ADDR_EXPR
2870 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2871 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2872 && (ref = ipa_find_reference (new_root,
2873 n, NULL, 0)))
4502fe8d
MJ
2874 {
2875 if (dump_file)
2876 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2877 "reference from %s/%i to %s/%i.\n",
fec39fa6 2878 xstrdup (new_root->name ()),
67348ccc 2879 new_root->order,
fec39fa6 2880 xstrdup (n->name ()), n->order);
4502fe8d
MJ
2881 ipa_remove_reference (ref);
2882 }
2883 }
2884 }
2885 else if (jf->type == IPA_JF_CONST
2886 && (rdesc = jfunc_rdesc_usable (jf)))
2887 {
2888 int d = ipa_get_controlled_uses (old_root_info, i);
2889 int c = rdesc->refcount;
2890 rdesc->refcount = combine_controlled_uses_counters (c, d);
2891 if (rdesc->refcount == 0)
2892 {
2893 tree cst = ipa_get_jf_constant (jf);
2894 struct cgraph_node *n;
2895 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2896 && TREE_CODE (TREE_OPERAND (cst, 0))
2897 == FUNCTION_DECL);
2898 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2899 if (n)
2900 {
2901 struct cgraph_node *clone;
568cda29 2902 bool ok;
67348ccc 2903 ok = remove_described_reference (n, rdesc);
568cda29 2904 gcc_checking_assert (ok);
4502fe8d
MJ
2905
2906 clone = cs->caller;
2907 while (clone->global.inlined_to
2908 && clone != rdesc->cs->caller
2909 && IPA_NODE_REF (clone)->ipcp_orig_node)
2910 {
2911 struct ipa_ref *ref;
67348ccc
DM
2912 ref = ipa_find_reference (clone,
2913 n, NULL, 0);
4502fe8d
MJ
2914 if (ref)
2915 {
2916 if (dump_file)
2917 fprintf (dump_file, "ipa-prop: Removing "
2918 "cloning-created reference "
2919 "from %s/%i to %s/%i.\n",
fec39fa6 2920 xstrdup (clone->name ()),
67348ccc 2921 clone->order,
fec39fa6 2922 xstrdup (n->name ()),
67348ccc 2923 n->order);
4502fe8d
MJ
2924 ipa_remove_reference (ref);
2925 }
2926 clone = clone->callers->caller;
2927 }
2928 }
2929 }
2930 }
2931 }
2932
2933 for (i = ipa_get_param_count (old_root_info);
2934 i < ipa_get_cs_argument_count (args);
2935 i++)
2936 {
2937 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2938
2939 if (jf->type == IPA_JF_CONST)
2940 {
2941 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2942 if (rdesc)
2943 rdesc->refcount = IPA_UNDESCRIBED_USE;
2944 }
2945 else if (jf->type == IPA_JF_PASS_THROUGH)
2946 ipa_set_controlled_uses (new_root_info,
2947 jf->value.pass_through.formal_id,
2948 IPA_UNDESCRIBED_USE);
2949 }
2950}
2951
3e293154
MJ
2952/* Update jump functions and call note functions on inlining the call site CS.
2953 CS is expected to lead to a node already cloned by
2954 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2955 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2956 created. */
be95e2b9 2957
f8e2a1ed 2958bool
3e293154 2959ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2960 vec<cgraph_edge_p> *new_edges)
3e293154 2961{
5ee53a06 2962 bool changed;
f8e2a1ed
MJ
2963 /* Do nothing if the preparation phase has not been carried out yet
2964 (i.e. during early inlining). */
9771b263 2965 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2966 return false;
2967 gcc_assert (ipa_edge_args_vector);
2968
4502fe8d 2969 propagate_controlled_uses (cs);
5ee53a06
JH
2970 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2971
5ee53a06 2972 return changed;
518dc859
RL
2973}
2974
771578a0
MJ
2975/* Frees all dynamically allocated structures that the argument info points
2976 to. */
be95e2b9 2977
518dc859 2978void
771578a0 2979ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2980{
9771b263 2981 vec_free (args->jump_functions);
771578a0 2982 memset (args, 0, sizeof (*args));
518dc859
RL
2983}
2984
771578a0 2985/* Free all ipa_edge structures. */
be95e2b9 2986
518dc859 2987void
771578a0 2988ipa_free_all_edge_args (void)
518dc859 2989{
771578a0
MJ
2990 int i;
2991 struct ipa_edge_args *args;
518dc859 2992
9771b263
DN
2993 if (!ipa_edge_args_vector)
2994 return;
2995
2996 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2997 ipa_free_edge_args_substructures (args);
2998
9771b263 2999 vec_free (ipa_edge_args_vector);
518dc859
RL
3000}
3001
771578a0
MJ
3002/* Frees all dynamically allocated structures that the param info points
3003 to. */
be95e2b9 3004
518dc859 3005void
771578a0 3006ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 3007{
9771b263 3008 info->descriptors.release ();
310bc633
MJ
3009 free (info->lattices);
3010 /* Lattice values and their sources are deallocated with their allocation
3011 pool. */
9771b263 3012 info->known_vals.release ();
771578a0 3013 memset (info, 0, sizeof (*info));
518dc859
RL
3014}
3015
771578a0 3016/* Free all ipa_node_params structures. */
be95e2b9 3017
518dc859 3018void
771578a0 3019ipa_free_all_node_params (void)
518dc859 3020{
771578a0
MJ
3021 int i;
3022 struct ipa_node_params *info;
518dc859 3023
9771b263 3024 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3025 ipa_free_node_params_substructures (info);
3026
9771b263 3027 ipa_node_params_vector.release ();
771578a0
MJ
3028}
3029
2c9561b5
MJ
3030/* Set the aggregate replacements of NODE to be AGGVALS. */
3031
3032void
3033ipa_set_node_agg_value_chain (struct cgraph_node *node,
3034 struct ipa_agg_replacement_value *aggvals)
3035{
9771b263
DN
3036 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3037 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3038
9771b263 3039 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3040}
3041
771578a0 3042/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3043
771578a0 3044static void
5c0466b5 3045ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3046{
568cda29
MJ
3047 struct ipa_edge_args *args;
3048
3049 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3050 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3051 return;
568cda29
MJ
3052
3053 args = IPA_EDGE_REF (cs);
3054 if (args->jump_functions)
3055 {
3056 struct ipa_jump_func *jf;
3057 int i;
3058 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3059 {
3060 struct ipa_cst_ref_desc *rdesc;
3061 try_decrement_rdesc_refcount (jf);
3062 if (jf->type == IPA_JF_CONST
3063 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3064 && rdesc->cs == cs)
3065 rdesc->cs = NULL;
3066 }
568cda29
MJ
3067 }
3068
771578a0 3069 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3070}
3071
771578a0 3072/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3073
771578a0 3074static void
5c0466b5 3075ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3076{
dd6d1ad7 3077 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3078 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3079 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3080 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3081 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3082}
3083
8b7773a4 3084/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3085
771578a0
MJ
3086static void
3087ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3088 __attribute__((unused)) void *data)
771578a0
MJ
3089{
3090 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3091 unsigned int i;
771578a0
MJ
3092
3093 ipa_check_create_edge_args ();
3094
3095 old_args = IPA_EDGE_REF (src);
3096 new_args = IPA_EDGE_REF (dst);
3097
9771b263 3098 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3099
9771b263 3100 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3101 {
3102 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3103 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3104
3105 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3106
3107 if (src_jf->type == IPA_JF_CONST)
3108 {
3109 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3110
3111 if (!src_rdesc)
3112 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3113 else if (src->caller == dst->caller)
3114 {
3115 struct ipa_ref *ref;
5e20cdc9 3116 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3117 gcc_checking_assert (n);
67348ccc 3118 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3119 src->call_stmt, src->lto_stmt_uid);
3120 gcc_checking_assert (ref);
67348ccc 3121 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3122
3123 gcc_checking_assert (ipa_refdesc_pool);
3124 struct ipa_cst_ref_desc *dst_rdesc
3125 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3126 dst_rdesc->cs = dst;
3127 dst_rdesc->refcount = src_rdesc->refcount;
3128 dst_rdesc->next_duplicate = NULL;
3129 dst_jf->value.constant.rdesc = dst_rdesc;
3130 }
4502fe8d
MJ
3131 else if (src_rdesc->cs == src)
3132 {
3133 struct ipa_cst_ref_desc *dst_rdesc;
3134 gcc_checking_assert (ipa_refdesc_pool);
3135 dst_rdesc
3136 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3137 dst_rdesc->cs = dst;
4502fe8d 3138 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3139 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3140 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3141 dst_jf->value.constant.rdesc = dst_rdesc;
3142 }
3143 else
3144 {
3145 struct ipa_cst_ref_desc *dst_rdesc;
3146 /* This can happen during inlining, when a JFUNC can refer to a
3147 reference taken in a function up in the tree of inline clones.
3148 We need to find the duplicate that refers to our tree of
3149 inline clones. */
3150
3151 gcc_assert (dst->caller->global.inlined_to);
3152 for (dst_rdesc = src_rdesc->next_duplicate;
3153 dst_rdesc;
3154 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3155 {
3156 struct cgraph_node *top;
3157 top = dst_rdesc->cs->caller->global.inlined_to
3158 ? dst_rdesc->cs->caller->global.inlined_to
3159 : dst_rdesc->cs->caller;
3160 if (dst->caller->global.inlined_to == top)
3161 break;
3162 }
44a60244 3163 gcc_assert (dst_rdesc);
4502fe8d
MJ
3164 dst_jf->value.constant.rdesc = dst_rdesc;
3165 }
3166 }
3167 }
771578a0
MJ
3168}
3169
3170/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3171
771578a0
MJ
3172static void
3173ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3174 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3175{
3176 struct ipa_node_params *old_info, *new_info;
2c9561b5 3177 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3178
3179 ipa_check_create_node_params ();
3180 old_info = IPA_NODE_REF (src);
3181 new_info = IPA_NODE_REF (dst);
771578a0 3182
9771b263 3183 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3184 new_info->lattices = NULL;
771578a0 3185 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3186
3949c4a7
MJ
3187 new_info->uses_analysis_done = old_info->uses_analysis_done;
3188 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3189
3190 old_av = ipa_get_agg_replacements_for_node (src);
3191 if (!old_av)
3192 return;
3193
3194 new_av = NULL;
3195 while (old_av)
3196 {
3197 struct ipa_agg_replacement_value *v;
3198
3199 v = ggc_alloc_ipa_agg_replacement_value ();
3200 memcpy (v, old_av, sizeof (*v));
3201 v->next = new_av;
3202 new_av = v;
3203 old_av = old_av->next;
3204 }
3205 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3206}
3207
40982661
JH
3208
3209/* Analyze a function newly added to the callgraph. */
3210
3211static void
3212ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3213{
3214 ipa_analyze_node (node);
3215}
3216
771578a0 3217/* Register our cgraph hooks if they are not already there. */
be95e2b9 3218
518dc859 3219void
771578a0 3220ipa_register_cgraph_hooks (void)
518dc859 3221{
771578a0
MJ
3222 if (!edge_removal_hook_holder)
3223 edge_removal_hook_holder =
3224 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3225 if (!node_removal_hook_holder)
3226 node_removal_hook_holder =
3227 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3228 if (!edge_duplication_hook_holder)
3229 edge_duplication_hook_holder =
3230 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3231 if (!node_duplication_hook_holder)
3232 node_duplication_hook_holder =
3233 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3234 function_insertion_hook_holder =
3235 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3236}
518dc859 3237
771578a0 3238/* Unregister our cgraph hooks. */
be95e2b9 3239
771578a0
MJ
3240static void
3241ipa_unregister_cgraph_hooks (void)
3242{
3243 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3244 edge_removal_hook_holder = NULL;
3245 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3246 node_removal_hook_holder = NULL;
3247 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3248 edge_duplication_hook_holder = NULL;
3249 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3250 node_duplication_hook_holder = NULL;
40982661
JH
3251 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3252 function_insertion_hook_holder = NULL;
771578a0
MJ
3253}
3254
3255/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3256 longer needed after ipa-cp. */
be95e2b9 3257
771578a0 3258void
e33c6cd6 3259ipa_free_all_structures_after_ipa_cp (void)
3e293154 3260{
5ee53a06 3261 if (!optimize)
3e293154
MJ
3262 {
3263 ipa_free_all_edge_args ();
3264 ipa_free_all_node_params ();
310bc633
MJ
3265 free_alloc_pool (ipcp_sources_pool);
3266 free_alloc_pool (ipcp_values_pool);
2c9561b5 3267 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3268 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3269 if (ipa_refdesc_pool)
3270 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3271 }
3272}
3273
3274/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3275 longer needed after indirect inlining. */
be95e2b9 3276
3e293154 3277void
e33c6cd6 3278ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3279{
3280 ipa_free_all_edge_args ();
3281 ipa_free_all_node_params ();
3282 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3283 if (ipcp_sources_pool)
3284 free_alloc_pool (ipcp_sources_pool);
3285 if (ipcp_values_pool)
3286 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3287 if (ipcp_agg_lattice_pool)
3288 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3289 if (ipa_refdesc_pool)
3290 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3291}
3292
dcd416e3 3293/* Print the parameter descriptors (ipa_node_params) of the function NODE in the
518dc859 3294 callgraph to F. */
be95e2b9 3295
518dc859 3296void
2c9561b5 3297ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3298{
3299 int i, count;
3e293154 3300 struct ipa_node_params *info;
518dc859 3301
67348ccc 3302 if (!node->definition)
3e293154
MJ
3303 return;
3304 info = IPA_NODE_REF (node);
9de04252 3305 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3306 node->name (), node->order);
3e293154
MJ
3307 count = ipa_get_param_count (info);
3308 for (i = 0; i < count; i++)
518dc859 3309 {
4502fe8d
MJ
3310 int c;
3311
e067bd43 3312 ipa_dump_param (f, info, i);
339f49ec
JH
3313 if (ipa_is_param_used (info, i))
3314 fprintf (f, " used");
4502fe8d
MJ
3315 c = ipa_get_controlled_uses (info, i);
3316 if (c == IPA_UNDESCRIBED_USE)
3317 fprintf (f, " undescribed_use");
3318 else
3319 fprintf (f, " controlled_uses=%i", c);
3e293154 3320 fprintf (f, "\n");
518dc859
RL
3321 }
3322}
dcd416e3 3323
ca30a539 3324/* Print ipa_node_params data structures of all functions in the
3e293154 3325 callgraph to F. */
be95e2b9 3326
3e293154 3327void
ca30a539 3328ipa_print_all_params (FILE * f)
3e293154
MJ
3329{
3330 struct cgraph_node *node;
3331
ca30a539 3332 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3333 FOR_EACH_FUNCTION (node)
ca30a539 3334 ipa_print_node_params (f, node);
3e293154 3335}
3f84bf08
MJ
3336
 3337/* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3338
9771b263 3339vec<tree>
3f84bf08
MJ
3340ipa_get_vector_of_formal_parms (tree fndecl)
3341{
9771b263 3342 vec<tree> args;
3f84bf08
MJ
3343 int count;
3344 tree parm;
3345
0e8853ee 3346 gcc_assert (!flag_wpa);
310bc633 3347 count = count_formal_params (fndecl);
9771b263 3348 args.create (count);
910ad8de 3349 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3350 args.quick_push (parm);
3f84bf08
MJ
3351
3352 return args;
3353}
3354
 3355/* Return a heap-allocated vector containing the types of formal parameters of
3356 function type FNTYPE. */
3357
9771b263 3358static inline vec<tree>
3f84bf08
MJ
3359get_vector_of_formal_parm_types (tree fntype)
3360{
9771b263 3361 vec<tree> types;
3f84bf08
MJ
3362 int count = 0;
3363 tree t;
3364
3365 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3366 count++;
3367
9771b263 3368 types.create (count);
3f84bf08 3369 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3370 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3371
3372 return types;
3373}
3374
3375/* Modify the function declaration FNDECL and its type according to the plan in
 3376 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
 3377 to reflect the actual parameters being modified, which are determined by the
3378 base_index field. */
3379
3380void
3381ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3382 const char *synth_parm_prefix)
3383{
9771b263 3384 vec<tree> oparms, otypes;
3f84bf08
MJ
3385 tree orig_type, new_type = NULL;
3386 tree old_arg_types, t, new_arg_types = NULL;
3387 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 3388 int i, len = adjustments.length ();
3f84bf08
MJ
3389 tree new_reversed = NULL;
3390 bool care_for_types, last_parm_void;
3391
3392 if (!synth_parm_prefix)
3393 synth_parm_prefix = "SYNTH";
3394
3395 oparms = ipa_get_vector_of_formal_parms (fndecl);
3396 orig_type = TREE_TYPE (fndecl);
3397 old_arg_types = TYPE_ARG_TYPES (orig_type);
3398
 3399 /* The following test is an ugly hack; some functions simply don't have any
 3400 arguments in their type. This is probably a bug, but well... */
3401 care_for_types = (old_arg_types != NULL_TREE);
3402 if (care_for_types)
3403 {
3404 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3405 == void_type_node);
3406 otypes = get_vector_of_formal_parm_types (orig_type);
3407 if (last_parm_void)
9771b263 3408 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3409 else
9771b263 3410 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3411 }
3412 else
3413 {
3414 last_parm_void = false;
9771b263 3415 otypes.create (0);
3f84bf08
MJ
3416 }
3417
3418 for (i = 0; i < len; i++)
3419 {
3420 struct ipa_parm_adjustment *adj;
3421 gcc_assert (link);
3422
9771b263
DN
3423 adj = &adjustments[i];
3424 parm = oparms[adj->base_index];
3f84bf08
MJ
3425 adj->base = parm;
3426
3427 if (adj->copy_param)
3428 {
3429 if (care_for_types)
9771b263 3430 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3431 new_arg_types);
3432 *link = parm;
910ad8de 3433 link = &DECL_CHAIN (parm);
3f84bf08
MJ
3434 }
3435 else if (!adj->remove_param)
3436 {
3437 tree new_parm;
3438 tree ptype;
3439
3440 if (adj->by_ref)
3441 ptype = build_pointer_type (adj->type);
3442 else
3443 ptype = adj->type;
3444
3445 if (care_for_types)
3446 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3447
3448 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3449 ptype);
3450 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3451
3452 DECL_ARTIFICIAL (new_parm) = 1;
3453 DECL_ARG_TYPE (new_parm) = ptype;
3454 DECL_CONTEXT (new_parm) = fndecl;
3455 TREE_USED (new_parm) = 1;
3456 DECL_IGNORED_P (new_parm) = 1;
3457 layout_decl (new_parm, 0);
3458
3f84bf08
MJ
3459 adj->base = parm;
3460 adj->reduction = new_parm;
3461
3462 *link = new_parm;
3463
910ad8de 3464 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3465 }
3466 }
3467
3468 *link = NULL_TREE;
3469
3470 if (care_for_types)
3471 {
3472 new_reversed = nreverse (new_arg_types);
3473 if (last_parm_void)
3474 {
3475 if (new_reversed)
3476 TREE_CHAIN (new_arg_types) = void_list_node;
3477 else
3478 new_reversed = void_list_node;
3479 }
3480 }
3481
 3482 /* Use copy_node to preserve as much as possible from the original type
 3483 (debug info, attribute lists etc.).
 3484 The exception is that METHOD_TYPEs must have a THIS argument; when we
 3485 are asked to remove it, we need to build a new FUNCTION_TYPE
3486 instead. */
3487 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
3488 || (adjustments[0].copy_param
3489 && adjustments[0].base_index == 0))
3f84bf08 3490 {
4eb3f32c 3491 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3492 TYPE_ARG_TYPES (new_type) = new_reversed;
3493 }
3494 else
3495 {
3496 new_type
3497 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3498 new_reversed));
3499 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3500 DECL_VINDEX (fndecl) = NULL_TREE;
3501 }
3502
d402c33d
JH
3503 /* When signature changes, we need to clear builtin info. */
3504 if (DECL_BUILT_IN (fndecl))
3505 {
3506 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3507 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3508 }
3509
3f84bf08
MJ
3510 /* This is a new type, not a copy of an old type. Need to reassociate
3511 variants. We can handle everything except the main variant lazily. */
3512 t = TYPE_MAIN_VARIANT (orig_type);
3513 if (orig_type != t)
3514 {
3515 TYPE_MAIN_VARIANT (new_type) = t;
3516 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3517 TYPE_NEXT_VARIANT (t) = new_type;
3518 }
3519 else
3520 {
3521 TYPE_MAIN_VARIANT (new_type) = new_type;
3522 TYPE_NEXT_VARIANT (new_type) = NULL;
3523 }
3524
3525 TREE_TYPE (fndecl) = new_type;
9b389a5e 3526 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3527 otypes.release ();
3528 oparms.release ();
3f84bf08
MJ
3529}
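/* A minimal sketch of how an adjustment vector drives the rebuilding of a
   parameter list above: entries either copy the original parameter at
   base_index, drop it, or create a synthetic replacement based on it.
   Plain C arrays stand in for tree chains and the toy_* names are
   illustrative only.  */

#include <stdio.h>

struct toy_adjustment
{
  int base_index;       /* Index into the original parameter list. */
  int copy_param;       /* Keep the original parameter unchanged. */
  int remove_param;     /* Drop the original parameter entirely. */
};

static void
print_new_parm_list (const char *const *orig_parms,
                     const struct toy_adjustment *adj, int len)
{
  int i;
  for (i = 0; i < len; i++)
    {
      if (adj[i].remove_param)
        continue;
      else if (adj[i].copy_param)
        printf ("%s ", orig_parms[adj[i].base_index]);
      else
        printf ("SYNTH%d(from %s) ", i, orig_parms[adj[i].base_index]);
    }
  printf ("\n");
}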
3530
3531/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3532 If this is a directly recursive call, CS must be NULL. Otherwise it must
3533 contain the corresponding call graph edge. */
3534
3535void
3536ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3537 ipa_parm_adjustment_vec adjustments)
3538{
82338059 3539 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3540 vec<tree> vargs;
3541 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3542 gimple new_stmt;
82338059 3543 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3544 tree callee_decl;
3545 int i, len;
3546
9771b263
DN
3547 len = adjustments.length ();
3548 vargs.create (len);
67348ccc
DM
3549 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3550 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3551
3552 gsi = gsi_for_stmt (stmt);
82338059
MJ
3553 prev_gsi = gsi;
3554 gsi_prev (&prev_gsi);
3f84bf08
MJ
3555 for (i = 0; i < len; i++)
3556 {
3557 struct ipa_parm_adjustment *adj;
3558
9771b263 3559 adj = &adjustments[i];
3f84bf08
MJ
3560
3561 if (adj->copy_param)
3562 {
3563 tree arg = gimple_call_arg (stmt, adj->base_index);
3564
9771b263 3565 vargs.quick_push (arg);
3f84bf08
MJ
3566 }
3567 else if (!adj->remove_param)
3568 {
fffe1e40
MJ
3569 tree expr, base, off;
3570 location_t loc;
f43245d1 3571 unsigned int deref_align = 0;
c1ed6a01 3572 bool deref_base = false;
fffe1e40
MJ
3573
 3574 /* We create a new parameter out of the value of the old one; we can
 3575 do the following kinds of transformations:
3576
3577 - A scalar passed by reference is converted to a scalar passed by
3578 value. (adj->by_ref is false and the type of the original
3579 actual argument is a pointer to a scalar).
3580
3581 - A part of an aggregate is passed instead of the whole aggregate.
3582 The part can be passed either by value or by reference, this is
3583 determined by value of adj->by_ref. Moreover, the code below
3584 handles both situations when the original aggregate is passed by
3585 value (its type is not a pointer) and when it is passed by
3586 reference (it is a pointer to an aggregate).
3587
3588 When the new argument is passed by reference (adj->by_ref is true)
3589 it must be a part of an aggregate and therefore we form it by
3590 simply taking the address of a reference inside the original
3591 aggregate. */
3592
3593 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3594 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3595 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3596 : EXPR_LOCATION (base);
fffe1e40 3597
82d49829
MJ
3598 if (TREE_CODE (base) != ADDR_EXPR
3599 && POINTER_TYPE_P (TREE_TYPE (base)))
3600 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3601 adj->offset / BITS_PER_UNIT);
3f84bf08 3602 else
3f84bf08 3603 {
fffe1e40
MJ
3604 HOST_WIDE_INT base_offset;
3605 tree prev_base;
c1ed6a01 3606 bool addrof;
fffe1e40
MJ
3607
3608 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3609 {
3610 base = TREE_OPERAND (base, 0);
3611 addrof = true;
3612 }
3613 else
3614 addrof = false;
fffe1e40
MJ
3615 prev_base = base;
3616 base = get_addr_base_and_unit_offset (base, &base_offset);
3617 /* Aggregate arguments can have non-invariant addresses. */
3618 if (!base)
3619 {
3620 base = build_fold_addr_expr (prev_base);
82d49829 3621 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3622 adj->offset / BITS_PER_UNIT);
3623 }
3624 else if (TREE_CODE (base) == MEM_REF)
3625 {
c1ed6a01
MJ
3626 if (!addrof)
3627 {
3628 deref_base = true;
3629 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3630 }
82d49829 3631 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3632 base_offset
3633 + adj->offset / BITS_PER_UNIT);
3634 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3635 off);
fffe1e40
MJ
3636 base = TREE_OPERAND (base, 0);
3637 }
3638 else
3639 {
82d49829 3640 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3641 base_offset
3642 + adj->offset / BITS_PER_UNIT);
3643 base = build_fold_addr_expr (base);
3644 }
3f84bf08 3645 }
fffe1e40 3646
3a5a825a
RG
3647 if (!adj->by_ref)
3648 {
3649 tree type = adj->type;
3650 unsigned int align;
3651 unsigned HOST_WIDE_INT misalign;
644ffefd 3652
c1ed6a01
MJ
3653 if (deref_base)
3654 {
3655 align = deref_align;
3656 misalign = 0;
3657 }
3658 else
3659 {
3660 get_pointer_alignment_1 (base, &align, &misalign);
3661 if (TYPE_ALIGN (type) > align)
3662 align = TYPE_ALIGN (type);
3663 }
27bcd47c
LC
3664 misalign += (tree_to_double_int (off)
3665 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3666 * BITS_PER_UNIT);
3667 misalign = misalign & (align - 1);
3668 if (misalign != 0)
3669 align = (misalign & -misalign);
3670 if (align < TYPE_ALIGN (type))
3671 type = build_aligned_type (type, align);
3672 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3673 }
3674 else
3675 {
3676 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3677 expr = build_fold_addr_expr (expr);
3678 }
fffe1e40 3679
3f84bf08
MJ
3680 expr = force_gimple_operand_gsi (&gsi, expr,
3681 adj->by_ref
3682 || is_gimple_reg_type (adj->type),
3683 NULL, true, GSI_SAME_STMT);
9771b263 3684 vargs.quick_push (expr);
3f84bf08 3685 }
ddb555ed
JJ
3686 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3687 {
3688 unsigned int ix;
3689 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3690 gimple def_temp;
3691
3692 arg = gimple_call_arg (stmt, adj->base_index);
3693 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3694 {
3695 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3696 continue;
3697 arg = fold_convert_loc (gimple_location (stmt),
3698 TREE_TYPE (origin), arg);
3699 }
3700 if (debug_args == NULL)
3701 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3702 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3703 if (ddecl == origin)
3704 {
9771b263 3705 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3706 break;
3707 }
3708 if (ddecl == NULL)
3709 {
3710 ddecl = make_node (DEBUG_EXPR_DECL);
3711 DECL_ARTIFICIAL (ddecl) = 1;
3712 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3713 DECL_MODE (ddecl) = DECL_MODE (origin);
3714
9771b263
DN
3715 vec_safe_push (*debug_args, origin);
3716 vec_safe_push (*debug_args, ddecl);
ddb555ed 3717 }
9771b263 3718 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3719 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3720 }
3f84bf08
MJ
3721 }
3722
3723 if (dump_file && (dump_flags & TDF_DETAILS))
3724 {
3725 fprintf (dump_file, "replacing stmt:");
3726 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3727 }
3728
3f84bf08 3729 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3730 vargs.release ();
3f84bf08
MJ
3731 if (gimple_call_lhs (stmt))
3732 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3733
3734 gimple_set_block (new_stmt, gimple_block (stmt));
3735 if (gimple_has_location (stmt))
3736 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3737 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3738 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3739
3740 if (dump_file && (dump_flags & TDF_DETAILS))
3741 {
3742 fprintf (dump_file, "with stmt:");
3743 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3744 fprintf (dump_file, "\n");
3745 }
3746 gsi_replace (&gsi, new_stmt, true);
3747 if (cs)
3748 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3749 do
3750 {
3751 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3752 gsi_prev (&gsi);
3753 }
3754 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3755 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3756
3f84bf08
MJ
3757 update_ssa (TODO_update_ssa);
3758 free_dominance_info (CDI_DOMINATORS);
3759}
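/* The by-value branch above computes how well aligned the newly built memory
   access is: the byte offset is added to the known misalignment of the base
   pointer and only the largest power of two still dividing the result is
   kept.  A self-contained sketch of that arithmetic (everything in bits,
   with BITS_PER_UNIT assumed to be 8) follows; it is an illustration, not
   the GCC API.  For example, a base aligned to 64 bits with zero
   misalignment and a 4-byte offset yields a 32-bit-aligned access.  */

static unsigned long
aligned_access_align (unsigned long align, unsigned long misalign,
                      unsigned long byte_offset)
{
  misalign += byte_offset * 8;
  misalign &= align - 1;
  if (misalign != 0)
    align = misalign & -misalign;
  return align;
}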
3760
3761/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3762
3763static bool
3764index_in_adjustments_multiple_times_p (int base_index,
3765 ipa_parm_adjustment_vec adjustments)
3766{
9771b263 3767 int i, len = adjustments.length ();
3f84bf08
MJ
3768 bool one = false;
3769
3770 for (i = 0; i < len; i++)
3771 {
3772 struct ipa_parm_adjustment *adj;
9771b263 3773 adj = &adjustments[i];
3f84bf08
MJ
3774
3775 if (adj->base_index == base_index)
3776 {
3777 if (one)
3778 return true;
3779 else
3780 one = true;
3781 }
3782 }
3783 return false;
3784}
3785
3786
3787/* Return adjustments that should have the same effect on function parameters
3788 and call arguments as if they were first changed according to adjustments in
3789 INNER and then by adjustments in OUTER. */
3790
3791ipa_parm_adjustment_vec
3792ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3793 ipa_parm_adjustment_vec outer)
3794{
9771b263
DN
3795 int i, outlen = outer.length ();
3796 int inlen = inner.length ();
3f84bf08
MJ
3797 int removals = 0;
3798 ipa_parm_adjustment_vec adjustments, tmp;
3799
9771b263 3800 tmp.create (inlen);
3f84bf08
MJ
3801 for (i = 0; i < inlen; i++)
3802 {
3803 struct ipa_parm_adjustment *n;
9771b263 3804 n = &inner[i];
3f84bf08
MJ
3805
3806 if (n->remove_param)
3807 removals++;
3808 else
9771b263 3809 tmp.quick_push (*n);
3f84bf08
MJ
3810 }
3811
9771b263 3812 adjustments.create (outlen + removals);
3f84bf08
MJ
3813 for (i = 0; i < outlen; i++)
3814 {
f32682ca 3815 struct ipa_parm_adjustment r;
9771b263
DN
3816 struct ipa_parm_adjustment *out = &outer[i];
3817 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3818
f32682ca 3819 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3820 gcc_assert (!in->remove_param);
3821 if (out->remove_param)
3822 {
3823 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3824 {
f32682ca 3825 r.remove_param = true;
9771b263 3826 adjustments.quick_push (r);
3f84bf08
MJ
3827 }
3828 continue;
3829 }
3830
f32682ca
DN
3831 r.base_index = in->base_index;
3832 r.type = out->type;
3f84bf08
MJ
3833
3834 /* FIXME: Create nonlocal value too. */
3835
3836 if (in->copy_param && out->copy_param)
f32682ca 3837 r.copy_param = true;
3f84bf08 3838 else if (in->copy_param)
f32682ca 3839 r.offset = out->offset;
3f84bf08 3840 else if (out->copy_param)
f32682ca 3841 r.offset = in->offset;
3f84bf08 3842 else
f32682ca 3843 r.offset = in->offset + out->offset;
9771b263 3844 adjustments.quick_push (r);
3f84bf08
MJ
3845 }
3846
3847 for (i = 0; i < inlen; i++)
3848 {
9771b263 3849 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3850
3851 if (n->remove_param)
9771b263 3852 adjustments.quick_push (*n);
3f84bf08
MJ
3853 }
3854
9771b263 3855 tmp.release ();
3f84bf08
MJ
3856 return adjustments;
3857}
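/* Worked example for the composition above (hypothetical indices, not taken
   from a real dump).  Suppose the original function had parameters (a, b, c)
   and INNER kept only a and c, i.e. it copied indices 0 and 2 and removed
   index 1, so the intermediate clone has parameters (a, c).  If OUTER then
   removes the clone's second parameter, out->base_index indexes TMP, the
   removal-free copy of INNER, and therefore maps that removal back to the
   original parameter c; the parameter INNER itself removed (b) is appended
   as a removal at the end, and offsets of partial replacements along the way
   simply add up.  */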
3858
 3859/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
 3860 way, assuming they are meant to be applied to FNDECL. */
3861
3862void
3863ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3864 tree fndecl)
3865{
9771b263 3866 int i, len = adjustments.length ();
3f84bf08 3867 bool first = true;
9771b263 3868 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3869
3870 fprintf (file, "IPA param adjustments: ");
3871 for (i = 0; i < len; i++)
3872 {
3873 struct ipa_parm_adjustment *adj;
9771b263 3874 adj = &adjustments[i];
3f84bf08
MJ
3875
3876 if (!first)
3877 fprintf (file, " ");
3878 else
3879 first = false;
3880
3881 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3882 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3883 if (adj->base)
3884 {
3885 fprintf (file, ", base: ");
3886 print_generic_expr (file, adj->base, 0);
3887 }
3888 if (adj->reduction)
3889 {
3890 fprintf (file, ", reduction: ");
3891 print_generic_expr (file, adj->reduction, 0);
3892 }
3893 if (adj->new_ssa_base)
3894 {
3895 fprintf (file, ", new_ssa_base: ");
3896 print_generic_expr (file, adj->new_ssa_base, 0);
3897 }
3898
3899 if (adj->copy_param)
3900 fprintf (file, ", copy_param");
3901 else if (adj->remove_param)
3902 fprintf (file, ", remove_param");
3903 else
3904 fprintf (file, ", offset %li", (long) adj->offset);
3905 if (adj->by_ref)
3906 fprintf (file, ", by_ref");
3907 print_node_brief (file, ", type: ", adj->type, 0);
3908 fprintf (file, "\n");
3909 }
9771b263 3910 parms.release ();
3f84bf08
MJ
3911}
3912
2c9561b5
MJ
3913/* Dump the AV linked list. */
3914
3915void
3916ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3917{
3918 bool comma = false;
3919 fprintf (f, " Aggregate replacements:");
3920 for (; av; av = av->next)
3921 {
3922 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3923 av->index, av->offset);
3924 print_generic_expr (f, av->value, 0);
3925 comma = true;
3926 }
3927 fprintf (f, "\n");
3928}
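/* For reference, the dump produced above looks roughly like this
   (hypothetical values, not taken from a real run):

     Aggregate replacements: 1[0]=42, 1[32]=7

   i.e. the parameter with index 1 is known to contain the constant 42 at bit
   offset 0 and the constant 7 at bit offset 32.  */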
3929
fb3f88cc
JH
3930/* Stream out jump function JUMP_FUNC to OB. */
3931
3932static void
3933ipa_write_jump_function (struct output_block *ob,
3934 struct ipa_jump_func *jump_func)
3935{
8b7773a4
MJ
3936 struct ipa_agg_jf_item *item;
3937 struct bitpack_d bp;
3938 int i, count;
fb3f88cc 3939
8b7773a4 3940 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
3941 switch (jump_func->type)
3942 {
3943 case IPA_JF_UNKNOWN:
3944 break;
b258210c 3945 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
3946 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3947 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3948 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 3949 break;
fb3f88cc 3950 case IPA_JF_CONST:
5368224f 3951 gcc_assert (
4502fe8d
MJ
3952 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3953 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
3954 break;
3955 case IPA_JF_PASS_THROUGH:
412288f1 3956 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
3957 if (jump_func->value.pass_through.operation == NOP_EXPR)
3958 {
3959 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3960 bp = bitpack_create (ob->main_stream);
3961 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 3962 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
3963 streamer_write_bitpack (&bp);
3964 }
3965 else
3966 {
3967 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3968 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3969 }
fb3f88cc
JH
3970 break;
3971 case IPA_JF_ANCESTOR:
412288f1 3972 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 3973 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 3974 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
3975 bp = bitpack_create (ob->main_stream);
3976 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 3977 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 3978 streamer_write_bitpack (&bp);
fb3f88cc 3979 break;
8b7773a4
MJ
3980 }
3981
9771b263 3982 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
3983 streamer_write_uhwi (ob, count);
3984 if (count)
3985 {
3986 bp = bitpack_create (ob->main_stream);
3987 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3988 streamer_write_bitpack (&bp);
3989 }
3990
9771b263 3991 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3992 {
3993 streamer_write_uhwi (ob, item->offset);
3994 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
3995 }
3996}
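/* Summary of the record emitted above, in stream order (the reader below
   must consume exactly the same sequence): a uhwi with the jump function
   type; the type-specific payload (e.g. for IPA_JF_PASS_THROUGH the
   operation, then either the formal_id and a bitpack of the agg/type
   preserved flags for NOP_EXPR, or the operand tree and the formal_id for
   arithmetic pass-throughs); a uhwi with the number of aggregate items; a
   bitpack with the by_ref flag, present only when that count is non-zero;
   and finally one (offset, value tree) pair per aggregate item.  */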
3997
3998/* Read in jump function JUMP_FUNC from IB. */
3999
4000static void
4001ipa_read_jump_function (struct lto_input_block *ib,
4002 struct ipa_jump_func *jump_func,
4502fe8d 4003 struct cgraph_edge *cs,
fb3f88cc
JH
4004 struct data_in *data_in)
4005{
4a53743e
MJ
4006 enum jump_func_type jftype;
4007 enum tree_code operation;
8b7773a4 4008 int i, count;
fb3f88cc 4009
4a53743e
MJ
4010 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4011 switch (jftype)
fb3f88cc
JH
4012 {
4013 case IPA_JF_UNKNOWN:
4a53743e 4014 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4015 break;
b258210c 4016 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4017 {
4018 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4019 tree base_type = stream_read_tree (ib, data_in);
4020 tree component_type = stream_read_tree (ib, data_in);
4021
4022 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4023 break;
4024 }
fb3f88cc 4025 case IPA_JF_CONST:
4502fe8d 4026 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4027 break;
4028 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4029 operation = (enum tree_code) streamer_read_uhwi (ib);
4030 if (operation == NOP_EXPR)
4031 {
4032 int formal_id = streamer_read_uhwi (ib);
4033 struct bitpack_d bp = streamer_read_bitpack (ib);
4034 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4035 bool type_preserved = bp_unpack_value (&bp, 1);
4036 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4037 type_preserved);
4a53743e
MJ
4038 }
4039 else
4040 {
4041 tree operand = stream_read_tree (ib, data_in);
4042 int formal_id = streamer_read_uhwi (ib);
4043 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4044 operation);
4045 }
fb3f88cc
JH
4046 break;
4047 case IPA_JF_ANCESTOR:
4a53743e
MJ
4048 {
4049 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4050 tree type = stream_read_tree (ib, data_in);
4051 int formal_id = streamer_read_uhwi (ib);
4052 struct bitpack_d bp = streamer_read_bitpack (ib);
4053 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4054 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4055
b8f6e610
MJ
4056 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4057 type_preserved);
4a53743e
MJ
4058 break;
4059 }
8b7773a4
MJ
4060 }
4061
4062 count = streamer_read_uhwi (ib);
9771b263 4063 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4064 if (count)
4065 {
4a53743e 4066 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4067 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4068 }
4069 for (i = 0; i < count; i++)
4070 {
f32682ca
DN
4071 struct ipa_agg_jf_item item;
4072 item.offset = streamer_read_uhwi (ib);
4073 item.value = stream_read_tree (ib, data_in);
9771b263 4074 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4075 }
4076}
4077
e33c6cd6
MJ
4078/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4079 relevant to indirect inlining to OB. */
661e7330
MJ
4080
4081static void
e33c6cd6
MJ
4082ipa_write_indirect_edge_info (struct output_block *ob,
4083 struct cgraph_edge *cs)
661e7330 4084{
e33c6cd6 4085 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4086 struct bitpack_d bp;
e33c6cd6 4087
412288f1 4088 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4089 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4090 bp = bitpack_create (ob->main_stream);
4091 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4092 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4093 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4094 bp_pack_value (&bp, ii->by_ref, 1);
68377e53
JH
4095 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4096 bp_pack_value (&bp, ii->maybe_derived_type, 1);
412288f1 4097 streamer_write_bitpack (&bp);
b258210c
MJ
4098
4099 if (ii->polymorphic)
4100 {
412288f1 4101 streamer_write_hwi (ob, ii->otr_token);
b9393656 4102 stream_write_tree (ob, ii->otr_type, true);
68377e53 4103 stream_write_tree (ob, ii->outer_type, true);
b258210c 4104 }
661e7330
MJ
4105}
4106
e33c6cd6
MJ
4107/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4108 relevant to indirect inlining from IB. */
661e7330
MJ
4109
4110static void
e33c6cd6
MJ
4111ipa_read_indirect_edge_info (struct lto_input_block *ib,
4112 struct data_in *data_in ATTRIBUTE_UNUSED,
4113 struct cgraph_edge *cs)
661e7330 4114{
e33c6cd6 4115 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4116 struct bitpack_d bp;
661e7330 4117
412288f1 4118 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4119 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4120 bp = streamer_read_bitpack (ib);
2465dcc2 4121 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4122 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4123 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4124 ii->by_ref = bp_unpack_value (&bp, 1);
68377e53
JH
4125 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4126 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
b258210c
MJ
4127 if (ii->polymorphic)
4128 {
412288f1 4129 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4130 ii->otr_type = stream_read_tree (ib, data_in);
68377e53 4131 ii->outer_type = stream_read_tree (ib, data_in);
b258210c 4132 }
661e7330
MJ
4133}
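/* The bitpack shared by the two functions above packs, in this order:
   polymorphic, agg_contents, member_ptr, by_ref, maybe_in_construction and
   maybe_derived_type, one bit each.  It is preceded by param_index and
   offset as signed HWIs and followed by the otr_token/otr_type/outer_type
   triple only for polymorphic calls.  */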
4134
fb3f88cc
JH
4135/* Stream out NODE info to OB. */
4136
4137static void
4138ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4139{
4140 int node_ref;
7380e6ef 4141 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4142 struct ipa_node_params *info = IPA_NODE_REF (node);
4143 int j;
4144 struct cgraph_edge *e;
2465dcc2 4145 struct bitpack_d bp;
fb3f88cc 4146
7380e6ef 4147 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4148 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4149 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4150
0e8853ee
JH
4151 streamer_write_uhwi (ob, ipa_get_param_count (info));
4152 for (j = 0; j < ipa_get_param_count (info); j++)
4153 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4154 bp = bitpack_create (ob->main_stream);
062c604f 4155 gcc_assert (info->uses_analysis_done
661e7330 4156 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4157 gcc_assert (!info->node_enqueued);
4158 gcc_assert (!info->ipcp_orig_node);
4159 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4160 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4161 streamer_write_bitpack (&bp);
4502fe8d
MJ
4162 for (j = 0; j < ipa_get_param_count (info); j++)
4163 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4164 for (e = node->callees; e; e = e->next_callee)
4165 {
4166 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4167
412288f1 4168 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4169 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4170 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4171 }
e33c6cd6 4172 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4173 {
4174 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4175
412288f1 4176 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4177 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4178 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4179 ipa_write_indirect_edge_info (ob, e);
4180 }
fb3f88cc
JH
4181}
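/* Stream order of the per-node record emitted above (ipa_read_node_info
   below consumes the same sequence): the node reference, the parameter
   count, one move cost per parameter, a bitpack with one "used" bit per
   parameter, one controlled-uses counter per parameter, and then for every
   direct and indirect call edge its argument count followed by that many
   jump functions; indirect edges additionally carry their
   cgraph_indirect_call_info fields.  */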
4182
61502ca8 4183/* Stream in NODE info from IB. */
fb3f88cc
JH
4184
4185static void
4186ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4187 struct data_in *data_in)
4188{
4189 struct ipa_node_params *info = IPA_NODE_REF (node);
4190 int k;
4191 struct cgraph_edge *e;
2465dcc2 4192 struct bitpack_d bp;
fb3f88cc 4193
0e8853ee 4194 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4195
0e8853ee
JH
4196 for (k = 0; k < ipa_get_param_count (info); k++)
4197 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4198
412288f1 4199 bp = streamer_read_bitpack (ib);
fb3f88cc 4200 if (ipa_get_param_count (info) != 0)
062c604f 4201 info->uses_analysis_done = true;
fb3f88cc
JH
4202 info->node_enqueued = false;
4203 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4204 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4205 for (k = 0; k < ipa_get_param_count (info); k++)
4206 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4207 for (e = node->callees; e; e = e->next_callee)
4208 {
4209 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4210 int count = streamer_read_uhwi (ib);
fb3f88cc 4211
fb3f88cc
JH
4212 if (!count)
4213 continue;
9771b263 4214 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4215
fb3f88cc 4216 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4217 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4218 data_in);
fb3f88cc 4219 }
e33c6cd6 4220 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4221 {
4222 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4223 int count = streamer_read_uhwi (ib);
c8246dbe 4224
c8246dbe
JH
4225 if (count)
4226 {
9771b263 4227 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4228 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4229 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4230 data_in);
c8246dbe
JH
4231 }
4232 ipa_read_indirect_edge_info (ib, data_in, e);
4233 }
fb3f88cc
JH
4234}
4235
 4236/* Write jump functions for all functions in the current LTO partition. */
4237
4238void
f27c1867 4239ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4240{
4241 struct cgraph_node *node;
93536c97 4242 struct output_block *ob;
fb3f88cc 4243 unsigned int count = 0;
f27c1867
JH
4244 lto_symtab_encoder_iterator lsei;
4245 lto_symtab_encoder_t encoder;
4246
fb3f88cc 4247
9771b263 4248 if (!ipa_node_params_vector.exists ())
93536c97 4249 return;
fb3f88cc 4250
93536c97 4251 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4252 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4253 ob->cgraph_node = NULL;
f27c1867
JH
4254 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4255 lsei_next_function_in_partition (&lsei))
fb3f88cc 4256 {
f27c1867 4257 node = lsei_cgraph_node (lsei);
c47d0034
JH
4258 if (cgraph_function_with_gimple_body_p (node)
4259 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4260 count++;
4261 }
4262
412288f1 4263 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4264
4265 /* Process all of the functions. */
f27c1867
JH
4266 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4267 lsei_next_function_in_partition (&lsei))
fb3f88cc 4268 {
f27c1867 4269 node = lsei_cgraph_node (lsei);
c47d0034
JH
4270 if (cgraph_function_with_gimple_body_p (node)
4271 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4272 ipa_write_node_info (ob, node);
4273 }
412288f1 4274 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4275 produce_asm (ob, NULL);
4276 destroy_output_block (ob);
4277}
4278
4279/* Read section in file FILE_DATA of length LEN with data DATA. */
4280
4281static void
4282ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4283 size_t len)
4284{
4285 const struct lto_function_header *header =
4286 (const struct lto_function_header *) data;
4ad9a9de
EB
4287 const int cfg_offset = sizeof (struct lto_function_header);
4288 const int main_offset = cfg_offset + header->cfg_size;
4289 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4290 struct data_in *data_in;
4291 struct lto_input_block ib_main;
4292 unsigned int i;
4293 unsigned int count;
4294
4295 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4296 header->main_size);
4297
4298 data_in =
4299 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4300 header->string_size, vNULL);
412288f1 4301 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4302
4303 for (i = 0; i < count; i++)
4304 {
4305 unsigned int index;
4306 struct cgraph_node *node;
7380e6ef 4307 lto_symtab_encoder_t encoder;
fb3f88cc 4308
412288f1 4309 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4310 encoder = file_data->symtab_node_encoder;
4311 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4312 gcc_assert (node->definition);
fb3f88cc
JH
4313 ipa_read_node_info (&ib_main, node, data_in);
4314 }
4315 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4316 len);
4317 lto_data_in_delete (data_in);
4318}
4319
4320/* Read ipcp jump functions. */
4321
4322void
4323ipa_prop_read_jump_functions (void)
4324{
4325 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4326 struct lto_file_decl_data *file_data;
4327 unsigned int j = 0;
4328
4329 ipa_check_create_node_params ();
4330 ipa_check_create_edge_args ();
4331 ipa_register_cgraph_hooks ();
4332
4333 while ((file_data = file_data_vec[j++]))
4334 {
4335 size_t len;
4336 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4337
4338 if (data)
4339 ipa_prop_read_section (file_data, data, len);
4340 }
4341}
4342
b8698a0f 4343/* After merging units, we can get a mismatch in argument counts.
61502ca8 4344 Also, decl merging might have rendered parameter lists obsolete.
fb3f88cc
JH
4345 Also compute called_with_variable_arg info. */
4346
4347void
4348ipa_update_after_lto_read (void)
4349{
05d3aa37
MJ
4350 ipa_check_create_node_params ();
4351 ipa_check_create_edge_args ();
fb3f88cc 4352}
2c9561b5
MJ
4353
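/* Stream out the aggregate value replacement chain of NODE to OB. */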
4354void
4355write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4356{
4357 int node_ref;
4358 unsigned int count = 0;
4359 lto_symtab_encoder_t encoder;
4360 struct ipa_agg_replacement_value *aggvals, *av;
4361
4362 aggvals = ipa_get_agg_replacements_for_node (node);
4363 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4364 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4365 streamer_write_uhwi (ob, node_ref);
4366
4367 for (av = aggvals; av; av = av->next)
4368 count++;
4369 streamer_write_uhwi (ob, count);
4370
4371 for (av = aggvals; av; av = av->next)
4372 {
7b920a9a
MJ
4373 struct bitpack_d bp;
4374
2c9561b5
MJ
4375 streamer_write_uhwi (ob, av->offset);
4376 streamer_write_uhwi (ob, av->index);
4377 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4378
4379 bp = bitpack_create (ob->main_stream);
4380 bp_pack_value (&bp, av->by_ref, 1);
4381 streamer_write_bitpack (&bp);
2c9561b5
MJ
4382 }
4383}
4384
4385/* Stream in the aggregate value replacement chain for NODE from IB. */
4386
4387static void
4388read_agg_replacement_chain (struct lto_input_block *ib,
4389 struct cgraph_node *node,
4390 struct data_in *data_in)
4391{
4392 struct ipa_agg_replacement_value *aggvals = NULL;
4393 unsigned int count, i;
4394
4395 count = streamer_read_uhwi (ib);
 4396 for (i = 0; i < count; i++)
4397 {
4398 struct ipa_agg_replacement_value *av;
7b920a9a 4399 struct bitpack_d bp;
2c9561b5
MJ
4400
4401 av = ggc_alloc_ipa_agg_replacement_value ();
4402 av->offset = streamer_read_uhwi (ib);
4403 av->index = streamer_read_uhwi (ib);
4404 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4405 bp = streamer_read_bitpack (ib);
4406 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4407 av->next = aggvals;
4408 aggvals = av;
4409 }
4410 ipa_set_node_agg_value_chain (node, aggvals);
4411}
4412
 4413/* Write all aggregate replacements for nodes in the current partition. */
4414
4415void
4416ipa_prop_write_all_agg_replacement (void)
4417{
4418 struct cgraph_node *node;
4419 struct output_block *ob;
4420 unsigned int count = 0;
4421 lto_symtab_encoder_iterator lsei;
4422 lto_symtab_encoder_t encoder;
4423
4424 if (!ipa_node_agg_replacements)
4425 return;
4426
4427 ob = create_output_block (LTO_section_ipcp_transform);
4428 encoder = ob->decl_state->symtab_node_encoder;
4429 ob->cgraph_node = NULL;
4430 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4431 lsei_next_function_in_partition (&lsei))
4432 {
4433 node = lsei_cgraph_node (lsei);
4434 if (cgraph_function_with_gimple_body_p (node)
4435 && ipa_get_agg_replacements_for_node (node) != NULL)
4436 count++;
4437 }
4438
4439 streamer_write_uhwi (ob, count);
4440
4441 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4442 lsei_next_function_in_partition (&lsei))
4443 {
4444 node = lsei_cgraph_node (lsei);
4445 if (cgraph_function_with_gimple_body_p (node)
4446 && ipa_get_agg_replacements_for_node (node) != NULL)
4447 write_agg_replacement_chain (ob, node);
4448 }
4449 streamer_write_char_stream (ob->main_stream, 0);
4450 produce_asm (ob, NULL);
4451 destroy_output_block (ob);
4452}
4453
4454/* Read replacements section in file FILE_DATA of length LEN with data
4455 DATA. */
4456
4457static void
4458read_replacements_section (struct lto_file_decl_data *file_data,
4459 const char *data,
4460 size_t len)
4461{
4462 const struct lto_function_header *header =
4463 (const struct lto_function_header *) data;
4464 const int cfg_offset = sizeof (struct lto_function_header);
4465 const int main_offset = cfg_offset + header->cfg_size;
4466 const int string_offset = main_offset + header->main_size;
4467 struct data_in *data_in;
4468 struct lto_input_block ib_main;
4469 unsigned int i;
4470 unsigned int count;
4471
4472 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4473 header->main_size);
4474
4475 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4476 header->string_size, vNULL);
2c9561b5
MJ
4477 count = streamer_read_uhwi (&ib_main);
4478
4479 for (i = 0; i < count; i++)
4480 {
4481 unsigned int index;
4482 struct cgraph_node *node;
4483 lto_symtab_encoder_t encoder;
4484
4485 index = streamer_read_uhwi (&ib_main);
4486 encoder = file_data->symtab_node_encoder;
4487 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4488 gcc_assert (node->definition);
2c9561b5
MJ
4489 read_agg_replacement_chain (&ib_main, node, data_in);
4490 }
 4491 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4492 len);
4493 lto_data_in_delete (data_in);
4494}
4495
4496/* Read IPA-CP aggregate replacements. */
4497
4498void
4499ipa_prop_read_all_agg_replacement (void)
4500{
4501 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4502 struct lto_file_decl_data *file_data;
4503 unsigned int j = 0;
4504
4505 while ((file_data = file_data_vec[j++]))
4506 {
4507 size_t len;
4508 const char *data = lto_get_section_data (file_data,
4509 LTO_section_ipcp_transform,
4510 NULL, &len);
4511 if (data)
4512 read_replacements_section (file_data, data, len);
4513 }
4514}
4515
4516/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4517 NODE. */
4518
4519static void
4520adjust_agg_replacement_values (struct cgraph_node *node,
4521 struct ipa_agg_replacement_value *aggval)
4522{
4523 struct ipa_agg_replacement_value *v;
4524 int i, c = 0, d = 0, *adj;
4525
4526 if (!node->clone.combined_args_to_skip)
4527 return;
4528
4529 for (v = aggval; v; v = v->next)
4530 {
4531 gcc_assert (v->index >= 0);
4532 if (c < v->index)
4533 c = v->index;
4534 }
4535 c++;
4536
4537 adj = XALLOCAVEC (int, c);
4538 for (i = 0; i < c; i++)
4539 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4540 {
4541 adj[i] = -1;
4542 d++;
4543 }
4544 else
4545 adj[i] = i - d;
4546
4547 for (v = aggval; v; v = v->next)
4548 v->index = adj[v->index];
4549}
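/* A self-contained sketch of the remapping above: positions of skipped
   arguments map to -1 and every surviving argument's index is shifted down
   by the number of skipped arguments that precede it.  A plain int array
   stands in for the combined_args_to_skip bitmap; illustration only, not the
   GCC API.  For instance, with skipped = {0, 1, 0, 0} the remap is
   {0, -1, 1, 2}, so a value recorded for parameter 2 of the original
   function now refers to parameter 1 of the clone.  */

static void
build_skip_remap (const int *skipped, int n, int *remap)
{
  int i, removed = 0;

  for (i = 0; i < n; i++)
    {
      if (skipped[i])
        {
          remap[i] = -1;
          removed++;
        }
      else
        remap[i] = i - removed;
    }
}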
4550
4551
4552/* Function body transformation phase. */
4553
4554unsigned int
4555ipcp_transform_function (struct cgraph_node *node)
4556{
6e1aa848 4557 vec<ipa_param_descriptor_t> descriptors = vNULL;
2c9561b5
MJ
4558 struct param_analysis_info *parms_ainfo;
4559 struct ipa_agg_replacement_value *aggval;
4560 gimple_stmt_iterator gsi;
4561 basic_block bb;
4562 int param_count;
4563 bool cfg_changed = false, something_changed = false;
4564
4565 gcc_checking_assert (cfun);
4566 gcc_checking_assert (current_function_decl);
4567
4568 if (dump_file)
4569 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 4570 node->name (), node->order);
2c9561b5
MJ
4571
4572 aggval = ipa_get_agg_replacements_for_node (node);
4573 if (!aggval)
4574 return 0;
67348ccc 4575 param_count = count_formal_params (node->decl);
2c9561b5
MJ
4576 if (param_count == 0)
4577 return 0;
4578 adjust_agg_replacement_values (node, aggval);
4579 if (dump_file)
4580 ipa_dump_agg_replacement_values (dump_file, aggval);
4581 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4582 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4583 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4584 ipa_populate_param_decls (node, descriptors);
4585
4586 FOR_EACH_BB (bb)
4587 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4588 {
4589 struct ipa_agg_replacement_value *v;
4590 gimple stmt = gsi_stmt (gsi);
4591 tree rhs, val, t;
3ff2ca23 4592 HOST_WIDE_INT offset, size;
2c9561b5
MJ
4593 int index;
4594 bool by_ref, vce;
4595
4596 if (!gimple_assign_load_p (stmt))
4597 continue;
4598 rhs = gimple_assign_rhs1 (stmt);
4599 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4600 continue;
4601
4602 vce = false;
4603 t = rhs;
4604 while (handled_component_p (t))
4605 {
4606 /* V_C_E can do things like convert an array of integers to one
 4607 bigger integer, and similar things that we do not handle below. */
4608 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4609 {
4610 vce = true;
4611 break;
4612 }
4613 t = TREE_OPERAND (t, 0);
4614 }
4615 if (vce)
4616 continue;
4617
4618 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3ff2ca23 4619 rhs, &index, &offset, &size, &by_ref))
2c9561b5
MJ
4620 continue;
4621 for (v = aggval; v; v = v->next)
4622 if (v->index == index
4623 && v->offset == offset)
4624 break;
3ff2ca23
JJ
4625 if (!v
4626 || v->by_ref != by_ref
9439e9a1 4627 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
2c9561b5
MJ
4628 continue;
4629
4630 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4631 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4632 {
4633 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4634 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4635 else if (TYPE_SIZE (TREE_TYPE (rhs))
4636 == TYPE_SIZE (TREE_TYPE (v->value)))
4637 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4638 else
4639 {
4640 if (dump_file)
4641 {
4642 fprintf (dump_file, " const ");
4643 print_generic_expr (dump_file, v->value, 0);
4644 fprintf (dump_file, " can't be converted to type of ");
4645 print_generic_expr (dump_file, rhs, 0);
4646 fprintf (dump_file, "\n");
4647 }
4648 continue;
4649 }
4650 }
4651 else
4652 val = v->value;
4653
4654 if (dump_file && (dump_flags & TDF_DETAILS))
4655 {
4656 fprintf (dump_file, "Modifying stmt:\n ");
4657 print_gimple_stmt (dump_file, stmt, 0, 0);
4658 }
4659 gimple_assign_set_rhs_from_tree (&gsi, val);
4660 update_stmt (stmt);
4661
4662 if (dump_file && (dump_flags & TDF_DETAILS))
4663 {
4664 fprintf (dump_file, "into:\n ");
4665 print_gimple_stmt (dump_file, stmt, 0, 0);
4666 fprintf (dump_file, "\n");
4667 }
4668
4669 something_changed = true;
4670 if (maybe_clean_eh_stmt (stmt)
4671 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4672 cfg_changed = true;
4673 }
4674
9771b263 4675 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4676 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4677 descriptors.release ();
2c9561b5
MJ
4678
4679 if (!something_changed)
4680 return 0;
4681 else if (cfg_changed)
4682 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4683 else
4684 return TODO_update_ssa_only_virtuals;
4685}
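/* A minimal sketch of the lookup performed in the loop above: the first
   recorded value matching the load's parameter index and offset is taken,
   and the replacement is abandoned if its by-reference flag or size does not
   also match.  The toy_* names are illustrative stand-ins, not the GCC
   API.  */

struct toy_agg_replacement
{
  int index;
  long offset;
  int by_ref;
  long size_in_bits;
  long value;
  struct toy_agg_replacement *next;
};

static const struct toy_agg_replacement *
find_matching_replacement (const struct toy_agg_replacement *list,
                           int index, long offset, int by_ref, long size)
{
  const struct toy_agg_replacement *v;

  for (v = list; v; v = v->next)
    if (v->index == index && v->offset == offset)
      break;
  if (!v || v->by_ref != by_ref || v->size_in_bits != size)
    return NULL;
  return v;
}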