/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
8e9055ae 24#include "gimple.h"
518dc859
RL
25#include "langhooks.h"
26#include "ggc.h"
27#include "target.h"
518dc859 28#include "ipa-prop.h"
442b4905
AM
29#include "bitmap.h"
30#include "gimple-ssa.h"
31#include "tree-cfg.h"
32#include "tree-phinodes.h"
33#include "ssa-iterators.h"
34#include "tree-into-ssa.h"
35#include "tree-dfa.h"
518dc859 36#include "tree-pass.h"
771578a0 37#include "tree-inline.h"
0f378cb5 38#include "ipa-inline.h"
518dc859 39#include "flags.h"
3e293154 40#include "diagnostic.h"
cf835838 41#include "gimple-pretty-print.h"
fb3f88cc 42#include "lto-streamer.h"
f0efc7aa
DN
43#include "data-streamer.h"
44#include "tree-streamer.h"
dfea20f1 45#include "params.h"
450ad0cd 46#include "ipa-utils.h"
771578a0 47
062c604f
MJ
/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

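/* For example (illustrative only): a function compiled with
   __attribute__((optimize ("O0"))) carries a
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is zero, so the
   predicate above returns true and IPA-CP leaves that function alone.  */
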
/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

749aa96d
MJ
209/* Print the jump functions associated with call graph edge CS to file F. */
210
211static void
212ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
213{
214 int i, count;
215
216 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
217 for (i = 0; i < count; i++)
218 {
219 struct ipa_jump_func *jump_func;
220 enum jump_func_type type;
221
222 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
223 type = jump_func->type;
224
225 fprintf (f, " param %d: ", i);
226 if (type == IPA_JF_UNKNOWN)
227 fprintf (f, "UNKNOWN\n");
228 else if (type == IPA_JF_KNOWN_TYPE)
229 {
c7573249
MJ
230 fprintf (f, "KNOWN TYPE: base ");
231 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
232 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
233 jump_func->value.known_type.offset);
234 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
235 fprintf (f, "\n");
749aa96d
MJ
236 }
237 else if (type == IPA_JF_CONST)
238 {
4502fe8d 239 tree val = jump_func->value.constant.value;
749aa96d
MJ
240 fprintf (f, "CONST: ");
241 print_generic_expr (f, val, 0);
242 if (TREE_CODE (val) == ADDR_EXPR
243 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
244 {
245 fprintf (f, " -> ");
246 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
247 0);
248 }
249 fprintf (f, "\n");
250 }
749aa96d
MJ
251 else if (type == IPA_JF_PASS_THROUGH)
252 {
253 fprintf (f, "PASS THROUGH: ");
8b7773a4 254 fprintf (f, "%d, op %s",
749aa96d 255 jump_func->value.pass_through.formal_id,
5806f481 256 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 257 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
258 {
259 fprintf (f, " ");
260 print_generic_expr (f,
261 jump_func->value.pass_through.operand, 0);
262 }
263 if (jump_func->value.pass_through.agg_preserved)
264 fprintf (f, ", agg_preserved");
b8f6e610
MJ
265 if (jump_func->value.pass_through.type_preserved)
266 fprintf (f, ", type_preserved");
3ea6239f 267 fprintf (f, "\n");
749aa96d
MJ
268 }
269 else if (type == IPA_JF_ANCESTOR)
270 {
271 fprintf (f, "ANCESTOR: ");
272 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
273 jump_func->value.ancestor.formal_id,
274 jump_func->value.ancestor.offset);
275 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
276 if (jump_func->value.ancestor.agg_preserved)
277 fprintf (f, ", agg_preserved");
b8f6e610
MJ
278 if (jump_func->value.ancestor.type_preserved)
279 fprintf (f, ", type_preserved");
3ea6239f 280 fprintf (f, "\n");
749aa96d 281 }
8b7773a4
MJ
282
283 if (jump_func->agg.items)
284 {
285 struct ipa_agg_jf_item *item;
286 int j;
287
288 fprintf (f, " Aggregate passed by %s:\n",
289 jump_func->agg.by_ref ? "reference" : "value");
9771b263 290 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
291 {
292 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
293 item->offset);
294 if (TYPE_P (item->value))
295 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
296 tree_low_cst (TYPE_SIZE (item->value), 1));
297 else
298 {
299 fprintf (f, "cst: ");
300 print_generic_expr (f, item->value, 0);
301 }
302 fprintf (f, "\n");
303 }
304 }
749aa96d
MJ
305 }
306}
307
308
be95e2b9
MJ
309/* Print the jump functions of all arguments on all call graph edges going from
310 NODE to file F. */
311
518dc859 312void
3e293154 313ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 314{
3e293154 315 struct cgraph_edge *cs;
518dc859 316
9de04252 317 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
67348ccc 318 node->order);
3e293154
MJ
319 for (cs = node->callees; cs; cs = cs->next_callee)
320 {
321 if (!ipa_edge_args_info_available_for_edge_p (cs))
322 continue;
323
749aa96d 324 fprintf (f, " callsite %s/%i -> %s/%i : \n",
67348ccc 325 xstrdup (cgraph_node_name (node)), node->order,
9de04252 326 xstrdup (cgraph_node_name (cs->callee)),
67348ccc 327 cs->callee->order);
749aa96d
MJ
328 ipa_print_node_jump_functions_for_edge (f, cs);
329 }
518dc859 330
9de04252 331 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 332 {
9de04252 333 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
3e293154 336
9de04252
MJ
337 ii = cs->indirect_info;
338 if (ii->agg_contents)
c13bc3d9 339 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 340 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 341 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
342 ii->param_index, ii->offset,
343 ii->by_ref ? "by reference" : "by_value");
344 else
345 fprintf (f, " indirect %s callsite, calling param %i",
346 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
347
749aa96d
MJ
348 if (cs->call_stmt)
349 {
9de04252 350 fprintf (f, ", for stmt ");
749aa96d 351 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 352 }
749aa96d 353 else
9de04252 354 fprintf (f, "\n");
749aa96d 355 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
356 }
357}
358
359/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 360
3e293154
MJ
361void
362ipa_print_all_jump_functions (FILE *f)
363{
364 struct cgraph_node *node;
365
ca30a539 366 fprintf (f, "\nJump functions:\n");
65c70e6b 367 FOR_EACH_FUNCTION (node)
3e293154
MJ
368 {
369 ipa_print_node_jump_functions (f, node);
370 }
371}
372
7b872d9e
MJ
373/* Set JFUNC to be a known type jump function. */
374
375static void
376ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
377 tree base_type, tree component_type)
378{
06d65050
JH
379 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
380 && TYPE_BINFO (component_type));
7b872d9e
MJ
381 jfunc->type = IPA_JF_KNOWN_TYPE;
382 jfunc->value.known_type.offset = offset,
383 jfunc->value.known_type.base_type = base_type;
384 jfunc->value.known_type.component_type = component_type;
385}
386
b8f6e610
MJ
387/* Set JFUNC to be a copy of another jmp (to be used by jump function
388 combination code). The two functions will share their rdesc. */
389
390static void
391ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
392 struct ipa_jump_func *src)
393
394{
395 gcc_checking_assert (src->type == IPA_JF_CONST);
396 dst->type = IPA_JF_CONST;
397 dst->value.constant = src->value.constant;
398}
399
7b872d9e
MJ
400/* Set JFUNC to be a constant jmp function. */
401
402static void
4502fe8d
MJ
403ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
404 struct cgraph_edge *cs)
7b872d9e 405{
5368224f
DC
406 constant = unshare_expr (constant);
407 if (constant && EXPR_P (constant))
408 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 409 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
410 jfunc->value.constant.value = unshare_expr_without_location (constant);
411
412 if (TREE_CODE (constant) == ADDR_EXPR
413 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
414 {
415 struct ipa_cst_ref_desc *rdesc;
416 if (!ipa_refdesc_pool)
417 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
418 sizeof (struct ipa_cst_ref_desc), 32);
419
420 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
421 rdesc->cs = cs;
422 rdesc->next_duplicate = NULL;
423 rdesc->refcount = 1;
424 jfunc->value.constant.rdesc = rdesc;
425 }
426 else
427 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
428}
429
430/* Set JFUNC to be a simple pass-through jump function. */
431static void
8b7773a4 432ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 433 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
434{
435 jfunc->type = IPA_JF_PASS_THROUGH;
436 jfunc->value.pass_through.operand = NULL_TREE;
437 jfunc->value.pass_through.formal_id = formal_id;
438 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 439 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 440 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
441}
442
443/* Set JFUNC to be an arithmetic pass through jump function. */
444
445static void
446ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
447 tree operand, enum tree_code operation)
448{
449 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 450 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
451 jfunc->value.pass_through.formal_id = formal_id;
452 jfunc->value.pass_through.operation = operation;
8b7773a4 453 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 454 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
455}
456
457/* Set JFUNC to be an ancestor jump function. */
458
459static void
460ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
461 tree type, int formal_id, bool agg_preserved,
462 bool type_preserved)
7b872d9e
MJ
463{
464 jfunc->type = IPA_JF_ANCESTOR;
465 jfunc->value.ancestor.formal_id = formal_id;
466 jfunc->value.ancestor.offset = offset;
467 jfunc->value.ancestor.type = type;
8b7773a4 468 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 469 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
470}
471
e248d83f
MJ
/* Extract the actual BINFO being described by JFUNC, which must be a known
   type jump function.  */
474
475tree
476ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
477{
478 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
479 if (!base_binfo)
480 return NULL_TREE;
481 return get_binfo_at_offset (base_binfo,
482 jfunc->value.known_type.offset,
483 jfunc->value.known_type.component_type);
484}
485
f65cf2b7
MJ
486/* Structure to be passed in between detect_type_change and
487 check_stmt_for_type_change. */
488
489struct type_change_info
490{
290ebcb7
MJ
491 /* Offset into the object where there is the virtual method pointer we are
492 looking for. */
493 HOST_WIDE_INT offset;
494 /* The declaration or SSA_NAME pointer of the base that we are checking for
495 type change. */
496 tree object;
497 /* If we actually can tell the type that the object has changed to, it is
498 stored in this field. Otherwise it remains NULL_TREE. */
499 tree known_current_type;
f65cf2b7
MJ
500 /* Set to true if dynamic type change has been detected. */
501 bool type_maybe_changed;
290ebcb7
MJ
502 /* Set to true if multiple types have been encountered. known_current_type
503 must be disregarded in that case. */
504 bool multiple_types_encountered;
f65cf2b7
MJ
505};
506
507/* Return true if STMT can modify a virtual method table pointer.
508
509 This function makes special assumptions about both constructors and
510 destructors which are all the functions that are allowed to alter the VMT
511 pointers. It assumes that destructors begin with assignment into all VMT
512 pointers and that constructors essentially look in the following way:
513
514 1) The very first thing they do is that they call constructors of ancestor
515 sub-objects that have them.
516
517 2) Then VMT pointers of this and all its ancestors is set to new values
518 corresponding to the type corresponding to the constructor.
519
520 3) Only afterwards, other stuff such as constructor of member sub-objects
521 and the code written by the user is run. Only this may include calling
522 virtual functions, directly or indirectly.
523
524 There is no way to call a constructor of an ancestor sub-object in any
525 other way.
526
527 This means that we do not have to care whether constructors get the correct
528 type information because they will always change it (in fact, if we define
529 the type to be given by the VMT pointer, it is undefined).
530
531 The most important fact to derive from the above is that if, for some
532 statement in the section 3, we try to detect whether the dynamic type has
533 changed, we can safely ignore all calls as we examine the function body
534 backwards until we reach statements in section 2 because these calls cannot
535 be ancestor constructors or destructors (if the input is not bogus) and so
536 do not change the dynamic type (this holds true only for automatically
537 allocated objects but at the moment we devirtualize only these). We then
538 must detect that statements in section 2 change the dynamic type and can try
539 to derive the new type. That is enough and we can stop, we will never see
540 the calls into constructors of sub-objects in this code. Therefore we can
541 safely ignore all call statements that we traverse.
542 */
543
544static bool
545stmt_may_be_vtbl_ptr_store (gimple stmt)
546{
547 if (is_gimple_call (stmt))
548 return false;
549 else if (is_gimple_assign (stmt))
550 {
551 tree lhs = gimple_assign_lhs (stmt);
552
0004f992
MJ
553 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
554 {
555 if (flag_strict_aliasing
556 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
557 return false;
558
559 if (TREE_CODE (lhs) == COMPONENT_REF
560 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 561 return false;
0004f992
MJ
562 /* In the future we might want to use get_base_ref_and_offset to find
563 if there is a field corresponding to the offset and if so, proceed
564 almost like if it was a component ref. */
565 }
f65cf2b7
MJ
566 }
567 return true;
568}
569
290ebcb7
MJ
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table can be identified, return that type.  Otherwise return NULL_TREE.  */
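/* For illustration (hypothetical GIMPLE), a matching statement looks roughly
   like

       this_2(D)->_vptr.A = &_ZTV1B[2];

   where _ZTV1B is the virtual table of B; DECL_CONTEXT of that vtable decl
   then gives the new dynamic type B.  */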
573
574static tree
575extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
576{
577 HOST_WIDE_INT offset, size, max_size;
578 tree lhs, rhs, base;
579
580 if (!gimple_assign_single_p (stmt))
581 return NULL_TREE;
582
583 lhs = gimple_assign_lhs (stmt);
584 rhs = gimple_assign_rhs1 (stmt);
585 if (TREE_CODE (lhs) != COMPONENT_REF
586 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
587 || TREE_CODE (rhs) != ADDR_EXPR)
588 return NULL_TREE;
589 rhs = get_base_address (TREE_OPERAND (rhs, 0));
590 if (!rhs
591 || TREE_CODE (rhs) != VAR_DECL
592 || !DECL_VIRTUAL_P (rhs))
593 return NULL_TREE;
594
595 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
596 if (offset != tci->offset
597 || size != POINTER_SIZE
598 || max_size != POINTER_SIZE)
599 return NULL_TREE;
600 if (TREE_CODE (base) == MEM_REF)
601 {
602 if (TREE_CODE (tci->object) != MEM_REF
603 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
604 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
605 TREE_OPERAND (base, 1)))
606 return NULL_TREE;
607 }
608 else if (tci->object != base)
609 return NULL_TREE;
610
611 return DECL_CONTEXT (rhs);
612}
613
61502ca8 614/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
615 detect_type_change to check whether a particular statement may modify
616 the virtual table pointer, and if possible also determine the new type of
617 the (sub-)object. It stores its result into DATA, which points to a
618 type_change_info structure. */
619
620static bool
621check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
622{
623 gimple stmt = SSA_NAME_DEF_STMT (vdef);
624 struct type_change_info *tci = (struct type_change_info *) data;
625
626 if (stmt_may_be_vtbl_ptr_store (stmt))
627 {
290ebcb7
MJ
628 tree type;
629 type = extr_type_from_vtbl_ptr_store (stmt, tci);
630 if (tci->type_maybe_changed
631 && type != tci->known_current_type)
632 tci->multiple_types_encountered = true;
633 tci->known_current_type = type;
f65cf2b7
MJ
634 tci->type_maybe_changed = true;
635 return true;
636 }
637 else
638 return false;
639}
640
290ebcb7
MJ
641
642
06d65050
JH
643/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
644 callsite CALL) by looking for assignments to its virtual table pointer. If
645 it is, return true and fill in the jump function JFUNC with relevant type
646 information or set it to unknown. ARG is the object itself (not a pointer
647 to it, unless dereferenced). BASE is the base of the memory access as
648 returned by get_ref_base_and_extent, as is the offset. */
f65cf2b7
MJ
649
650static bool
06d65050
JH
651detect_type_change (tree arg, tree base, tree comp_type, gimple call,
652 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
653{
654 struct type_change_info tci;
655 ao_ref ao;
656
657 gcc_checking_assert (DECL_P (arg)
658 || TREE_CODE (arg) == MEM_REF
659 || handled_component_p (arg));
660 /* Const calls cannot call virtual methods through VMT and so type changes do
661 not matter. */
06d65050
JH
662 if (!flag_devirtualize || !gimple_vuse (call)
663 /* Be sure expected_type is polymorphic. */
664 || !comp_type
665 || TREE_CODE (comp_type) != RECORD_TYPE
666 || !TYPE_BINFO (comp_type)
667 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
f65cf2b7
MJ
668 return false;
669
dd887943 670 ao_ref_init (&ao, arg);
f65cf2b7
MJ
671 ao.base = base;
672 ao.offset = offset;
673 ao.size = POINTER_SIZE;
674 ao.max_size = ao.size;
f65cf2b7 675
290ebcb7
MJ
676 tci.offset = offset;
677 tci.object = get_base_address (arg);
678 tci.known_current_type = NULL_TREE;
679 tci.type_maybe_changed = false;
680 tci.multiple_types_encountered = false;
681
f65cf2b7
MJ
682 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
683 &tci, NULL);
684 if (!tci.type_maybe_changed)
685 return false;
686
290ebcb7
MJ
687 if (!tci.known_current_type
688 || tci.multiple_types_encountered
689 || offset != 0)
690 jfunc->type = IPA_JF_UNKNOWN;
691 else
7b872d9e 692 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 693
f65cf2b7
MJ
694 return true;
695}
696
697/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
698 SSA name (its dereference will become the base and the offset is assumed to
699 be zero). */
700
701static bool
06d65050
JH
702detect_type_change_ssa (tree arg, tree comp_type,
703 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
704{
705 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 706 if (!flag_devirtualize
06d65050 707 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
708 return false;
709
710 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 711 build_int_cst (ptr_type_node, 0));
f65cf2b7 712
06d65050 713 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
714}
715
fdb0e1b4
MJ
716/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
717 boolean variable pointed to by DATA. */
718
719static bool
720mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
721 void *data)
722{
723 bool *b = (bool *) data;
724 *b = true;
725 return true;
726}
727
688010ba 728/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4
MJ
729 a value known not to be modified in this function before reaching the
730 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
731 information about the parameter. */
fdb0e1b4
MJ
732
733static bool
8b7773a4
MJ
734parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
735 gimple stmt, tree parm_load)
fdb0e1b4
MJ
736{
737 bool modified = false;
8b7773a4 738 bitmap *visited_stmts;
fdb0e1b4
MJ
739 ao_ref refd;
740
8b7773a4
MJ
741 if (parm_ainfo && parm_ainfo->parm_modified)
742 return false;
fdb0e1b4
MJ
743
744 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
745 ao_ref_init (&refd, parm_load);
746 /* We can cache visited statements only when parm_ainfo is available and when
747 we are looking at a naked load of the whole parameter. */
748 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
749 visited_stmts = NULL;
750 else
751 visited_stmts = &parm_ainfo->parm_visited_statements;
752 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
753 visited_stmts);
754 if (parm_ainfo && modified)
755 parm_ainfo->parm_modified = true;
756 return !modified;
fdb0e1b4
MJ
757}
758
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */
762
763static int
9771b263 764load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
fdb0e1b4
MJ
765 struct param_analysis_info *parms_ainfo,
766 gimple stmt)
767{
768 int index;
769 tree op1;
770
771 if (!gimple_assign_single_p (stmt))
772 return -1;
773
774 op1 = gimple_assign_rhs1 (stmt);
775 if (TREE_CODE (op1) != PARM_DECL)
776 return -1;
777
d044dd17 778 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 779 if (index < 0
8b7773a4
MJ
780 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
781 : NULL, stmt, op1))
fdb0e1b4
MJ
782 return -1;
783
784 return index;
785}
f65cf2b7 786
8b7773a4
MJ
787/* Return true if memory reference REF loads data that are known to be
788 unmodified in this function before reaching statement STMT. PARM_AINFO, if
789 non-NULL, is a pointer to a structure containing temporary information about
790 PARM. */
791
792static bool
793parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
794 gimple stmt, tree ref)
795{
796 bool modified = false;
797 ao_ref refd;
798
799 gcc_checking_assert (gimple_vuse (stmt));
800 if (parm_ainfo && parm_ainfo->ref_modified)
801 return false;
802
803 ao_ref_init (&refd, ref);
804 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
805 NULL);
806 if (parm_ainfo && modified)
807 parm_ainfo->ref_modified = true;
808 return !modified;
809}
810
811/* Return true if the data pointed to by PARM is known to be unmodified in this
812 function before reaching call statement CALL into which it is passed.
813 PARM_AINFO is a pointer to a structure containing temporary information
814 about PARM. */
815
816static bool
817parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
818 gimple call, tree parm)
819{
820 bool modified = false;
821 ao_ref refd;
822
  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
826 if (!gimple_vuse (call)
827 || !POINTER_TYPE_P (TREE_TYPE (parm)))
828 return false;
829
830 if (parm_ainfo->pt_modified)
831 return false;
832
833 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
834 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
835 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
836 if (modified)
837 parm_ainfo->pt_modified = true;
838 return !modified;
839}
840
841/* Return true if we can prove that OP is a memory reference loading unmodified
842 data from an aggregate passed as a parameter and if the aggregate is passed
843 by reference, that the alias type of the load corresponds to the type of the
844 formal parameter (so that we can rely on this type for TBAA in callers).
845 INFO and PARMS_AINFO describe parameters of the current function (but the
846 latter can be NULL), STMT is the load statement. If function returns true,
847 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
848 within the aggregate and whether it is a load from a value passed by
849 reference respectively. */
850
851static bool
9771b263 852ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
8b7773a4
MJ
853 struct param_analysis_info *parms_ainfo, gimple stmt,
854 tree op, int *index_p, HOST_WIDE_INT *offset_p,
855 bool *by_ref_p)
856{
857 int index;
858 HOST_WIDE_INT size, max_size;
859 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
860
861 if (max_size == -1 || max_size != size || *offset_p < 0)
862 return false;
863
864 if (DECL_P (base))
865 {
d044dd17 866 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
867 if (index >= 0
868 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
869 : NULL, stmt, op))
870 {
871 *index_p = index;
872 *by_ref_p = false;
873 return true;
874 }
875 return false;
876 }
877
878 if (TREE_CODE (base) != MEM_REF
879 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
880 || !integer_zerop (TREE_OPERAND (base, 1)))
881 return false;
882
883 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
884 {
885 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 886 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
887 }
888 else
889 {
890 /* This branch catches situations where a pointer parameter is not a
891 gimple register, for example:
892
893 void hip7(S*) (struct S * p)
894 {
895 void (*<T2e4>) (struct S *) D.1867;
896 struct S * p.1;
897
898 <bb 2>:
899 p.1_1 = p;
900 D.1867_2 = p.1_1->f;
901 D.1867_2 ();
902 gdp = &p;
903 */
904
905 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 906 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
907 }
908
909 if (index >= 0
910 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
911 stmt, op))
912 {
913 *index_p = index;
914 *by_ref_p = true;
915 return true;
916 }
917 return false;
918}
919
920/* Just like the previous function, just without the param_analysis_info
921 pointer, for users outside of this file. */
922
923bool
924ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
925 tree op, int *index_p, HOST_WIDE_INT *offset_p,
926 bool *by_ref_p)
927{
d044dd17
MJ
928 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
929 offset_p, by_ref_p);
8b7773a4
MJ
930}
931
b258210c 932/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
933 of an assignment statement STMT, try to determine whether we are actually
934 handling any of the following cases and construct an appropriate jump
935 function into JFUNC if so:
936
937 1) The passed value is loaded from a formal parameter which is not a gimple
938 register (most probably because it is addressable, the value has to be
939 scalar) and we can guarantee the value has not changed. This case can
940 therefore be described by a simple pass-through jump function. For example:
941
942 foo (int a)
943 {
944 int a.0;
945
946 a.0_2 = a;
947 bar (a.0_2);
948
949 2) The passed value can be described by a simple arithmetic pass-through
950 jump function. E.g.
951
952 foo (int a)
953 {
954 int D.2064;
955
956 D.2064_4 = a.1(D) + 4;
957 bar (D.2064_4);
958
959 This case can also occur in combination of the previous one, e.g.:
960
961 foo (int a, int z)
962 {
963 int a.0;
964 int D.2064;
965
966 a.0_3 = a;
967 D.2064_4 = a.0_3 + 4;
968 foo (D.2064_4);
969
   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and arise in code such as:
973
974 B::foo() (struct B * const this)
975 {
976 struct A * D.1845;
977
978 D.1845_2 = &this_1(D)->D.1748;
979 A::bar (D.1845_2);
980
   INFO is the structure describing individual parameters that is accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */
685b0d13
MJ
984
985static void
b258210c 986compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 987 struct param_analysis_info *parms_ainfo,
b258210c 988 struct ipa_jump_func *jfunc,
06d65050
JH
989 gimple call, gimple stmt, tree name,
990 tree param_type)
685b0d13
MJ
991{
992 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 993 tree op1, tc_ssa, base, ssa;
685b0d13 994 int index;
685b0d13 995
685b0d13 996 op1 = gimple_assign_rhs1 (stmt);
685b0d13 997
fdb0e1b4 998 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 999 {
fdb0e1b4
MJ
1000 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1001 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1002 else
d044dd17 1003 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
1004 SSA_NAME_DEF_STMT (op1));
1005 tc_ssa = op1;
1006 }
1007 else
1008 {
d044dd17 1009 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
1010 tc_ssa = gimple_assign_lhs (stmt);
1011 }
1012
1013 if (index >= 0)
1014 {
1015 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1016
b258210c 1017 if (op2)
685b0d13 1018 {
b258210c
MJ
1019 if (!is_gimple_ip_invariant (op2)
1020 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1021 && !useless_type_conversion_p (TREE_TYPE (name),
1022 TREE_TYPE (op1))))
1023 return;
1024
7b872d9e
MJ
1025 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1026 gimple_assign_rhs_code (stmt));
685b0d13 1027 }
b8f6e610 1028 else if (gimple_assign_single_p (stmt))
8b7773a4
MJ
1029 {
1030 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1031 call, tc_ssa);
06d65050
JH
1032 bool type_p = false;
1033
1034 if (param_type && POINTER_TYPE_P (param_type))
1035 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1036 call, jfunc);
b8f6e610
MJ
1037 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1038 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1039 }
685b0d13
MJ
1040 return;
1041 }
1042
1043 if (TREE_CODE (op1) != ADDR_EXPR)
1044 return;
1045 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1046 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1047 return;
32aa622c
MJ
1048 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1049 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1050 /* If this is a varying address, punt. */
1051 || max_size == -1
1052 || max_size != size)
685b0d13 1053 return;
32aa622c 1054 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
1055 ssa = TREE_OPERAND (base, 0);
1056 if (TREE_CODE (ssa) != SSA_NAME
1057 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1058 || offset < 0)
685b0d13
MJ
1059 return;
1060
b8f6e610 1061 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1062 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1063 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1064 {
06d65050
JH
1065 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1066 call, jfunc, offset);
b8f6e610
MJ
1067 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1068 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1069 parm_ref_data_pass_through_p (&parms_ainfo[index],
1070 call, ssa), type_p);
1071 }
685b0d13
MJ
1072}
1073
40591473
MJ
1074/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1075 it looks like:
1076
1077 iftmp.1_3 = &obj_2(D)->D.1762;
1078
1079 The base of the MEM_REF must be a default definition SSA NAME of a
1080 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1081 whole MEM_REF expression is returned and the offset calculated from any
1082 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1083 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1084
1085static tree
1086get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1087{
1088 HOST_WIDE_INT size, max_size;
1089 tree expr, parm, obj;
1090
1091 if (!gimple_assign_single_p (assign))
1092 return NULL_TREE;
1093 expr = gimple_assign_rhs1 (assign);
1094
1095 if (TREE_CODE (expr) != ADDR_EXPR)
1096 return NULL_TREE;
1097 expr = TREE_OPERAND (expr, 0);
1098 obj = expr;
1099 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1100
1101 if (TREE_CODE (expr) != MEM_REF
1102 /* If this is a varying address, punt. */
1103 || max_size == -1
1104 || max_size != size
1105 || *offset < 0)
1106 return NULL_TREE;
1107 parm = TREE_OPERAND (expr, 0);
1108 if (TREE_CODE (parm) != SSA_NAME
1109 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1110 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1111 return NULL_TREE;
1112
1113 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1114 *obj_p = obj;
1115 return expr;
1116}
1117
685b0d13 1118
b258210c
MJ
1119/* Given that an actual argument is an SSA_NAME that is a result of a phi
1120 statement PHI, try to find out whether NAME is in fact a
1121 multiple-inheritance typecast from a descendant into an ancestor of a formal
1122 parameter and thus can be described by an ancestor jump function and if so,
1123 write the appropriate function into JFUNC.
1124
1125 Essentially we want to match the following pattern:
1126
1127 if (obj_2(D) != 0B)
1128 goto <bb 3>;
1129 else
1130 goto <bb 4>;
1131
1132 <bb 3>:
1133 iftmp.1_3 = &obj_2(D)->D.1762;
1134
1135 <bb 4>:
1136 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1137 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1138 return D.1879_6; */
1139
1140static void
1141compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1142 struct param_analysis_info *parms_ainfo,
b258210c 1143 struct ipa_jump_func *jfunc,
06d65050 1144 gimple call, gimple phi, tree param_type)
b258210c 1145{
40591473 1146 HOST_WIDE_INT offset;
b258210c
MJ
1147 gimple assign, cond;
1148 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1149 tree tmp, parm, expr, obj;
b258210c
MJ
1150 int index, i;
1151
54e348cb 1152 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1153 return;
1154
54e348cb
MJ
1155 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1156 tmp = PHI_ARG_DEF (phi, 0);
1157 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1158 tmp = PHI_ARG_DEF (phi, 1);
1159 else
1160 return;
b258210c
MJ
1161 if (TREE_CODE (tmp) != SSA_NAME
1162 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1163 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1164 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1165 return;
1166
1167 assign = SSA_NAME_DEF_STMT (tmp);
1168 assign_bb = gimple_bb (assign);
40591473 1169 if (!single_pred_p (assign_bb))
b258210c 1170 return;
40591473
MJ
1171 expr = get_ancestor_addr_info (assign, &obj, &offset);
1172 if (!expr)
b258210c
MJ
1173 return;
1174 parm = TREE_OPERAND (expr, 0);
b258210c 1175 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1176 gcc_assert (index >= 0);
b258210c
MJ
1177
1178 cond_bb = single_pred (assign_bb);
1179 cond = last_stmt (cond_bb);
69610617
SB
1180 if (!cond
1181 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1182 || gimple_cond_code (cond) != NE_EXPR
1183 || gimple_cond_lhs (cond) != parm
1184 || !integer_zerop (gimple_cond_rhs (cond)))
1185 return;
1186
b258210c
MJ
1187 phi_bb = gimple_bb (phi);
1188 for (i = 0; i < 2; i++)
1189 {
1190 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1191 if (pred != assign_bb && pred != cond_bb)
1192 return;
1193 }
1194
06d65050
JH
1195 bool type_p = false;
1196 if (param_type && POINTER_TYPE_P (param_type))
1197 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1198 call, jfunc, offset);
b8f6e610 1199 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8b7773a4
MJ
1200 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1201 parm_ref_data_pass_through_p (&parms_ainfo[index],
b8f6e610 1202 call, parm), type_p);
b258210c
MJ
1203}
1204
61502ca8 1205/* Given OP which is passed as an actual argument to a called function,
b258210c 1206 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1207 and if so, create one and store it to JFUNC.
1208 EXPECTED_TYPE represents a type the argument should be in */
b258210c
MJ
1209
1210static void
f65cf2b7 1211compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1212 gimple call, tree expected_type)
b258210c 1213{
32aa622c 1214 HOST_WIDE_INT offset, size, max_size;
c7573249 1215 tree base;
b258210c 1216
05842ff5
MJ
1217 if (!flag_devirtualize
1218 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1219 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1220 /* Be sure expected_type is polymorphic. */
1221 || !expected_type
1222 || TREE_CODE (expected_type) != RECORD_TYPE
1223 || !TYPE_BINFO (expected_type)
1224 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1225 return;
1226
1227 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1228 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1229 if (!DECL_P (base)
1230 || max_size == -1
1231 || max_size != size
1232 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1233 || is_global_var (base))
1234 return;
1235
06d65050 1236 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1237 return;
1238
06d65050
JH
1239 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1240 expected_type);
b258210c
MJ
1241}
1242
be95e2b9
MJ
1243/* Inspect the given TYPE and return true iff it has the same structure (the
1244 same number of fields of the same types) as a C++ member pointer. If
1245 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1246 corresponding fields there. */
1247
3e293154
MJ
1248static bool
1249type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1250{
1251 tree fld;
1252
1253 if (TREE_CODE (type) != RECORD_TYPE)
1254 return false;
1255
1256 fld = TYPE_FIELDS (type);
1257 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4
MJ
1258 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1259 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1260 return false;
1261
1262 if (method_ptr)
1263 *method_ptr = fld;
1264
910ad8de 1265 fld = DECL_CHAIN (fld);
8b7773a4
MJ
1266 if (!fld || INTEGRAL_TYPE_P (fld)
1267 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1268 return false;
1269 if (delta)
1270 *delta = fld;
1271
910ad8de 1272 if (DECL_CHAIN (fld))
3e293154
MJ
1273 return false;
1274
1275 return true;
1276}
1277
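/* For illustration: the shape this predicate looks for is essentially an
   Itanium-ABI-style pointer to member function, roughly

       struct
       {
         void (T::*__pfn) ();     (pointer to the target method)
         ptrdiff_t __delta;       (this-pointer adjustment)
       };

   i.e. a RECORD_TYPE whose first field is a pointer to a METHOD_TYPE,
   followed by exactly one further field holding the delta.  */
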
61502ca8 1278/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1279 return the rhs of its defining statement. Otherwise return RHS as it
1280 is. */
7ec49257
MJ
1281
1282static inline tree
1283get_ssa_def_if_simple_copy (tree rhs)
1284{
1285 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1286 {
1287 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1288
1289 if (gimple_assign_single_p (def_stmt))
1290 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1291 else
1292 break;
7ec49257
MJ
1293 }
1294 return rhs;
1295}
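
/* For instance, given the (illustrative) copy chain

       b_3 = a_2(D);
       c_4 = b_3;

   calling get_ssa_def_if_simple_copy on c_4 walks back through the copies
   and returns a_2(D), the default definition at the head of the chain.  */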
1296
8b7773a4
MJ
/* Simple linked list, describing known contents of an aggregate before a
   call.  */
1299
1300struct ipa_known_agg_contents_list
1301{
1302 /* Offset and size of the described part of the aggregate. */
1303 HOST_WIDE_INT offset, size;
1304 /* Known constant value or NULL if the contents is known to be unknown. */
1305 tree constant;
1306 /* Pointer to the next structure in the list. */
1307 struct ipa_known_agg_contents_list *next;
1308};
3e293154 1309
8b7773a4
MJ
1310/* Traverse statements from CALL backwards, scanning whether an aggregate given
1311 in ARG is filled in with constant values. ARG can either be an aggregate
1312 expression or a pointer to an aggregate. JFUNC is the jump function into
1313 which the constants are subsequently stored. */
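/* For illustration, with a (hypothetical) caller fragment such as

       s.i = 4;
       s.d = 3.5;
       foo (&s);

   the backward walk below finds the two constant stores that are not
   clobbered before the call and records their offsets and values in
   JFUNC->agg.  */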
be95e2b9 1314
3e293154 1315static void
8b7773a4
MJ
1316determine_known_aggregate_parts (gimple call, tree arg,
1317 struct ipa_jump_func *jfunc)
3e293154 1318{
8b7773a4
MJ
1319 struct ipa_known_agg_contents_list *list = NULL;
1320 int item_count = 0, const_count = 0;
1321 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1322 gimple_stmt_iterator gsi;
8b7773a4
MJ
1323 tree arg_base;
1324 bool check_ref, by_ref;
1325 ao_ref r;
3e293154 1326
8b7773a4
MJ
1327 /* The function operates in three stages. First, we prepare check_ref, r,
1328 arg_base and arg_offset based on what is actually passed as an actual
1329 argument. */
3e293154 1330
8b7773a4
MJ
1331 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1332 {
1333 by_ref = true;
1334 if (TREE_CODE (arg) == SSA_NAME)
1335 {
1336 tree type_size;
1337 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1338 return;
1339 check_ref = true;
1340 arg_base = arg;
1341 arg_offset = 0;
1342 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1343 arg_size = tree_low_cst (type_size, 1);
1344 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1345 }
1346 else if (TREE_CODE (arg) == ADDR_EXPR)
1347 {
1348 HOST_WIDE_INT arg_max_size;
1349
1350 arg = TREE_OPERAND (arg, 0);
1351 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1352 &arg_max_size);
1353 if (arg_max_size == -1
1354 || arg_max_size != arg_size
1355 || arg_offset < 0)
1356 return;
1357 if (DECL_P (arg_base))
1358 {
1359 tree size;
1360 check_ref = false;
1361 size = build_int_cst (integer_type_node, arg_size);
1362 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1363 }
1364 else
1365 return;
1366 }
1367 else
1368 return;
1369 }
1370 else
1371 {
1372 HOST_WIDE_INT arg_max_size;
1373
1374 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1375
1376 by_ref = false;
1377 check_ref = false;
1378 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1379 &arg_max_size);
1380 if (arg_max_size == -1
1381 || arg_max_size != arg_size
1382 || arg_offset < 0)
1383 return;
1384
1385 ao_ref_init (&r, arg);
1386 }
1387
1388 /* Second stage walks back the BB, looks at individual statements and as long
1389 as it is confident of how the statements affect contents of the
1390 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1391 describing it. */
1392 gsi = gsi_for_stmt (call);
726a989a
RB
1393 gsi_prev (&gsi);
1394 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1395 {
8b7773a4 1396 struct ipa_known_agg_contents_list *n, **p;
726a989a 1397 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1398 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1399 tree lhs, rhs, lhs_base;
1400 bool partial_overlap;
3e293154 1401
8b7773a4 1402 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1403 continue;
8b75fc9b 1404 if (!gimple_assign_single_p (stmt))
8b7773a4 1405 break;
3e293154 1406
726a989a
RB
1407 lhs = gimple_assign_lhs (stmt);
1408 rhs = gimple_assign_rhs1 (stmt);
7d2fb524
MJ
1409 if (!is_gimple_reg_type (rhs)
1410 || TREE_CODE (lhs) == BIT_FIELD_REF
1411 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1412 break;
3e293154 1413
8b7773a4
MJ
1414 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1415 &lhs_max_size);
1416 if (lhs_max_size == -1
1417 || lhs_max_size != lhs_size
1418 || (lhs_offset < arg_offset
1419 && lhs_offset + lhs_size > arg_offset)
1420 || (lhs_offset < arg_offset + arg_size
1421 && lhs_offset + lhs_size > arg_offset + arg_size))
1422 break;
3e293154 1423
8b7773a4 1424 if (check_ref)
518dc859 1425 {
8b7773a4
MJ
1426 if (TREE_CODE (lhs_base) != MEM_REF
1427 || TREE_OPERAND (lhs_base, 0) != arg_base
1428 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1429 break;
3e293154 1430 }
8b7773a4 1431 else if (lhs_base != arg_base)
774b8a55
MJ
1432 {
1433 if (DECL_P (lhs_base))
1434 continue;
1435 else
1436 break;
1437 }
3e293154 1438
8b7773a4
MJ
1439 if (lhs_offset + lhs_size < arg_offset
1440 || lhs_offset >= (arg_offset + arg_size))
1441 continue;
1442
1443 partial_overlap = false;
1444 p = &list;
1445 while (*p && (*p)->offset < lhs_offset)
3e293154 1446 {
8b7773a4 1447 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1448 {
8b7773a4
MJ
1449 partial_overlap = true;
1450 break;
3e293154 1451 }
8b7773a4
MJ
1452 p = &(*p)->next;
1453 }
1454 if (partial_overlap)
1455 break;
1456 if (*p && (*p)->offset < lhs_offset + lhs_size)
1457 {
1458 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1459 /* We already know this value is subsequently overwritten with
1460 something else. */
1461 continue;
3e293154 1462 else
8b7773a4
MJ
1463 /* Otherwise this is a partial overlap which we cannot
1464 represent. */
1465 break;
3e293154 1466 }
3e293154 1467
8b7773a4
MJ
1468 rhs = get_ssa_def_if_simple_copy (rhs);
1469 n = XALLOCA (struct ipa_known_agg_contents_list);
1470 n->size = lhs_size;
1471 n->offset = lhs_offset;
1472 if (is_gimple_ip_invariant (rhs))
1473 {
1474 n->constant = rhs;
1475 const_count++;
1476 }
1477 else
1478 n->constant = NULL_TREE;
1479 n->next = *p;
1480 *p = n;
3e293154 1481
8b7773a4 1482 item_count++;
dfea20f1
MJ
1483 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1484 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1485 break;
1486 }
be95e2b9 1487
8b7773a4
MJ
  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */
3e293154 1491
8b7773a4 1492 if (const_count)
3e293154 1493 {
8b7773a4 1494 jfunc->agg.by_ref = by_ref;
9771b263 1495 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1496 while (list)
1497 {
1498 if (list->constant)
1499 {
f32682ca
DN
1500 struct ipa_agg_jf_item item;
1501 item.offset = list->offset - arg_offset;
7d2fb524 1502 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1503 item.value = unshare_expr_without_location (list->constant);
9771b263 1504 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1505 }
1506 list = list->next;
1507 }
3e293154
MJ
1508 }
1509}
1510
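/* Return the type of the Ith formal parameter of the callee of edge E, as far
   as it can be determined, or NULL.  The type is looked up in the callee's
   TYPE_ARG_TYPES (or in the fntype of the call statement for indirect calls)
   and, failing that, in the callee's DECL_ARGUMENTS.  */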
06d65050
JH
1511static tree
1512ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1513{
1514 int n;
1515 tree type = (e->callee
67348ccc 1516 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1517 : gimple_call_fntype (e->call_stmt));
1518 tree t = TYPE_ARG_TYPES (type);
1519
1520 for (n = 0; n < i; n++)
1521 {
1522 if (!t)
1523 break;
1524 t = TREE_CHAIN (t);
1525 }
1526 if (t)
1527 return TREE_VALUE (t);
1528 if (!e->callee)
1529 return NULL;
67348ccc 1530 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1531 for (n = 0; n < i; n++)
1532 {
1533 if (!t)
1534 return NULL;
1535 t = TREE_CHAIN (t);
1536 }
1537 if (t)
1538 return TREE_TYPE (t);
1539 return NULL;
1540}
1541
3e293154
MJ
1542/* Compute jump function for all arguments of callsite CS and insert the
1543 information in the jump_functions array in the ipa_edge_args corresponding
1544 to this callsite. */
be95e2b9 1545
749aa96d 1546static void
c419671c 1547ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1548 struct cgraph_edge *cs)
3e293154
MJ
1549{
1550 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1551 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1552 gimple call = cs->call_stmt;
8b7773a4 1553 int n, arg_num = gimple_call_num_args (call);
3e293154 1554
606d9a09 1555 if (arg_num == 0 || args->jump_functions)
3e293154 1556 return;
9771b263 1557 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1558
96e24d49
JJ
1559 if (gimple_call_internal_p (call))
1560 return;
5fe8e757
MJ
1561 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1562 return;
1563
8b7773a4
MJ
1564 for (n = 0; n < arg_num; n++)
1565 {
1566 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1567 tree arg = gimple_call_arg (call, n);
06d65050 1568 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1569
8b7773a4 1570 if (is_gimple_ip_invariant (arg))
4502fe8d 1571 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1572 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1573 && TREE_CODE (arg) == PARM_DECL)
1574 {
1575 int index = ipa_get_param_decl_index (info, arg);
1576
1577 gcc_assert (index >=0);
1578 /* Aggregate passed by value, check for pass-through, otherwise we
1579 will attempt to fill in aggregate contents later in this
1580 for cycle. */
1581 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1582 {
b8f6e610 1583 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1584 continue;
1585 }
1586 }
1587 else if (TREE_CODE (arg) == SSA_NAME)
1588 {
1589 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1590 {
1591 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1592 if (index >= 0)
8b7773a4 1593 {
b8f6e610 1594 bool agg_p, type_p;
8b7773a4
MJ
1595 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1596 call, arg);
06d65050
JH
1597 if (param_type && POINTER_TYPE_P (param_type))
1598 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1599 call, jfunc);
1600 else
1601 type_p = false;
b8f6e610 1602 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1603 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1604 type_p);
8b7773a4
MJ
1605 }
1606 }
1607 else
1608 {
1609 gimple stmt = SSA_NAME_DEF_STMT (arg);
1610 if (is_gimple_assign (stmt))
1611 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1612 call, stmt, arg, param_type);
8b7773a4
MJ
1613 else if (gimple_code (stmt) == GIMPLE_PHI)
1614 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1615 call, stmt, param_type);
8b7773a4
MJ
1616 }
1617 }
1618 else
06d65050
JH
1619 compute_known_type_jump_func (arg, jfunc, call,
1620 param_type
1621 && POINTER_TYPE_P (param_type)
1622 ? TREE_TYPE (param_type)
1623 : NULL);
3e293154 1624
8b7773a4
MJ
1625 if ((jfunc->type != IPA_JF_PASS_THROUGH
1626 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1627 && (jfunc->type != IPA_JF_ANCESTOR
1628 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1629 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1630 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1631 determine_known_aggregate_parts (call, arg, jfunc);
1632 }
3e293154
MJ
1633}
1634
749aa96d
MJ
1635/* Compute jump functions for all edges - both direct and indirect - outgoing
1636 from NODE. Also count the actual arguments in the process. */
1637
062c604f
MJ
1638static void
1639ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1640 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1641{
1642 struct cgraph_edge *cs;
1643
1644 for (cs = node->callees; cs; cs = cs->next_callee)
1645 {
d7da5cc8
MJ
1646 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1647 NULL);
749aa96d
MJ
1648 /* We do not need to bother analyzing calls to unknown
1649 functions unless they may become known during lto/whopr. */
67348ccc 1650 if (!callee->definition && !flag_lto)
749aa96d 1651 continue;
c419671c 1652 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1653 }
1654
1655 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1656 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1657}
1658
8b7773a4
MJ
1659/* If STMT looks like a statement loading a value from a member pointer formal
1660 parameter, return that parameter and store the offset of the field to
1661 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1662 might be clobbered). If USE_DELTA, then we look for a use of the delta
1663 field rather than the pfn. */
be95e2b9 1664
3e293154 1665static tree
8b7773a4
MJ
1666ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1667 HOST_WIDE_INT *offset_p)
3e293154 1668{
8b7773a4
MJ
1669 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1670
1671 if (!gimple_assign_single_p (stmt))
1672 return NULL_TREE;
3e293154 1673
8b7773a4 1674 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1675 if (TREE_CODE (rhs) == COMPONENT_REF)
1676 {
1677 ref_field = TREE_OPERAND (rhs, 1);
1678 rhs = TREE_OPERAND (rhs, 0);
1679 }
1680 else
1681 ref_field = NULL_TREE;
d242d063 1682 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1683 return NULL_TREE;
3e293154 1684 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1685 if (TREE_CODE (rec) != ADDR_EXPR)
1686 return NULL_TREE;
1687 rec = TREE_OPERAND (rec, 0);
3e293154 1688 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1689 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1690 return NULL_TREE;
d242d063 1691 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1692
8b7773a4
MJ
1693 if (use_delta)
1694 fld = delta_field;
1695 else
1696 fld = ptr_field;
1697 if (offset_p)
1698 *offset_p = int_bit_position (fld);
1699
ae788515
EB
1700 if (ref_field)
1701 {
1702 if (integer_nonzerop (ref_offset))
1703 return NULL_TREE;
ae788515
EB
1704 return ref_field == fld ? rec : NULL_TREE;
1705 }
3e293154 1706 else
8b7773a4
MJ
1707 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1708 : NULL_TREE;
3e293154
MJ
1709}
1710
1711/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1712
3e293154
MJ
1713static bool
1714ipa_is_ssa_with_stmt_def (tree t)
1715{
1716 if (TREE_CODE (t) == SSA_NAME
1717 && !SSA_NAME_IS_DEFAULT_DEF (t))
1718 return true;
1719 else
1720 return false;
1721}
1722
40591473
MJ
1723/* Find the indirect call graph edge corresponding to STMT and mark it as a
1724 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1725 indirect call graph edge. */
be95e2b9 1726
40591473
MJ
1727static struct cgraph_edge *
1728ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1729{
e33c6cd6 1730 struct cgraph_edge *cs;
3e293154 1731
5f902d76 1732 cs = cgraph_edge (node, stmt);
b258210c 1733 cs->indirect_info->param_index = param_index;
8b7773a4 1734 cs->indirect_info->offset = 0;
40591473 1735 cs->indirect_info->polymorphic = 0;
8b7773a4 1736 cs->indirect_info->agg_contents = 0;
c13bc3d9 1737 cs->indirect_info->member_ptr = 0;
40591473 1738 return cs;
3e293154
MJ
1739}
1740
e33c6cd6 1741/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1742 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1743 intermediate information about each formal parameter. Currently it checks
1744 whether the call calls a pointer that is a formal parameter and if so, the
1745 parameter is marked with the called flag and an indirect call graph edge
1746 describing the call is created. This is very simple for ordinary pointers
1747 represented in SSA but not-so-nice when it comes to member pointers. The
1748 ugly part of this function does nothing more than trying to match the
1749 pattern of such a call. An example of such a pattern is the gimple dump
1750 below, the call is on the last line:
3e293154 1751
ae788515
EB
1752 <bb 2>:
1753 f$__delta_5 = f.__delta;
1754 f$__pfn_24 = f.__pfn;
1755
1756 or
3e293154 1757 <bb 2>:
d242d063
MJ
1758 f$__delta_5 = MEM[(struct *)&f];
1759 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1760
ae788515 1761 and a few lines below:
8aa29647
MJ
1762
1763 <bb 5>
3e293154
MJ
1764 D.2496_3 = (int) f$__pfn_24;
1765 D.2497_4 = D.2496_3 & 1;
1766 if (D.2497_4 != 0)
1767 goto <bb 3>;
1768 else
1769 goto <bb 4>;
1770
8aa29647 1771 <bb 6>:
3e293154
MJ
1772 D.2500_7 = (unsigned int) f$__delta_5;
1773 D.2501_8 = &S + D.2500_7;
1774 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1775 D.2503_10 = *D.2502_9;
1776 D.2504_12 = f$__pfn_24 + -1;
1777 D.2505_13 = (unsigned int) D.2504_12;
1778 D.2506_14 = D.2503_10 + D.2505_13;
1779 D.2507_15 = *D.2506_14;
1780 iftmp.11_16 = (String:: *) D.2507_15;
1781
8aa29647 1782 <bb 7>:
3e293154
MJ
1783 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1784 D.2500_19 = (unsigned int) f$__delta_5;
1785 D.2508_20 = &S + D.2500_19;
1786 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1787
1788 Such patterns are results of simple calls to a member pointer:
1789
1790 int doprinting (int (MyString::* f)(int) const)
1791 {
1792 MyString S ("somestring");
1793
1794 return (S.*f)(4);
1795 }
8b7773a4
MJ
1796
1797 Moreover, the function also looks for called pointers loaded from aggregates
1798 passed by value or reference. */
3e293154
MJ
1799
1800static void
b258210c
MJ
1801ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1802 struct ipa_node_params *info,
c419671c 1803 struct param_analysis_info *parms_ainfo,
b258210c 1804 gimple call, tree target)
3e293154 1805{
726a989a 1806 gimple def;
3e293154 1807 tree n1, n2;
726a989a
RB
1808 gimple d1, d2;
1809 tree rec, rec2, cond;
1810 gimple branch;
3e293154 1811 int index;
3e293154 1812 basic_block bb, virt_bb, join;
8b7773a4
MJ
1813 HOST_WIDE_INT offset;
1814 bool by_ref;
3e293154 1815
3e293154
MJ
1816 if (SSA_NAME_IS_DEFAULT_DEF (target))
1817 {
b258210c 1818 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1819 index = ipa_get_param_decl_index (info, var);
1820 if (index >= 0)
40591473 1821 ipa_note_param_call (node, index, call);
3e293154
MJ
1822 return;
1823 }
1824
8b7773a4
MJ
1825 def = SSA_NAME_DEF_STMT (target);
1826 if (gimple_assign_single_p (def)
d044dd17 1827 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4
MJ
1828 gimple_assign_rhs1 (def), &index, &offset,
1829 &by_ref))
1830 {
1831 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1832 cs->indirect_info->offset = offset;
1833 cs->indirect_info->agg_contents = 1;
1834 cs->indirect_info->by_ref = by_ref;
1835 return;
1836 }
1837
3e293154
MJ
1838 /* Now we need to try to match the complex pattern of calling a member
1839 pointer. */
8b7773a4
MJ
1840 if (gimple_code (def) != GIMPLE_PHI
1841 || gimple_phi_num_args (def) != 2
1842 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1843 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1844 return;
1845
3e293154
MJ
1846 /* First, we need to check whether one of these is a load from a member
1847 pointer that is a parameter to this function. */
1848 n1 = PHI_ARG_DEF (def, 0);
1849 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1850 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1851 return;
1852 d1 = SSA_NAME_DEF_STMT (n1);
1853 d2 = SSA_NAME_DEF_STMT (n2);
1854
8aa29647 1855 join = gimple_bb (def);
8b7773a4 1856 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1857 {
8b7773a4 1858 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1859 return;
1860
8aa29647 1861 bb = EDGE_PRED (join, 0)->src;
726a989a 1862 virt_bb = gimple_bb (d2);
3e293154 1863 }
8b7773a4 1864 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1865 {
8aa29647 1866 bb = EDGE_PRED (join, 1)->src;
726a989a 1867 virt_bb = gimple_bb (d1);
3e293154
MJ
1868 }
1869 else
1870 return;
1871
1872 /* Second, we need to check that the basic blocks are laid out in the way
1873 corresponding to the pattern. */
1874
3e293154
MJ
1875 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1876 || single_pred (virt_bb) != bb
1877 || single_succ (virt_bb) != join)
1878 return;
1879
1880 /* Third, let's see that the branching is done depending on the least
1881 significant bit of the pfn. */
1882
1883 branch = last_stmt (bb);
8aa29647 1884 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1885 return;
1886
12430896
RG
1887 if ((gimple_cond_code (branch) != NE_EXPR
1888 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1889 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1890 return;
3e293154 1891
726a989a 1892 cond = gimple_cond_lhs (branch);
3e293154
MJ
1893 if (!ipa_is_ssa_with_stmt_def (cond))
1894 return;
1895
726a989a 1896 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1897 if (!is_gimple_assign (def)
726a989a
RB
1898 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1899 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1900 return;
726a989a
RB
1901
1902 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1903 if (!ipa_is_ssa_with_stmt_def (cond))
1904 return;
1905
726a989a 1906 def = SSA_NAME_DEF_STMT (cond);
3e293154 1907
8b75fc9b
MJ
1908 if (is_gimple_assign (def)
1909 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1910 {
726a989a 1911 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1912 if (!ipa_is_ssa_with_stmt_def (cond))
1913 return;
726a989a 1914 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1915 }
1916
6f7b8b70
RE
1917 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1918 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1919 == ptrmemfunc_vbit_in_delta),
1920 NULL);
3e293154
MJ
1921 if (rec != rec2)
1922 return;
1923
1924 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1925 if (index >= 0
1926 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1927 {
1928 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1929 cs->indirect_info->offset = offset;
1930 cs->indirect_info->agg_contents = 1;
c13bc3d9 1931 cs->indirect_info->member_ptr = 1;
8b7773a4 1932 }
3e293154
MJ
1933
1934 return;
1935}
1936
b258210c
MJ
1937/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1938 object referenced in the expression is a formal parameter of the caller
1939 (described by INFO), create a call note for the statement. */
1940
1941static void
1942ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1943 struct ipa_node_params *info, gimple call,
1944 tree target)
1945{
40591473
MJ
1946 struct cgraph_edge *cs;
1947 struct cgraph_indirect_call_info *ii;
f65cf2b7 1948 struct ipa_jump_func jfunc;
b258210c 1949 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1950 int index;
40591473 1951 HOST_WIDE_INT anc_offset;
b258210c 1952
05842ff5
MJ
1953 if (!flag_devirtualize)
1954 return;
1955
40591473 1956 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1957 return;
1958
40591473
MJ
1959 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1960 {
1961 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1962 return;
b258210c 1963
40591473
MJ
1964 anc_offset = 0;
1965 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1966 gcc_assert (index >= 0);
06d65050
JH
1967 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1968 call, &jfunc))
40591473
MJ
1969 return;
1970 }
1971 else
1972 {
1973 gimple stmt = SSA_NAME_DEF_STMT (obj);
1974 tree expr;
1975
1976 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1977 if (!expr)
1978 return;
1979 index = ipa_get_param_decl_index (info,
1980 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1981 gcc_assert (index >= 0);
06d65050
JH
1982 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1983 call, &jfunc, anc_offset))
40591473
MJ
1984 return;
1985 }
1986
1987 cs = ipa_note_param_call (node, index, call);
1988 ii = cs->indirect_info;
8b7773a4 1989 ii->offset = anc_offset;
40591473 1990 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
c49bdb2e 1991 ii->otr_type = obj_type_ref_class (target);
40591473 1992 ii->polymorphic = 1;
b258210c
MJ
1993}
1994
1995/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 1996 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 1997 containing intermediate information about each formal parameter. */
b258210c
MJ
1998
1999static void
2000ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2001 struct ipa_node_params *info,
c419671c 2002 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2003{
2004 tree target = gimple_call_fn (call);
2005
25583c4f
RS
2006 if (!target)
2007 return;
b258210c 2008 if (TREE_CODE (target) == SSA_NAME)
c419671c 2009 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2010 else if (virtual_method_call_p (target))
b258210c
MJ
2011 ipa_analyze_virtual_call_uses (node, info, call, target);
2012}
2013
2014
e33c6cd6
MJ
2015/* Analyze the call statement STMT with respect to formal parameters (described
2016 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2017 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2018 intermediate information about each formal parameter. */
be95e2b9 2019
3e293154 2020static void
e33c6cd6 2021ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2022 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2023{
726a989a 2024 if (is_gimple_call (stmt))
c419671c 2025 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2026}
2027
2028/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2029 If OP is a parameter declaration, mark it as used in the info structure
2030 passed in DATA. */
2031
2032static bool
2033visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2034 tree op, void *data)
2035{
2036 struct ipa_node_params *info = (struct ipa_node_params *) data;
2037
2038 op = get_base_address (op);
2039 if (op
2040 && TREE_CODE (op) == PARM_DECL)
2041 {
2042 int index = ipa_get_param_decl_index (info, op);
2043 gcc_assert (index >= 0);
310bc633 2044 ipa_set_param_used (info, index, true);
062c604f
MJ
2045 }
2046
2047 return false;
3e293154
MJ
2048}
2049
2050/* Scan the function body of NODE and inspect the uses of formal parameters.
2051 Store the findings in various structures of the associated ipa_node_params
c419671c 2052 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2053 vector containing intermediate information about each formal parameter. */
be95e2b9 2054
062c604f
MJ
2055static void
2056ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2057 struct param_analysis_info *parms_ainfo)
3e293154 2058{
67348ccc 2059 tree decl = node->decl;
3e293154
MJ
2060 basic_block bb;
2061 struct function *func;
726a989a 2062 gimple_stmt_iterator gsi;
3e293154 2063 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2064 int i;
3e293154 2065
726a989a 2066 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2067 return;
3e293154 2068
5fe8e757
MJ
2069 info->uses_analysis_done = 1;
2070 if (ipa_func_spec_opts_forbid_analysis_p (node))
2071 {
2072 for (i = 0; i < ipa_get_param_count (info); i++)
2073 {
2074 ipa_set_param_used (info, i, true);
2075 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2076 }
2077 return;
2078 }
2079
062c604f
MJ
2080 for (i = 0; i < ipa_get_param_count (info); i++)
2081 {
2082 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2083 int controlled_uses = 0;
2084
062c604f
MJ
2085 /* For SSA regs see if parameter is used. For non-SSA we compute
2086 the flag during modification analysis. */
4502fe8d
MJ
2087 if (is_gimple_reg (parm))
2088 {
67348ccc 2089 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2090 parm);
2091 if (ddef && !has_zero_uses (ddef))
2092 {
2093 imm_use_iterator imm_iter;
2094 use_operand_p use_p;
2095
2096 ipa_set_param_used (info, i, true);
2097 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2098 if (!is_gimple_call (USE_STMT (use_p)))
2099 {
2100 controlled_uses = IPA_UNDESCRIBED_USE;
2101 break;
2102 }
2103 else
2104 controlled_uses++;
2105 }
2106 else
2107 controlled_uses = 0;
2108 }
2109 else
2110 controlled_uses = IPA_UNDESCRIBED_USE;
2111 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2112 }
2113
3e293154
MJ
2114 func = DECL_STRUCT_FUNCTION (decl);
2115 FOR_EACH_BB_FN (bb, func)
2116 {
726a989a 2117 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2118 {
726a989a 2119 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2120
2121 if (is_gimple_debug (stmt))
2122 continue;
2123
c419671c 2124 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2125 walk_stmt_load_store_addr_ops (stmt, info,
2126 visit_ref_for_mod_analysis,
2127 visit_ref_for_mod_analysis,
2128 visit_ref_for_mod_analysis);
518dc859 2129 }
355a7673 2130 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2131 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2132 visit_ref_for_mod_analysis,
2133 visit_ref_for_mod_analysis,
2134 visit_ref_for_mod_analysis);
518dc859 2135 }
3e293154
MJ
2136}
2137
2c9561b5
MJ
2138/* Free the bitmaps allocated in PARMS_AINFO, assuming there are PARAM_COUNT
2138 parameters. */
2139
2140static void
2141free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2142{
2143 int i;
2144
2145 for (i = 0; i < param_count; i++)
2146 {
2147 if (parms_ainfo[i].parm_visited_statements)
2148 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2149 if (parms_ainfo[i].pt_visited_statements)
2150 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2151 }
2152}
2153
dd5a833e
MS
2154/* Initialize the array describing properties of formal parameters
2155 of NODE, analyze their uses and compute jump functions associated
2156 with actual arguments of calls from within NODE. */
062c604f
MJ
2157
2158void
2159ipa_analyze_node (struct cgraph_node *node)
2160{
57dbdc5a 2161 struct ipa_node_params *info;
c419671c 2162 struct param_analysis_info *parms_ainfo;
2c9561b5 2163 int param_count;
062c604f 2164
57dbdc5a
MJ
2165 ipa_check_create_node_params ();
2166 ipa_check_create_edge_args ();
2167 info = IPA_NODE_REF (node);
67348ccc 2168 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2169 ipa_initialize_node_params (node);
2170
2171 param_count = ipa_get_param_count (info);
c419671c
MJ
2172 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2173 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2174
c419671c
MJ
2175 ipa_analyze_params_uses (node, parms_ainfo);
2176 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2177
2c9561b5 2178 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2179 pop_cfun ();
062c604f
MJ
2180}
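/* A minimal usage sketch, assuming the conventions of the IPA passes that
   consume this analysis (the actual driver lives elsewhere, e.g. in ipa-cp.c,
   and may differ):

     struct cgraph_node *node;

     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       ipa_analyze_node (node);

   i.e. parameter descriptors, use information and jump functions are filled
   in for every function with a gimple body before propagation starts.  */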
2181
e248d83f
MJ
2182/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2183 attempt a type-based devirtualization. If successful, return the
2184 target function declaration, otherwise return NULL. */
2185
2186tree
2187ipa_intraprocedural_devirtualization (gimple call)
2188{
2189 tree binfo, token, fndecl;
2190 struct ipa_jump_func jfunc;
2191 tree otr = gimple_call_fn (call);
2192
2193 jfunc.type = IPA_JF_UNKNOWN;
2194 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2195 call, obj_type_ref_class (otr));
e248d83f
MJ
2196 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2197 return NULL_TREE;
2198 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2199 if (!binfo)
2200 return NULL_TREE;
2201 token = OBJ_TYPE_REF_TOKEN (otr);
2202 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2203 binfo);
450ad0cd
JH
2204#ifdef ENABLE_CHECKING
2205 if (fndecl)
2206 gcc_assert (possible_polymorphic_call_target_p
2207 (otr, cgraph_get_node (fndecl)));
2208#endif
e248d83f
MJ
2209 return fndecl;
2210}
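/* A hedged usage sketch (hypothetical caller, not part of this file): a
   folding pass holding a GIMPLE_CALL statement STMT whose callee is an
   OBJ_TYPE_REF could attempt something along the lines of

     tree fndecl = ipa_intraprocedural_devirtualization (stmt);
     if (fndecl)
       {
         gimple_call_set_fndecl (stmt, fndecl);
         update_stmt (stmt);
       }

   The real callers in GCC may perform additional checks before rewriting
   the call.  */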
062c604f 2211
61502ca8 2212/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
 2213 is being inlined, knowing that DST is of type ancestor and SRC of known
2214 type. */
2215
2216static void
2217combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2218 struct ipa_jump_func *dst)
2219{
c7573249
MJ
2220 HOST_WIDE_INT combined_offset;
2221 tree combined_type;
b258210c 2222
b8f6e610
MJ
2223 if (!ipa_get_jf_ancestor_type_preserved (dst))
2224 {
2225 dst->type = IPA_JF_UNKNOWN;
2226 return;
2227 }
2228
7b872d9e
MJ
2229 combined_offset = ipa_get_jf_known_type_offset (src)
2230 + ipa_get_jf_ancestor_offset (dst);
2231 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2232
7b872d9e
MJ
2233 ipa_set_jf_known_type (dst, combined_offset,
2234 ipa_get_jf_known_type_base_type (src),
2235 combined_type);
b258210c
MJ
2236}
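/* A worked example of the combination above, with made-up numbers: if SRC is
   a known-type jump function saying an object of base type B is known at
   offset 0, and DST is an ancestor jump function adding an offset of 64 bits
   and ancestor type A, then, provided the ancestor preserves the type, DST
   becomes a known-type jump function with offset 0 + 64 = 64, base type B
   and component type A.  */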
2237
be95e2b9 2238/* Update the jump functions associated with call graph edge E when the call
3e293154 2239 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2240 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2241
3e293154
MJ
2242static void
2243update_jump_functions_after_inlining (struct cgraph_edge *cs,
2244 struct cgraph_edge *e)
2245{
2246 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2247 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2248 int count = ipa_get_cs_argument_count (args);
2249 int i;
2250
2251 for (i = 0; i < count; i++)
2252 {
b258210c 2253 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2254
685b0d13
MJ
2255 if (dst->type == IPA_JF_ANCESTOR)
2256 {
b258210c 2257 struct ipa_jump_func *src;
8b7773a4 2258 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2259
b258210c
MJ
2260 /* Variable number of arguments can cause havoc if we try to access
2261 one that does not exist in the inlined edge. So make sure we
2262 don't. */
8b7773a4 2263 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2264 {
2265 dst->type = IPA_JF_UNKNOWN;
2266 continue;
2267 }
2268
8b7773a4
MJ
2269 src = ipa_get_ith_jump_func (top, dst_fid);
2270
2271 if (src->agg.items
2272 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2273 {
2274 struct ipa_agg_jf_item *item;
2275 int j;
2276
2277 /* Currently we do not produce clobber aggregate jump functions,
2278 replace with merging when we do. */
2279 gcc_assert (!dst->agg.items);
2280
9771b263 2281 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2282 dst->agg.by_ref = src->agg.by_ref;
9771b263 2283 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2284 item->offset -= dst->value.ancestor.offset;
2285 }
2286
b258210c
MJ
2287 if (src->type == IPA_JF_KNOWN_TYPE)
2288 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2289 else if (src->type == IPA_JF_PASS_THROUGH
2290 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2291 {
2292 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2293 dst->value.ancestor.agg_preserved &=
2294 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2295 dst->value.ancestor.type_preserved &=
2296 src->value.pass_through.type_preserved;
8b7773a4 2297 }
b258210c
MJ
2298 else if (src->type == IPA_JF_ANCESTOR)
2299 {
2300 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2301 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2302 dst->value.ancestor.agg_preserved &=
2303 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2304 dst->value.ancestor.type_preserved &=
2305 src->value.ancestor.type_preserved;
b258210c
MJ
2306 }
2307 else
2308 dst->type = IPA_JF_UNKNOWN;
2309 }
2310 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2311 {
b258210c
MJ
2312 struct ipa_jump_func *src;
2313 /* We must check range due to calls with variable number of arguments
2314 and we cannot combine jump functions with operations. */
2315 if (dst->value.pass_through.operation == NOP_EXPR
2316 && (dst->value.pass_through.formal_id
2317 < ipa_get_cs_argument_count (top)))
2318 {
8b7773a4
MJ
2319 int dst_fid = dst->value.pass_through.formal_id;
2320 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2321 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2322
b8f6e610
MJ
2323 switch (src->type)
2324 {
2325 case IPA_JF_UNKNOWN:
2326 dst->type = IPA_JF_UNKNOWN;
2327 break;
2328 case IPA_JF_KNOWN_TYPE:
2329 ipa_set_jf_known_type (dst,
2330 ipa_get_jf_known_type_offset (src),
2331 ipa_get_jf_known_type_base_type (src),
2332 ipa_get_jf_known_type_base_type (src));
2333 break;
2334 case IPA_JF_CONST:
2335 ipa_set_jf_cst_copy (dst, src);
2336 break;
2337
2338 case IPA_JF_PASS_THROUGH:
2339 {
2340 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2341 enum tree_code operation;
2342 operation = ipa_get_jf_pass_through_operation (src);
2343
2344 if (operation == NOP_EXPR)
2345 {
2346 bool agg_p, type_p;
2347 agg_p = dst_agg_p
2348 && ipa_get_jf_pass_through_agg_preserved (src);
2349 type_p = ipa_get_jf_pass_through_type_preserved (src)
2350 && ipa_get_jf_pass_through_type_preserved (dst);
2351 ipa_set_jf_simple_pass_through (dst, formal_id,
2352 agg_p, type_p);
2353 }
2354 else
2355 {
2356 tree operand = ipa_get_jf_pass_through_operand (src);
2357 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2358 operation);
2359 }
2360 break;
2361 }
2362 case IPA_JF_ANCESTOR:
2363 {
2364 bool agg_p, type_p;
2365 agg_p = dst_agg_p
2366 && ipa_get_jf_ancestor_agg_preserved (src);
2367 type_p = ipa_get_jf_ancestor_type_preserved (src)
2368 && ipa_get_jf_pass_through_type_preserved (dst);
2369 ipa_set_ancestor_jf (dst,
2370 ipa_get_jf_ancestor_offset (src),
2371 ipa_get_jf_ancestor_type (src),
2372 ipa_get_jf_ancestor_formal_id (src),
2373 agg_p, type_p);
2374 break;
2375 }
2376 default:
2377 gcc_unreachable ();
2378 }
8b7773a4
MJ
2379
2380 if (src->agg.items
b8f6e610 2381 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2382 {
2383 /* Currently we do not produce clobber aggregate jump
2384 functions, replace with merging when we do. */
2385 gcc_assert (!dst->agg.items);
2386
2387 dst->agg.by_ref = src->agg.by_ref;
9771b263 2388 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2389 }
b258210c
MJ
2390 }
2391 else
2392 dst->type = IPA_JF_UNKNOWN;
3e293154 2393 }
b258210c
MJ
2394 }
2395}
2396
2397/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2398 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2399
3949c4a7 2400struct cgraph_edge *
81fa35bd 2401ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2402{
2403 struct cgraph_node *callee;
0f378cb5 2404 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2405 bool unreachable = false;
b258210c 2406
ceeffab0
MJ
2407 if (TREE_CODE (target) == ADDR_EXPR)
2408 target = TREE_OPERAND (target, 0);
b258210c 2409 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2410 {
2411 target = canonicalize_constructor_val (target, NULL);
2412 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2413 {
c13bc3d9
MJ
2414 if (ie->indirect_info->member_ptr)
2415 /* Member pointer call that goes through a VMT lookup. */
2416 return NULL;
2417
a0a7b611
JH
2418 if (dump_file)
2419 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2420 " in %s/%i, making it unreachable.\n",
67348ccc 2421 cgraph_node_name (ie->caller), ie->caller->order);
48b1474e
MJ
2422 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2423 callee = cgraph_get_create_node (target);
2424 unreachable = true;
a0a7b611 2425 }
48b1474e
MJ
2426 else
2427 callee = cgraph_get_node (target);
a0a7b611 2428 }
48b1474e
MJ
2429 else
2430 callee = cgraph_get_node (target);
a0a7b611
JH
2431
 2432 /* Because may-edges are not explicitly represented and vtable may be external,
2433 we may create the first reference to the object in the unit. */
2434 if (!callee || callee->global.inlined_to)
2435 {
a0a7b611
JH
2436
 2437 /* We had better ensure we can refer to it.
2438 In the case of static functions we are out of luck, since we already
2439 removed its body. In the case of public functions we may or may
2440 not introduce the reference. */
2441 if (!canonicalize_constructor_val (target, NULL)
2442 || !TREE_PUBLIC (target))
2443 {
2444 if (dump_file)
2445 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2446 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
9de04252 2447 xstrdup (cgraph_node_name (ie->caller)),
67348ccc 2448 ie->caller->order,
9de04252 2449 xstrdup (cgraph_node_name (ie->callee)),
67348ccc 2450 ie->callee->order);
a0a7b611
JH
2451 return NULL;
2452 }
48f4a6fa 2453 callee = cgraph_get_create_real_symbol_node (target);
a0a7b611 2454 }
1dbee8c9 2455 ipa_check_create_node_params ();
ceeffab0 2456
81fa35bd
MJ
 2457 /* We cannot make edges to inline clones. It is a bug that someone removed
2458 the cgraph node too early. */
17afc0fe
JH
2459 gcc_assert (!callee->global.inlined_to);
2460
48b1474e 2461 if (dump_file && !unreachable)
b258210c
MJ
2462 {
2463 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2464 "(%s/%i -> %s/%i), for stmt ",
b258210c 2465 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
9de04252 2466 xstrdup (cgraph_node_name (ie->caller)),
67348ccc 2467 ie->caller->order,
042ae7d2 2468 xstrdup (cgraph_node_name (callee)),
67348ccc 2469 callee->order);
b258210c
MJ
2470 if (ie->call_stmt)
2471 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2472 else
2473 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2474 }
2475 ie = cgraph_make_edge_direct (ie, callee);
2476 es = inline_edge_summary (ie);
2477 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2478 - eni_size_weights.call_cost);
2479 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2480 - eni_time_weights.call_cost);
749aa96d 2481
b258210c 2482 return ie;
3e293154
MJ
2483}
2484
8b7773a4
MJ
2485/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2486 return NULL if there is not any. BY_REF specifies whether the value has to
2487 be passed by reference or by value. */
2488
2489tree
2490ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2491 HOST_WIDE_INT offset, bool by_ref)
2492{
2493 struct ipa_agg_jf_item *item;
2494 int i;
2495
2496 if (by_ref != agg->by_ref)
2497 return NULL;
2498
9771b263 2499 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2500 if (item->offset == offset)
2501 {
2502 /* Currently we do not have clobber values, return NULL for them once
2503 we do. */
2504 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2505 return item->value;
2506 }
8b7773a4
MJ
2507 return NULL;
2508}
2509
4502fe8d 2510/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2511 reference description RDESC. Return true if the reference has been
2512 successfully found and removed. */
4502fe8d 2513
568cda29 2514static bool
5e20cdc9 2515remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2516{
2517 struct ipa_ref *to_del;
2518 struct cgraph_edge *origin;
2519
2520 origin = rdesc->cs;
a854f856
MJ
2521 if (!origin)
2522 return false;
67348ccc 2523 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2524 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2525 if (!to_del)
2526 return false;
2527
4502fe8d
MJ
2528 ipa_remove_reference (to_del);
2529 if (dump_file)
2530 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2531 xstrdup (cgraph_node_name (origin->caller)),
67348ccc 2532 origin->caller->order, xstrdup (symtab_node_name (symbol)));
568cda29 2533 return true;
4502fe8d
MJ
2534}
2535
2536/* If JFUNC has a reference description with refcount different from
2537 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2538 NULL. JFUNC must be a constant jump function. */
2539
2540static struct ipa_cst_ref_desc *
2541jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2542{
2543 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2544 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2545 return rdesc;
2546 else
2547 return NULL;
2548}
2549
568cda29
MJ
2550/* If the value of constant jump function JFUNC is an address of a function
2551 declaration, return the associated call graph node. Otherwise return
2552 NULL. */
2553
2554static cgraph_node *
2555cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2556{
2557 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2558 tree cst = ipa_get_jf_constant (jfunc);
2559 if (TREE_CODE (cst) != ADDR_EXPR
2560 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2561 return NULL;
2562
2563 return cgraph_get_node (TREE_OPERAND (cst, 0));
2564}
2565
2566
2567/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2568 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2569 the edge specified in the rdesc. Return false if either the symbol or the
2570 reference could not be found, otherwise return true. */
2571
2572static bool
2573try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2574{
2575 struct ipa_cst_ref_desc *rdesc;
2576 if (jfunc->type == IPA_JF_CONST
2577 && (rdesc = jfunc_rdesc_usable (jfunc))
2578 && --rdesc->refcount == 0)
2579 {
5e20cdc9 2580 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2581 if (!symbol)
2582 return false;
2583
2584 return remove_described_reference (symbol, rdesc);
2585 }
2586 return true;
2587}
2588
b258210c
MJ
2589/* Try to find a destination for indirect edge IE that corresponds to a simple
2590 call or a call of a member function pointer and where the destination is a
2591 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2592 determined, return the newly direct edge, otherwise return NULL.
2593 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2594
b258210c
MJ
2595static struct cgraph_edge *
2596try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2597 struct ipa_jump_func *jfunc,
2598 struct ipa_node_params *new_root_info)
b258210c 2599{
4502fe8d 2600 struct cgraph_edge *cs;
b258210c 2601 tree target;
042ae7d2 2602 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2603
8b7773a4 2604 if (ie->indirect_info->agg_contents)
d250540a
MJ
2605 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2606 ie->indirect_info->offset,
2607 ie->indirect_info->by_ref);
b258210c 2608 else
d250540a
MJ
2609 target = ipa_value_from_jfunc (new_root_info, jfunc);
2610 if (!target)
2611 return NULL;
4502fe8d
MJ
2612 cs = ipa_make_edge_direct_to_target (ie, target);
2613
a12cd2db 2614 if (cs && !agg_contents)
568cda29
MJ
2615 {
2616 bool ok;
2617 gcc_checking_assert (cs->callee
ae6d0907
MJ
2618 && (cs != ie
2619 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2620 || !cgraph_node_for_jfunc (jfunc)
2621 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2622 ok = try_decrement_rdesc_refcount (jfunc);
2623 gcc_checking_assert (ok);
2624 }
4502fe8d
MJ
2625
2626 return cs;
b258210c
MJ
2627}
2628
d250540a
MJ
2629/* Try to find a destination for indirect edge IE that corresponds to a virtual
2630 call based on a formal parameter which is described by jump function JFUNC
2631 and if it can be determined, make it direct and return the direct edge.
2632 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2633 are relative to. */
b258210c
MJ
2634
2635static struct cgraph_edge *
2636try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2637 struct ipa_jump_func *jfunc,
2638 struct ipa_node_params *new_root_info)
3e293154 2639{
c7573249 2640 tree binfo, target;
b258210c 2641
d250540a
MJ
2642 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2643
da942ca0 2644 if (!binfo)
b258210c 2645 return NULL;
3e293154 2646
da942ca0
JH
2647 if (TREE_CODE (binfo) != TREE_BINFO)
2648 {
c49bdb2e
JH
2649 binfo = gimple_extract_devirt_binfo_from_cst
2650 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2651 if (!binfo)
2652 return NULL;
2653 }
2654
d250540a 2655 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2656 ie->indirect_info->otr_type);
b258210c 2657 if (binfo)
c7573249
MJ
2658 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2659 binfo);
b258210c
MJ
2660 else
2661 return NULL;
2662
2663 if (target)
450ad0cd
JH
2664 {
2665#ifdef ENABLE_CHECKING
2666 gcc_assert (possible_polymorphic_call_target_p
2667 (ie, cgraph_get_node (target)));
2668#endif
2669 return ipa_make_edge_direct_to_target (ie, target);
2670 }
b258210c
MJ
2671 else
2672 return NULL;
3e293154
MJ
2673}
2674
2675/* Update the indirect call edges associated with NODE when CS is being inlined,
2676 assuming NODE is (potentially indirectly) inlined into CS->callee.
2677 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2678 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2679 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 2680
f8e2a1ed 2681static bool
e33c6cd6
MJ
2682update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2683 struct cgraph_node *node,
9771b263 2684 vec<cgraph_edge_p> *new_edges)
3e293154 2685{
9e97ff61 2686 struct ipa_edge_args *top;
b258210c 2687 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2688 struct ipa_node_params *new_root_info;
f8e2a1ed 2689 bool res = false;
3e293154 2690
e33c6cd6 2691 ipa_check_create_edge_args ();
9e97ff61 2692 top = IPA_EDGE_REF (cs);
d250540a
MJ
2693 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2694 ? cs->caller->global.inlined_to
2695 : cs->caller);
e33c6cd6
MJ
2696
2697 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2698 {
e33c6cd6 2699 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2700 struct ipa_jump_func *jfunc;
8b7773a4 2701 int param_index;
3e293154 2702
e33c6cd6 2703 next_ie = ie->next_callee;
3e293154 2704
5f902d76
JH
2705 if (ici->param_index == -1)
2706 continue;
e33c6cd6 2707
3e293154 2708 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2709 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2710 {
5ee53a06 2711 ici->param_index = -1;
3e293154
MJ
2712 continue;
2713 }
2714
8b7773a4
MJ
2715 param_index = ici->param_index;
2716 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2717
2718 if (!flag_indirect_inlining)
36b72910
JH
2719 new_direct_edge = NULL;
2720 else if (ici->polymorphic)
d250540a
MJ
2721 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2722 new_root_info);
b258210c 2723 else
d250540a
MJ
2724 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2725 new_root_info);
042ae7d2
JH
2726 /* If speculation was removed, then we need to do nothing. */
2727 if (new_direct_edge && new_direct_edge != ie)
2728 {
2729 new_direct_edge->indirect_inlining_edge = 1;
2730 top = IPA_EDGE_REF (cs);
2731 res = true;
2732 }
2733 else if (new_direct_edge)
685b0d13 2734 {
b258210c 2735 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2736 if (new_direct_edge->call_stmt)
2737 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2738 = !gimple_check_call_matching_types (
2739 new_direct_edge->call_stmt,
67348ccc 2740 new_direct_edge->callee->decl, false);
b258210c
MJ
2741 if (new_edges)
2742 {
9771b263 2743 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2744 res = true;
2745 }
042ae7d2 2746 top = IPA_EDGE_REF (cs);
685b0d13 2747 }
36b72910
JH
2748 else if (jfunc->type == IPA_JF_PASS_THROUGH
2749 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2750 {
2751 if (ici->agg_contents
2752 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2753 ici->param_index = -1;
2754 else
2755 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2756 }
2757 else if (jfunc->type == IPA_JF_ANCESTOR)
2758 {
2759 if (ici->agg_contents
2760 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2761 ici->param_index = -1;
2762 else
2763 {
2764 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2765 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2766 }
2767 }
2768 else
2769 /* Either we can find a destination for this edge now or never. */
2770 ici->param_index = -1;
3e293154 2771 }
e33c6cd6 2772
f8e2a1ed 2773 return res;
3e293154
MJ
2774}
2775
2776/* Recursively traverse subtree of NODE (including node) made of inlined
2777 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2778 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2779 update_jump_functions_after_inlining on all non-inlined edges that lead out
2780 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2781 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2782 created. */
be95e2b9 2783
f8e2a1ed 2784static bool
3e293154
MJ
2785propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2786 struct cgraph_node *node,
9771b263 2787 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2788{
2789 struct cgraph_edge *e;
f8e2a1ed 2790 bool res;
3e293154 2791
e33c6cd6 2792 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2793
2794 for (e = node->callees; e; e = e->next_callee)
2795 if (!e->inline_failed)
f8e2a1ed 2796 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2797 else
2798 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2799 for (e = node->indirect_calls; e; e = e->next_callee)
2800 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2801
2802 return res;
3e293154
MJ
2803}
2804
4502fe8d
MJ
2805/* Combine two controlled uses counts as done during inlining. */
2806
2807static int
2808combine_controlled_uses_counters (int c, int d)
2809{
2810 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2811 return IPA_UNDESCRIBED_USE;
2812 else
2813 return c + d - 1;
2814}
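/* Worked examples of the combination above: combining c = 3 described uses in
   the new root with d = 2 described uses recorded for the inlined callee
   yields 3 + 2 - 1 = 4, because the use constituted by the now-inlined call
   itself disappears.  If either count is IPA_UNDESCRIBED_USE, the result
   stays IPA_UNDESCRIBED_USE.  */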
2815
2816/* Propagate number of controlled users from CS->callee to the new root of the
2817 tree of inlined nodes. */
2818
2819static void
2820propagate_controlled_uses (struct cgraph_edge *cs)
2821{
2822 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2823 struct cgraph_node *new_root = cs->caller->global.inlined_to
2824 ? cs->caller->global.inlined_to : cs->caller;
2825 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2826 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2827 int count, i;
2828
2829 count = MIN (ipa_get_cs_argument_count (args),
2830 ipa_get_param_count (old_root_info));
2831 for (i = 0; i < count; i++)
2832 {
2833 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2834 struct ipa_cst_ref_desc *rdesc;
2835
2836 if (jf->type == IPA_JF_PASS_THROUGH)
2837 {
2838 int src_idx, c, d;
2839 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2840 c = ipa_get_controlled_uses (new_root_info, src_idx);
2841 d = ipa_get_controlled_uses (old_root_info, i);
2842
2843 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2844 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2845 c = combine_controlled_uses_counters (c, d);
2846 ipa_set_controlled_uses (new_root_info, src_idx, c);
2847 if (c == 0 && new_root_info->ipcp_orig_node)
2848 {
2849 struct cgraph_node *n;
2850 struct ipa_ref *ref;
2851 tree t = new_root_info->known_vals[src_idx];
2852
2853 if (t && TREE_CODE (t) == ADDR_EXPR
2854 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2855 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2856 && (ref = ipa_find_reference (new_root,
2857 n, NULL, 0)))
4502fe8d
MJ
2858 {
2859 if (dump_file)
2860 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2861 "reference from %s/%i to %s/%i.\n",
2862 xstrdup (cgraph_node_name (new_root)),
67348ccc
DM
2863 new_root->order,
2864 xstrdup (cgraph_node_name (n)), n->order);
4502fe8d
MJ
2865 ipa_remove_reference (ref);
2866 }
2867 }
2868 }
2869 else if (jf->type == IPA_JF_CONST
2870 && (rdesc = jfunc_rdesc_usable (jf)))
2871 {
2872 int d = ipa_get_controlled_uses (old_root_info, i);
2873 int c = rdesc->refcount;
2874 rdesc->refcount = combine_controlled_uses_counters (c, d);
2875 if (rdesc->refcount == 0)
2876 {
2877 tree cst = ipa_get_jf_constant (jf);
2878 struct cgraph_node *n;
2879 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2880 && TREE_CODE (TREE_OPERAND (cst, 0))
2881 == FUNCTION_DECL);
2882 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2883 if (n)
2884 {
2885 struct cgraph_node *clone;
568cda29 2886 bool ok;
67348ccc 2887 ok = remove_described_reference (n, rdesc);
568cda29 2888 gcc_checking_assert (ok);
4502fe8d
MJ
2889
2890 clone = cs->caller;
2891 while (clone->global.inlined_to
2892 && clone != rdesc->cs->caller
2893 && IPA_NODE_REF (clone)->ipcp_orig_node)
2894 {
2895 struct ipa_ref *ref;
67348ccc
DM
2896 ref = ipa_find_reference (clone,
2897 n, NULL, 0);
4502fe8d
MJ
2898 if (ref)
2899 {
2900 if (dump_file)
2901 fprintf (dump_file, "ipa-prop: Removing "
2902 "cloning-created reference "
2903 "from %s/%i to %s/%i.\n",
2904 xstrdup (cgraph_node_name (clone)),
67348ccc 2905 clone->order,
4502fe8d 2906 xstrdup (cgraph_node_name (n)),
67348ccc 2907 n->order);
4502fe8d
MJ
2908 ipa_remove_reference (ref);
2909 }
2910 clone = clone->callers->caller;
2911 }
2912 }
2913 }
2914 }
2915 }
2916
2917 for (i = ipa_get_param_count (old_root_info);
2918 i < ipa_get_cs_argument_count (args);
2919 i++)
2920 {
2921 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2922
2923 if (jf->type == IPA_JF_CONST)
2924 {
2925 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2926 if (rdesc)
2927 rdesc->refcount = IPA_UNDESCRIBED_USE;
2928 }
2929 else if (jf->type == IPA_JF_PASS_THROUGH)
2930 ipa_set_controlled_uses (new_root_info,
2931 jf->value.pass_through.formal_id,
2932 IPA_UNDESCRIBED_USE);
2933 }
2934}
2935
3e293154
MJ
2936/* Update jump functions and call note functions on inlining the call site CS.
2937 CS is expected to lead to a node already cloned by
2938 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
 2939 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2940 created. */
be95e2b9 2941
f8e2a1ed 2942bool
3e293154 2943ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2944 vec<cgraph_edge_p> *new_edges)
3e293154 2945{
5ee53a06 2946 bool changed;
f8e2a1ed
MJ
2947 /* Do nothing if the preparation phase has not been carried out yet
2948 (i.e. during early inlining). */
9771b263 2949 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2950 return false;
2951 gcc_assert (ipa_edge_args_vector);
2952
4502fe8d 2953 propagate_controlled_uses (cs);
5ee53a06
JH
2954 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2955
5ee53a06 2956 return changed;
518dc859
RL
2957}
2958
771578a0
MJ
2959/* Frees all dynamically allocated structures that the argument info points
2960 to. */
be95e2b9 2961
518dc859 2962void
771578a0 2963ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2964{
9771b263 2965 vec_free (args->jump_functions);
771578a0 2966 memset (args, 0, sizeof (*args));
518dc859
RL
2967}
2968
771578a0 2969/* Free all ipa_edge structures. */
be95e2b9 2970
518dc859 2971void
771578a0 2972ipa_free_all_edge_args (void)
518dc859 2973{
771578a0
MJ
2974 int i;
2975 struct ipa_edge_args *args;
518dc859 2976
9771b263
DN
2977 if (!ipa_edge_args_vector)
2978 return;
2979
2980 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2981 ipa_free_edge_args_substructures (args);
2982
9771b263 2983 vec_free (ipa_edge_args_vector);
518dc859
RL
2984}
2985
771578a0
MJ
2986/* Frees all dynamically allocated structures that the param info points
2987 to. */
be95e2b9 2988
518dc859 2989void
771578a0 2990ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 2991{
9771b263 2992 info->descriptors.release ();
310bc633
MJ
2993 free (info->lattices);
 2994 /* Lattice values and their sources are deallocated with their allocation
2995 pool. */
9771b263 2996 info->known_vals.release ();
771578a0 2997 memset (info, 0, sizeof (*info));
518dc859
RL
2998}
2999
771578a0 3000/* Free all ipa_node_params structures. */
be95e2b9 3001
518dc859 3002void
771578a0 3003ipa_free_all_node_params (void)
518dc859 3004{
771578a0
MJ
3005 int i;
3006 struct ipa_node_params *info;
518dc859 3007
9771b263 3008 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3009 ipa_free_node_params_substructures (info);
3010
9771b263 3011 ipa_node_params_vector.release ();
771578a0
MJ
3012}
3013
2c9561b5
MJ
3014/* Set the aggregate replacements of NODE to be AGGVALS. */
3015
3016void
3017ipa_set_node_agg_value_chain (struct cgraph_node *node,
3018 struct ipa_agg_replacement_value *aggvals)
3019{
9771b263
DN
3020 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3021 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3022
9771b263 3023 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3024}
3025
771578a0 3026/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3027
771578a0 3028static void
5c0466b5 3029ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3030{
568cda29
MJ
3031 struct ipa_edge_args *args;
3032
3033 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3034 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3035 return;
568cda29
MJ
3036
3037 args = IPA_EDGE_REF (cs);
3038 if (args->jump_functions)
3039 {
3040 struct ipa_jump_func *jf;
3041 int i;
3042 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3043 {
3044 struct ipa_cst_ref_desc *rdesc;
3045 try_decrement_rdesc_refcount (jf);
3046 if (jf->type == IPA_JF_CONST
3047 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3048 && rdesc->cs == cs)
3049 rdesc->cs = NULL;
3050 }
568cda29
MJ
3051 }
3052
771578a0 3053 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3054}
3055
771578a0 3056/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3057
771578a0 3058static void
5c0466b5 3059ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3060{
dd6d1ad7 3061 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3062 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3063 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3064 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3065 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3066}
3067
8b7773a4 3068/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3069
771578a0
MJ
3070static void
3071ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3072 __attribute__((unused)) void *data)
771578a0
MJ
3073{
3074 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3075 unsigned int i;
771578a0
MJ
3076
3077 ipa_check_create_edge_args ();
3078
3079 old_args = IPA_EDGE_REF (src);
3080 new_args = IPA_EDGE_REF (dst);
3081
9771b263 3082 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3083
9771b263 3084 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3085 {
3086 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3087 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3088
3089 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3090
3091 if (src_jf->type == IPA_JF_CONST)
3092 {
3093 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3094
3095 if (!src_rdesc)
3096 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3097 else if (src->caller == dst->caller)
3098 {
3099 struct ipa_ref *ref;
5e20cdc9 3100 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3101 gcc_checking_assert (n);
67348ccc 3102 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3103 src->call_stmt, src->lto_stmt_uid);
3104 gcc_checking_assert (ref);
67348ccc 3105 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3106
3107 gcc_checking_assert (ipa_refdesc_pool);
3108 struct ipa_cst_ref_desc *dst_rdesc
3109 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3110 dst_rdesc->cs = dst;
3111 dst_rdesc->refcount = src_rdesc->refcount;
3112 dst_rdesc->next_duplicate = NULL;
3113 dst_jf->value.constant.rdesc = dst_rdesc;
3114 }
4502fe8d
MJ
3115 else if (src_rdesc->cs == src)
3116 {
3117 struct ipa_cst_ref_desc *dst_rdesc;
3118 gcc_checking_assert (ipa_refdesc_pool);
3119 dst_rdesc
3120 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3121 dst_rdesc->cs = dst;
4502fe8d 3122 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3123 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3124 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3125 dst_jf->value.constant.rdesc = dst_rdesc;
3126 }
3127 else
3128 {
3129 struct ipa_cst_ref_desc *dst_rdesc;
3130 /* This can happen during inlining, when a JFUNC can refer to a
3131 reference taken in a function up in the tree of inline clones.
3132 We need to find the duplicate that refers to our tree of
3133 inline clones. */
3134
3135 gcc_assert (dst->caller->global.inlined_to);
3136 for (dst_rdesc = src_rdesc->next_duplicate;
3137 dst_rdesc;
3138 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3139 {
3140 struct cgraph_node *top;
3141 top = dst_rdesc->cs->caller->global.inlined_to
3142 ? dst_rdesc->cs->caller->global.inlined_to
3143 : dst_rdesc->cs->caller;
3144 if (dst->caller->global.inlined_to == top)
3145 break;
3146 }
44a60244 3147 gcc_assert (dst_rdesc);
4502fe8d
MJ
3148 dst_jf->value.constant.rdesc = dst_rdesc;
3149 }
3150 }
3151 }
771578a0
MJ
3152}
3153
3154/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3155
771578a0
MJ
3156static void
3157ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3158 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3159{
3160 struct ipa_node_params *old_info, *new_info;
2c9561b5 3161 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3162
3163 ipa_check_create_node_params ();
3164 old_info = IPA_NODE_REF (src);
3165 new_info = IPA_NODE_REF (dst);
771578a0 3166
9771b263 3167 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3168 new_info->lattices = NULL;
771578a0 3169 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3170
3949c4a7
MJ
3171 new_info->uses_analysis_done = old_info->uses_analysis_done;
3172 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3173
3174 old_av = ipa_get_agg_replacements_for_node (src);
3175 if (!old_av)
3176 return;
3177
3178 new_av = NULL;
3179 while (old_av)
3180 {
3181 struct ipa_agg_replacement_value *v;
3182
3183 v = ggc_alloc_ipa_agg_replacement_value ();
3184 memcpy (v, old_av, sizeof (*v));
3185 v->next = new_av;
3186 new_av = v;
3187 old_av = old_av->next;
3188 }
3189 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3190}
3191
40982661
JH
3192
3193/* Analyze newly added function into callgraph. */
3194
3195static void
3196ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3197{
3198 ipa_analyze_node (node);
3199}
3200
771578a0 3201/* Register our cgraph hooks if they are not already there. */
be95e2b9 3202
518dc859 3203void
771578a0 3204ipa_register_cgraph_hooks (void)
518dc859 3205{
771578a0
MJ
3206 if (!edge_removal_hook_holder)
3207 edge_removal_hook_holder =
3208 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3209 if (!node_removal_hook_holder)
3210 node_removal_hook_holder =
3211 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3212 if (!edge_duplication_hook_holder)
3213 edge_duplication_hook_holder =
3214 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3215 if (!node_duplication_hook_holder)
3216 node_duplication_hook_holder =
3217 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3218 function_insertion_hook_holder =
3219 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3220}
518dc859 3221
771578a0 3222/* Unregister our cgraph hooks. */
be95e2b9 3223
771578a0
MJ
3224static void
3225ipa_unregister_cgraph_hooks (void)
3226{
3227 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3228 edge_removal_hook_holder = NULL;
3229 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3230 node_removal_hook_holder = NULL;
3231 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3232 edge_duplication_hook_holder = NULL;
3233 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3234 node_duplication_hook_holder = NULL;
40982661
JH
3235 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3236 function_insertion_hook_holder = NULL;
3237}
3238
3239/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3240 longer needed after ipa-cp. */
3241
3242 void
3243 ipa_free_all_structures_after_ipa_cp (void)
3244{
3245 if (!optimize)
3246 {
3247 ipa_free_all_edge_args ();
3248 ipa_free_all_node_params ();
3249 free_alloc_pool (ipcp_sources_pool);
3250 free_alloc_pool (ipcp_values_pool);
3251 free_alloc_pool (ipcp_agg_lattice_pool);
3252 ipa_unregister_cgraph_hooks ();
3253 if (ipa_refdesc_pool)
3254 free_alloc_pool (ipa_refdesc_pool);
3255 }
3256}
3257
3258/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3259 longer needed after indirect inlining. */
3260
3261 void
3262 ipa_free_all_structures_after_iinln (void)
3263{
3264 ipa_free_all_edge_args ();
3265 ipa_free_all_node_params ();
3266 ipa_unregister_cgraph_hooks ();
3267 if (ipcp_sources_pool)
3268 free_alloc_pool (ipcp_sources_pool);
3269 if (ipcp_values_pool)
3270 free_alloc_pool (ipcp_values_pool);
3271 if (ipcp_agg_lattice_pool)
3272 free_alloc_pool (ipcp_agg_lattice_pool);
3273 if (ipa_refdesc_pool)
3274 free_alloc_pool (ipa_refdesc_pool);
3275}
3276
3277/* Print the ipa_node_params data structure (parameter descriptors) of NODE
3278 to F. */
3279
3280 void
3281 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3282{
3283 int i, count;
3284 struct ipa_node_params *info;
3285
3286 if (!node->definition)
3287 return;
3288 info = IPA_NODE_REF (node);
3289 fprintf (f, " function %s/%i parameter descriptors:\n",
3290 cgraph_node_name (node), node->order);
3291 count = ipa_get_param_count (info);
3292 for (i = 0; i < count; i++)
518dc859 3293 {
3294 int c;
3295
3296 ipa_dump_param (f, info, i);
3297 if (ipa_is_param_used (info, i))
3298 fprintf (f, " used");
3299 c = ipa_get_controlled_uses (info, i);
3300 if (c == IPA_UNDESCRIBED_USE)
3301 fprintf (f, " undescribed_use");
3302 else
3303 fprintf (f, " controlled_uses=%i", c);
3304 fprintf (f, "\n");
3305 }
3306}
3307
3308/* Print ipa_node_params data structures of all functions in the
3309 callgraph to F. */
3310
3311 void
3312 ipa_print_all_params (FILE * f)
3313{
3314 struct cgraph_node *node;
3315
3316 fprintf (f, "\nFunction parameters:\n");
3317 FOR_EACH_FUNCTION (node)
3318 ipa_print_node_params (f, node);
3319}
3320
3321/* Return a heap allocated vector containing formal parameters of FNDECL. */
3322
9771b263 3323vec<tree>
3f84bf08
MJ
3324ipa_get_vector_of_formal_parms (tree fndecl)
3325{
3326 vec<tree> args;
3327 int count;
3328 tree parm;
3329
3330 gcc_assert (!flag_wpa);
3331 count = count_formal_params (fndecl);
3332 args.create (count);
3333 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3334 args.quick_push (parm);
3335
3336 return args;
3337}
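/* Editor's note, a minimal usage sketch rather than original source: the
   vector returned above is heap allocated and owned by the caller, so the
   typical pattern is

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     for (unsigned i = 0; i < parms.length (); i++)
       print_generic_expr (stderr, parms[i], 0);
     parms.release ();

   ipa_dump_param_adjustments further below follows exactly this pattern.  */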
3338
3339/* Return a heap allocated vector containing types of formal parameters of
3340 function type FNTYPE. */
3341
9771b263 3342static inline vec<tree>
3f84bf08
MJ
3343get_vector_of_formal_parm_types (tree fntype)
3344{
3345 vec<tree> types;
3346 int count = 0;
3347 tree t;
3348
3349 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3350 count++;
3351
3352 types.create (count);
3353 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3354 types.quick_push (TREE_VALUE (t));
3355
3356 return types;
3357}
3358
3359/* Modify the function declaration FNDECL and its type according to the plan in
3360 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3361 to reflect the actual parameters being modified which are determined by the
3362 base_index field. */
3363
3364void
3365ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3366 const char *synth_parm_prefix)
3367{
3368 vec<tree> oparms, otypes;
3369 tree orig_type, new_type = NULL;
3370 tree old_arg_types, t, new_arg_types = NULL;
3371 tree parm, *link = &DECL_ARGUMENTS (fndecl);
3372 int i, len = adjustments.length ();
3373 tree new_reversed = NULL;
3374 bool care_for_types, last_parm_void;
3375
3376 if (!synth_parm_prefix)
3377 synth_parm_prefix = "SYNTH";
3378
3379 oparms = ipa_get_vector_of_formal_parms (fndecl);
3380 orig_type = TREE_TYPE (fndecl);
3381 old_arg_types = TYPE_ARG_TYPES (orig_type);
3382
3383 /* The following test is an ugly hack; some functions simply don't have any
3384 arguments in their type. This is probably a bug, but we tolerate it here. */
3385 care_for_types = (old_arg_types != NULL_TREE);
3386 if (care_for_types)
3387 {
3388 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3389 == void_type_node);
3390 otypes = get_vector_of_formal_parm_types (orig_type);
3391 if (last_parm_void)
3392 gcc_assert (oparms.length () + 1 == otypes.length ());
3393 else
3394 gcc_assert (oparms.length () == otypes.length ());
3395 }
3396 else
3397 {
3398 last_parm_void = false;
3399 otypes.create (0);
3400 }
3401
3402 for (i = 0; i < len; i++)
3403 {
3404 struct ipa_parm_adjustment *adj;
3405 gcc_assert (link);
3406
3407 adj = &adjustments[i];
3408 parm = oparms[adj->base_index];
3409 adj->base = parm;
3410
3411 if (adj->copy_param)
3412 {
3413 if (care_for_types)
3414 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3415 new_arg_types);
3416 *link = parm;
3417 link = &DECL_CHAIN (parm);
3418 }
3419 else if (!adj->remove_param)
3420 {
3421 tree new_parm;
3422 tree ptype;
3423
3424 if (adj->by_ref)
3425 ptype = build_pointer_type (adj->type);
3426 else
3427 ptype = adj->type;
3428
3429 if (care_for_types)
3430 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3431
3432 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3433 ptype);
3434 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3435
3436 DECL_ARTIFICIAL (new_parm) = 1;
3437 DECL_ARG_TYPE (new_parm) = ptype;
3438 DECL_CONTEXT (new_parm) = fndecl;
3439 TREE_USED (new_parm) = 1;
3440 DECL_IGNORED_P (new_parm) = 1;
3441 layout_decl (new_parm, 0);
3442
3443 adj->base = parm;
3444 adj->reduction = new_parm;
3445
3446 *link = new_parm;
3447
3448 link = &DECL_CHAIN (new_parm);
3449 }
3450 }
3451
3452 *link = NULL_TREE;
3453
3454 if (care_for_types)
3455 {
3456 new_reversed = nreverse (new_arg_types);
3457 if (last_parm_void)
3458 {
3459 if (new_reversed)
3460 TREE_CHAIN (new_arg_types) = void_list_node;
3461 else
3462 new_reversed = void_list_node;
3463 }
3464 }
3465
3466 /* Use copy_node to preserve as much as possible from original type
3467 (debug info, attribute lists etc.)
3468 The exception is METHOD_TYPEs, which must have a THIS argument.
3469 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3470 instead. */
3471 if (TREE_CODE (orig_type) != METHOD_TYPE
3472 || (adjustments[0].copy_param
3473 && adjustments[0].base_index == 0))
3474 {
3475 new_type = build_distinct_type_copy (orig_type);
3476 TYPE_ARG_TYPES (new_type) = new_reversed;
3477 }
3478 else
3479 {
3480 new_type
3481 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3482 new_reversed));
3483 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3484 DECL_VINDEX (fndecl) = NULL_TREE;
3485 }
3486
3487 /* When signature changes, we need to clear builtin info. */
3488 if (DECL_BUILT_IN (fndecl))
3489 {
3490 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3491 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3492 }
3493
3494 /* This is a new type, not a copy of an old type. Need to reassociate
3495 variants. We can handle everything except the main variant lazily. */
3496 t = TYPE_MAIN_VARIANT (orig_type);
3497 if (orig_type != t)
3498 {
3499 TYPE_MAIN_VARIANT (new_type) = t;
3500 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3501 TYPE_NEXT_VARIANT (t) = new_type;
3502 }
3503 else
3504 {
3505 TYPE_MAIN_VARIANT (new_type) = new_type;
3506 TYPE_NEXT_VARIANT (new_type) = NULL;
3507 }
3508
3509 TREE_TYPE (fndecl) = new_type;
3510 DECL_VIRTUAL_P (fndecl) = 0;
3511 otypes.release ();
3512 oparms.release ();
3513}
3514
3515/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3516 If this is a directly recursive call, CS must be NULL. Otherwise it must
3517 contain the corresponding call graph edge. */
3518
3519void
3520ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3521 ipa_parm_adjustment_vec adjustments)
3522{
3523 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3524 vec<tree> vargs;
3525 vec<tree, va_gc> **debug_args = NULL;
3526 gimple new_stmt;
3527 gimple_stmt_iterator gsi, prev_gsi;
3528 tree callee_decl;
3529 int i, len;
3530
3531 len = adjustments.length ();
3532 vargs.create (len);
3533 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3534 ipa_remove_stmt_references (current_node, stmt);
3535
3536 gsi = gsi_for_stmt (stmt);
3537 prev_gsi = gsi;
3538 gsi_prev (&prev_gsi);
3539 for (i = 0; i < len; i++)
3540 {
3541 struct ipa_parm_adjustment *adj;
3542
3543 adj = &adjustments[i];
3544
3545 if (adj->copy_param)
3546 {
3547 tree arg = gimple_call_arg (stmt, adj->base_index);
3548
3549 vargs.quick_push (arg);
3550 }
3551 else if (!adj->remove_param)
3552 {
3553 tree expr, base, off;
3554 location_t loc;
3555 unsigned int deref_align = 0;
3556 bool deref_base = false;
3557
3558 /* We create a new parameter out of the value of the old one, we can
3559 do the following kind of transformations:
3560
3561 - A scalar passed by reference is converted to a scalar passed by
3562 value. (adj->by_ref is false and the type of the original
3563 actual argument is a pointer to a scalar).
3564
3565 - A part of an aggregate is passed instead of the whole aggregate.
3566 The part can be passed either by value or by reference, this is
3567 determined by value of adj->by_ref. Moreover, the code below
3568 handles both situations when the original aggregate is passed by
3569 value (its type is not a pointer) and when it is passed by
3570 reference (it is a pointer to an aggregate).
3571
3572 When the new argument is passed by reference (adj->by_ref is true)
3573 it must be a part of an aggregate and therefore we form it by
3574 simply taking the address of a reference inside the original
3575 aggregate. */
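	 /* Editor's illustration, not part of the original source: for a call
	    foo (&s) in which only a scalar member at byte offset 8 of s is
	    used, an adjustment with by_ref == false, offset == 64 (bits) and
	    the member's type makes the code below build a MEM_REF based on &s
	    with an 8-byte offset, so the member itself is now passed by
	    value.  */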
3576
3577 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3578 base = gimple_call_arg (stmt, adj->base_index);
3579 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3580 : EXPR_LOCATION (base);
3581
3582 if (TREE_CODE (base) != ADDR_EXPR
3583 && POINTER_TYPE_P (TREE_TYPE (base)))
3584 off = build_int_cst (adj->alias_ptr_type,
3585 adj->offset / BITS_PER_UNIT);
3586 else
3587 {
3588 HOST_WIDE_INT base_offset;
3589 tree prev_base;
3590 bool addrof;
3591
3592 if (TREE_CODE (base) == ADDR_EXPR)
3593 {
3594 base = TREE_OPERAND (base, 0);
3595 addrof = true;
3596 }
3597 else
3598 addrof = false;
3599 prev_base = base;
3600 base = get_addr_base_and_unit_offset (base, &base_offset);
3601 /* Aggregate arguments can have non-invariant addresses. */
3602 if (!base)
3603 {
3604 base = build_fold_addr_expr (prev_base);
3605 off = build_int_cst (adj->alias_ptr_type,
3606 adj->offset / BITS_PER_UNIT);
3607 }
3608 else if (TREE_CODE (base) == MEM_REF)
3609 {
3610 if (!addrof)
3611 {
3612 deref_base = true;
3613 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3614 }
3615 off = build_int_cst (adj->alias_ptr_type,
3616 base_offset
3617 + adj->offset / BITS_PER_UNIT);
3618 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3619 off);
3620 base = TREE_OPERAND (base, 0);
3621 }
3622 else
3623 {
3624 off = build_int_cst (adj->alias_ptr_type,
3625 base_offset
3626 + adj->offset / BITS_PER_UNIT);
3627 base = build_fold_addr_expr (base);
3628 }
3629 }
3630
3631 if (!adj->by_ref)
3632 {
3633 tree type = adj->type;
3634 unsigned int align;
3635 unsigned HOST_WIDE_INT misalign;
3636
3637 if (deref_base)
3638 {
3639 align = deref_align;
3640 misalign = 0;
3641 }
3642 else
3643 {
3644 get_pointer_alignment_1 (base, &align, &misalign);
3645 if (TYPE_ALIGN (type) > align)
3646 align = TYPE_ALIGN (type);
3647 }
3648 misalign += (tree_to_double_int (off)
3649 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3650 * BITS_PER_UNIT);
3651 misalign = misalign & (align - 1);
3652 if (misalign != 0)
3653 align = (misalign & -misalign);
3654 if (align < TYPE_ALIGN (type))
3655 type = build_aligned_type (type, align);
3656 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3657 }
3658 else
3659 {
3660 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3661 expr = build_fold_addr_expr (expr);
3662 }
3663
3664 expr = force_gimple_operand_gsi (&gsi, expr,
3665 adj->by_ref
3666 || is_gimple_reg_type (adj->type),
3667 NULL, true, GSI_SAME_STMT);
3668 vargs.quick_push (expr);
3669 }
3670 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3671 {
3672 unsigned int ix;
3673 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3674 gimple def_temp;
3675
3676 arg = gimple_call_arg (stmt, adj->base_index);
3677 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3678 {
3679 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3680 continue;
3681 arg = fold_convert_loc (gimple_location (stmt),
3682 TREE_TYPE (origin), arg);
3683 }
3684 if (debug_args == NULL)
3685 debug_args = decl_debug_args_insert (callee_decl);
3686 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3687 if (ddecl == origin)
3688 {
3689 ddecl = (**debug_args)[ix + 1];
3690 break;
3691 }
3692 if (ddecl == NULL)
3693 {
3694 ddecl = make_node (DEBUG_EXPR_DECL);
3695 DECL_ARTIFICIAL (ddecl) = 1;
3696 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3697 DECL_MODE (ddecl) = DECL_MODE (origin);
3698
3699 vec_safe_push (*debug_args, origin);
3700 vec_safe_push (*debug_args, ddecl);
3701 }
3702 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3703 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3704 }
3705 }
3706
3707 if (dump_file && (dump_flags & TDF_DETAILS))
3708 {
3709 fprintf (dump_file, "replacing stmt:");
3710 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3711 }
3712
3713 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3714 vargs.release ();
3715 if (gimple_call_lhs (stmt))
3716 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3717
3718 gimple_set_block (new_stmt, gimple_block (stmt));
3719 if (gimple_has_location (stmt))
3720 gimple_set_location (new_stmt, gimple_location (stmt));
3721 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3722 gimple_call_copy_flags (new_stmt, stmt);
3723
3724 if (dump_file && (dump_flags & TDF_DETAILS))
3725 {
3726 fprintf (dump_file, "with stmt:");
3727 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3728 fprintf (dump_file, "\n");
3729 }
3730 gsi_replace (&gsi, new_stmt, true);
3731 if (cs)
3732 cgraph_set_call_stmt (cs, new_stmt);
3733 do
3734 {
3735 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3736 gsi_prev (&gsi);
3737 }
3738 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3739 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) != gsi_stmt (prev_gsi)));
3740
3741 update_ssa (TODO_update_ssa);
3742 free_dominance_info (CDI_DOMINATORS);
3743}
3744
3745/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3746
3747static bool
3748index_in_adjustments_multiple_times_p (int base_index,
3749 ipa_parm_adjustment_vec adjustments)
3750{
3751 int i, len = adjustments.length ();
3752 bool one = false;
3753
3754 for (i = 0; i < len; i++)
3755 {
3756 struct ipa_parm_adjustment *adj;
3757 adj = &adjustments[i];
3758
3759 if (adj->base_index == base_index)
3760 {
3761 if (one)
3762 return true;
3763 else
3764 one = true;
3765 }
3766 }
3767 return false;
3768}
3769
3770
3771/* Return adjustments that should have the same effect on function parameters
3772 and call arguments as if they were first changed according to adjustments in
3773 INNER and then by adjustments in OUTER. */
3774
3775ipa_parm_adjustment_vec
3776ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3777 ipa_parm_adjustment_vec outer)
3778{
3779 int i, outlen = outer.length ();
3780 int inlen = inner.length ();
3781 int removals = 0;
3782 ipa_parm_adjustment_vec adjustments, tmp;
3783
3784 tmp.create (inlen);
3785 for (i = 0; i < inlen; i++)
3786 {
3787 struct ipa_parm_adjustment *n;
3788 n = &inner[i];
3789
3790 if (n->remove_param)
3791 removals++;
3792 else
3793 tmp.quick_push (*n);
3794 }
3795
3796 adjustments.create (outlen + removals);
3797 for (i = 0; i < outlen; i++)
3798 {
3799 struct ipa_parm_adjustment r;
3800 struct ipa_parm_adjustment *out = &outer[i];
3801 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3802
3803 memset (&r, 0, sizeof (r));
3804 gcc_assert (!in->remove_param);
3805 if (out->remove_param)
3806 {
3807 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3808 {
3809 r.remove_param = true;
3810 adjustments.quick_push (r);
3811 }
3812 continue;
3813 }
3814
3815 r.base_index = in->base_index;
3816 r.type = out->type;
3817
3818 /* FIXME: Create nonlocal value too. */
3819
3820 if (in->copy_param && out->copy_param)
3821 r.copy_param = true;
3822 else if (in->copy_param)
3823 r.offset = out->offset;
3824 else if (out->copy_param)
3825 r.offset = in->offset;
3826 else
3827 r.offset = in->offset + out->offset;
3828 adjustments.quick_push (r);
3829 }
3830
3831 for (i = 0; i < inlen; i++)
3832 {
3833 struct ipa_parm_adjustment *n = &inner[i];
3834
3835 if (n->remove_param)
3836 adjustments.quick_push (*n);
3837 }
3838
3839 tmp.release ();
3840 return adjustments;
3841}
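/* Editor's illustration of the combination above (not original source).
   Suppose the original function had parameters (a, b, c) and INNER kept a
   and c while removing b:

     inner = { {base_index 0, copy}, {base_index 1, remove}, {base_index 2, copy} }

   and OUTER, expressed against the two surviving parameters, keeps only the
   second one:

     outer = { {base_index 1, copy} }

   TMP then holds { {0, copy}, {2, copy} }, OUTER's entry maps through TMP[1],
   and the combined vector copies only original parameter c ({base_index 2,
   copy}), followed by INNER's removal entry for b.  */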
3842
3843/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3844 friendly way, assuming they are meant to be applied to FNDECL. */
3845
3846void
3847ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3848 tree fndecl)
3849{
3850 int i, len = adjustments.length ();
3851 bool first = true;
3852 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3853
3854 fprintf (file, "IPA param adjustments: ");
3855 for (i = 0; i < len; i++)
3856 {
3857 struct ipa_parm_adjustment *adj;
3858 adj = &adjustments[i];
3859
3860 if (!first)
3861 fprintf (file, " ");
3862 else
3863 first = false;
3864
3865 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3866 print_generic_expr (file, parms[adj->base_index], 0);
3867 if (adj->base)
3868 {
3869 fprintf (file, ", base: ");
3870 print_generic_expr (file, adj->base, 0);
3871 }
3872 if (adj->reduction)
3873 {
3874 fprintf (file, ", reduction: ");
3875 print_generic_expr (file, adj->reduction, 0);
3876 }
3877 if (adj->new_ssa_base)
3878 {
3879 fprintf (file, ", new_ssa_base: ");
3880 print_generic_expr (file, adj->new_ssa_base, 0);
3881 }
3882
3883 if (adj->copy_param)
3884 fprintf (file, ", copy_param");
3885 else if (adj->remove_param)
3886 fprintf (file, ", remove_param");
3887 else
3888 fprintf (file, ", offset %li", (long) adj->offset);
3889 if (adj->by_ref)
3890 fprintf (file, ", by_ref");
3891 print_node_brief (file, ", type: ", adj->type, 0);
3892 fprintf (file, "\n");
3893 }
3894 parms.release ();
3895}
3896
3897/* Dump the AV linked list. */
3898
3899void
3900ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3901{
3902 bool comma = false;
3903 fprintf (f, " Aggregate replacements:");
3904 for (; av; av = av->next)
3905 {
3906 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3907 av->index, av->offset);
3908 print_generic_expr (f, av->value, 0);
3909 comma = true;
3910 }
3911 fprintf (f, "\n");
3912}
3913
3914/* Stream out jump function JUMP_FUNC to OB. */
3915
3916static void
3917ipa_write_jump_function (struct output_block *ob,
3918 struct ipa_jump_func *jump_func)
3919{
3920 struct ipa_agg_jf_item *item;
3921 struct bitpack_d bp;
3922 int i, count;
3923
3924 streamer_write_uhwi (ob, jump_func->type);
3925 switch (jump_func->type)
3926 {
3927 case IPA_JF_UNKNOWN:
3928 break;
3929 case IPA_JF_KNOWN_TYPE:
3930 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3931 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3932 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3933 break;
3934 case IPA_JF_CONST:
3935 gcc_assert (
3936 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3937 stream_write_tree (ob, jump_func->value.constant.value, true);
3938 break;
3939 case IPA_JF_PASS_THROUGH:
3940 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3941 if (jump_func->value.pass_through.operation == NOP_EXPR)
3942 {
3943 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3944 bp = bitpack_create (ob->main_stream);
3945 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3946 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
3947 streamer_write_bitpack (&bp);
3948 }
3949 else
3950 {
3951 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3952 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3953 }
3954 break;
3955 case IPA_JF_ANCESTOR:
3956 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3957 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3958 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3959 bp = bitpack_create (ob->main_stream);
3960 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3961 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
3962 streamer_write_bitpack (&bp);
3963 break;
3964 }
3965
3966 count = vec_safe_length (jump_func->agg.items);
3967 streamer_write_uhwi (ob, count);
3968 if (count)
3969 {
3970 bp = bitpack_create (ob->main_stream);
3971 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3972 streamer_write_bitpack (&bp);
3973 }
3974
9771b263 3975 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3976 {
3977 streamer_write_uhwi (ob, item->offset);
3978 stream_write_tree (ob, item->value, true);
3979 }
3980}
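/* Editor's note, summarizing the writer above rather than quoting original
   text: the on-stream layout of one jump function is the type tag, the
   type-specific payload (offset and trees for KNOWN_TYPE, the constant for
   CONST, operation plus formal_id and bitpacked flags or an operand for
   PASS_THROUGH, offset, type, formal_id and flags for ANCESTOR), then the
   aggregate item count, a by_ref bit when that count is nonzero, and finally
   each item's offset and value.  ipa_read_jump_function below consumes the
   same sequence.  */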
3981
3982/* Read in jump function JUMP_FUNC from IB. */
3983
3984static void
3985ipa_read_jump_function (struct lto_input_block *ib,
3986 struct ipa_jump_func *jump_func,
3987 struct cgraph_edge *cs,
3988 struct data_in *data_in)
3989{
3990 enum jump_func_type jftype;
3991 enum tree_code operation;
3992 int i, count;
3993
3994 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3995 switch (jftype)
3996 {
3997 case IPA_JF_UNKNOWN:
3998 jump_func->type = IPA_JF_UNKNOWN;
3999 break;
4000 case IPA_JF_KNOWN_TYPE:
4001 {
4002 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4003 tree base_type = stream_read_tree (ib, data_in);
4004 tree component_type = stream_read_tree (ib, data_in);
4005
4006 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4007 break;
4008 }
4009 case IPA_JF_CONST:
4010 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4011 break;
4012 case IPA_JF_PASS_THROUGH:
4013 operation = (enum tree_code) streamer_read_uhwi (ib);
4014 if (operation == NOP_EXPR)
4015 {
4016 int formal_id = streamer_read_uhwi (ib);
4017 struct bitpack_d bp = streamer_read_bitpack (ib);
4018 bool agg_preserved = bp_unpack_value (&bp, 1);
4019 bool type_preserved = bp_unpack_value (&bp, 1);
4020 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4021 type_preserved);
4022 }
4023 else
4024 {
4025 tree operand = stream_read_tree (ib, data_in);
4026 int formal_id = streamer_read_uhwi (ib);
4027 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4028 operation);
4029 }
4030 break;
4031 case IPA_JF_ANCESTOR:
4032 {
4033 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4034 tree type = stream_read_tree (ib, data_in);
4035 int formal_id = streamer_read_uhwi (ib);
4036 struct bitpack_d bp = streamer_read_bitpack (ib);
4037 bool agg_preserved = bp_unpack_value (&bp, 1);
4038 bool type_preserved = bp_unpack_value (&bp, 1);
4039
4040 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4041 type_preserved);
4042 break;
4043 }
4044 }
4045
4046 count = streamer_read_uhwi (ib);
4047 vec_alloc (jump_func->agg.items, count);
4048 if (count)
4049 {
4050 struct bitpack_d bp = streamer_read_bitpack (ib);
4051 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4052 }
4053 for (i = 0; i < count; i++)
4054 {
4055 struct ipa_agg_jf_item item;
4056 item.offset = streamer_read_uhwi (ib);
4057 item.value = stream_read_tree (ib, data_in);
4058 jump_func->agg.items->quick_push (item);
4059 }
4060}
4061
4062/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4063 relevant to indirect inlining to OB. */
4064
4065static void
4066ipa_write_indirect_edge_info (struct output_block *ob,
4067 struct cgraph_edge *cs)
4068{
4069 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4070 struct bitpack_d bp;
4071
4072 streamer_write_hwi (ob, ii->param_index);
4073 streamer_write_hwi (ob, ii->offset);
4074 bp = bitpack_create (ob->main_stream);
4075 bp_pack_value (&bp, ii->polymorphic, 1);
4076 bp_pack_value (&bp, ii->agg_contents, 1);
4077 bp_pack_value (&bp, ii->member_ptr, 1);
4078 bp_pack_value (&bp, ii->by_ref, 1);
4079 streamer_write_bitpack (&bp);
4080
4081 if (ii->polymorphic)
4082 {
4083 streamer_write_hwi (ob, ii->otr_token);
4084 stream_write_tree (ob, ii->otr_type, true);
4085 }
4086}
4087
4088/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4089 relevant to indirect inlining from IB. */
4090
4091static void
4092ipa_read_indirect_edge_info (struct lto_input_block *ib,
4093 struct data_in *data_in ATTRIBUTE_UNUSED,
4094 struct cgraph_edge *cs)
4095{
4096 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4097 struct bitpack_d bp;
4098
4099 ii->param_index = (int) streamer_read_hwi (ib);
4100 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4101 bp = streamer_read_bitpack (ib);
4102 ii->polymorphic = bp_unpack_value (&bp, 1);
4103 ii->agg_contents = bp_unpack_value (&bp, 1);
4104 ii->member_ptr = bp_unpack_value (&bp, 1);
4105 ii->by_ref = bp_unpack_value (&bp, 1);
4106 if (ii->polymorphic)
4107 {
4108 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4109 ii->otr_type = stream_read_tree (ib, data_in);
4110 }
4111}
4112
4113/* Stream out NODE info to OB. */
4114
4115static void
4116ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4117{
4118 int node_ref;
4119 lto_symtab_encoder_t encoder;
4120 struct ipa_node_params *info = IPA_NODE_REF (node);
4121 int j;
4122 struct cgraph_edge *e;
4123 struct bitpack_d bp;
4124
4125 encoder = ob->decl_state->symtab_node_encoder;
4126 node_ref = lto_symtab_encoder_encode (encoder, node);
4127 streamer_write_uhwi (ob, node_ref);
4128
4129 streamer_write_uhwi (ob, ipa_get_param_count (info));
4130 for (j = 0; j < ipa_get_param_count (info); j++)
4131 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4132 bp = bitpack_create (ob->main_stream);
4133 gcc_assert (info->uses_analysis_done
4134 || ipa_get_param_count (info) == 0);
4135 gcc_assert (!info->node_enqueued);
4136 gcc_assert (!info->ipcp_orig_node);
4137 for (j = 0; j < ipa_get_param_count (info); j++)
4138 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4139 streamer_write_bitpack (&bp);
4140 for (j = 0; j < ipa_get_param_count (info); j++)
4141 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4142 for (e = node->callees; e; e = e->next_callee)
4143 {
4144 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4145
4146 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4147 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4148 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4149 }
4150 for (e = node->indirect_calls; e; e = e->next_callee)
4151 {
4152 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4153
4154 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4155 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4156 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4157 ipa_write_indirect_edge_info (ob, e);
4158 }
4159}
4160
4161/* Stream in NODE info from IB. */
4162
4163static void
4164ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4165 struct data_in *data_in)
4166{
4167 struct ipa_node_params *info = IPA_NODE_REF (node);
4168 int k;
4169 struct cgraph_edge *e;
4170 struct bitpack_d bp;
4171
4172 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4173
4174 for (k = 0; k < ipa_get_param_count (info); k++)
4175 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4176
4177 bp = streamer_read_bitpack (ib);
4178 if (ipa_get_param_count (info) != 0)
4179 info->uses_analysis_done = true;
4180 info->node_enqueued = false;
4181 for (k = 0; k < ipa_get_param_count (info); k++)
4182 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4183 for (k = 0; k < ipa_get_param_count (info); k++)
4184 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4185 for (e = node->callees; e; e = e->next_callee)
4186 {
4187 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4188 int count = streamer_read_uhwi (ib);
fb3f88cc 4189
fb3f88cc
JH
4190 if (!count)
4191 continue;
9771b263 4192 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4193
fb3f88cc 4194 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4195 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4196 data_in);
fb3f88cc 4197 }
e33c6cd6 4198 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4199 {
4200 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4201 int count = streamer_read_uhwi (ib);
4202
4203 if (count)
4204 {
4205 vec_safe_grow_cleared (args->jump_functions, count);
4206 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4207 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4208 data_in);
4209 }
4210 ipa_read_indirect_edge_info (ib, data_in, e);
4211 }
4212}
4213
4214/* Write jump functions for all suitable functions in the current LTO partition. */
4215
4216void
f27c1867 4217ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4218{
4219 struct cgraph_node *node;
4220 struct output_block *ob;
4221 unsigned int count = 0;
4222 lto_symtab_encoder_iterator lsei;
4223 lto_symtab_encoder_t encoder;
4224
4225
4226 if (!ipa_node_params_vector.exists ())
4227 return;
4228
4229 ob = create_output_block (LTO_section_jump_functions);
4230 encoder = ob->decl_state->symtab_node_encoder;
4231 ob->cgraph_node = NULL;
4232 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4233 lsei_next_function_in_partition (&lsei))
4234 {
4235 node = lsei_cgraph_node (lsei);
4236 if (cgraph_function_with_gimple_body_p (node)
4237 && IPA_NODE_REF (node) != NULL)
4238 count++;
4239 }
4240
4241 streamer_write_uhwi (ob, count);
4242
4243 /* Process all of the functions. */
4244 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4245 lsei_next_function_in_partition (&lsei))
4246 {
4247 node = lsei_cgraph_node (lsei);
4248 if (cgraph_function_with_gimple_body_p (node)
4249 && IPA_NODE_REF (node) != NULL)
4250 ipa_write_node_info (ob, node);
4251 }
4252 streamer_write_char_stream (ob->main_stream, 0);
4253 produce_asm (ob, NULL);
4254 destroy_output_block (ob);
4255}
4256
4257/* Read section in file FILE_DATA of length LEN with data DATA. */
4258
4259static void
4260ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4261 size_t len)
4262{
4263 const struct lto_function_header *header =
4264 (const struct lto_function_header *) data;
4265 const int cfg_offset = sizeof (struct lto_function_header);
4266 const int main_offset = cfg_offset + header->cfg_size;
4267 const int string_offset = main_offset + header->main_size;
4268 struct data_in *data_in;
4269 struct lto_input_block ib_main;
4270 unsigned int i;
4271 unsigned int count;
4272
4273 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4274 header->main_size);
4275
4276 data_in =
4277 lto_data_in_create (file_data, (const char *) data + string_offset,
4278 header->string_size, vNULL);
4279 count = streamer_read_uhwi (&ib_main);
4280
4281 for (i = 0; i < count; i++)
4282 {
4283 unsigned int index;
4284 struct cgraph_node *node;
4285 lto_symtab_encoder_t encoder;
4286
4287 index = streamer_read_uhwi (&ib_main);
4288 encoder = file_data->symtab_node_encoder;
4289 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4290 gcc_assert (node->definition);
4291 ipa_read_node_info (&ib_main, node, data_in);
4292 }
4293 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4294 len);
4295 lto_data_in_delete (data_in);
4296}
4297
4298/* Read ipcp jump functions. */
4299
4300void
4301ipa_prop_read_jump_functions (void)
4302{
4303 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4304 struct lto_file_decl_data *file_data;
4305 unsigned int j = 0;
4306
4307 ipa_check_create_node_params ();
4308 ipa_check_create_edge_args ();
4309 ipa_register_cgraph_hooks ();
4310
4311 while ((file_data = file_data_vec[j++]))
4312 {
4313 size_t len;
4314 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4315
4316 if (data)
4317 ipa_prop_read_section (file_data, data, len);
4318 }
4319}
4320
4321/* After merging units, we can get mismatch in argument counts.
4322 Also decl merging might've rendered parameter lists obsolete.
4323 Make sure the parameter and argument info structures exist. */
4324
4325void
4326ipa_update_after_lto_read (void)
4327{
4328 ipa_check_create_node_params ();
4329 ipa_check_create_edge_args ();
4330}
4331
4332void
4333write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4334{
4335 int node_ref;
4336 unsigned int count = 0;
4337 lto_symtab_encoder_t encoder;
4338 struct ipa_agg_replacement_value *aggvals, *av;
4339
4340 aggvals = ipa_get_agg_replacements_for_node (node);
4341 encoder = ob->decl_state->symtab_node_encoder;
4342 node_ref = lto_symtab_encoder_encode (encoder, node);
4343 streamer_write_uhwi (ob, node_ref);
4344
4345 for (av = aggvals; av; av = av->next)
4346 count++;
4347 streamer_write_uhwi (ob, count);
4348
4349 for (av = aggvals; av; av = av->next)
4350 {
4351 struct bitpack_d bp;
4352
4353 streamer_write_uhwi (ob, av->offset);
4354 streamer_write_uhwi (ob, av->index);
4355 stream_write_tree (ob, av->value, true);
4356
4357 bp = bitpack_create (ob->main_stream);
4358 bp_pack_value (&bp, av->by_ref, 1);
4359 streamer_write_bitpack (&bp);
4360 }
4361}
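/* Editor's note, summarizing the writer above rather than quoting original
   text: each record consists of the encoded node reference, the number of
   replacement values and, per value, its offset, parameter index, value tree
   and a single bitpacked by_ref flag.  read_agg_replacement_chain below reads
   the same fields back, except for the node reference, which its caller has
   already consumed.  */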
4362
4363/* Stream in the aggregate value replacement chain for NODE from IB. */
4364
4365static void
4366read_agg_replacement_chain (struct lto_input_block *ib,
4367 struct cgraph_node *node,
4368 struct data_in *data_in)
4369{
4370 struct ipa_agg_replacement_value *aggvals = NULL;
4371 unsigned int count, i;
4372
4373 count = streamer_read_uhwi (ib);
4374 for (i = 0; i <count; i++)
4375 {
4376 struct ipa_agg_replacement_value *av;
4377 struct bitpack_d bp;
4378
4379 av = ggc_alloc_ipa_agg_replacement_value ();
4380 av->offset = streamer_read_uhwi (ib);
4381 av->index = streamer_read_uhwi (ib);
4382 av->value = stream_read_tree (ib, data_in);
4383 bp = streamer_read_bitpack (ib);
4384 av->by_ref = bp_unpack_value (&bp, 1);
4385 av->next = aggvals;
4386 aggvals = av;
4387 }
4388 ipa_set_node_agg_value_chain (node, aggvals);
4389}
4390
4391/* Write all aggregate replacements for nodes in the current LTO partition. */
4392
4393void
4394ipa_prop_write_all_agg_replacement (void)
4395{
4396 struct cgraph_node *node;
4397 struct output_block *ob;
4398 unsigned int count = 0;
4399 lto_symtab_encoder_iterator lsei;
4400 lto_symtab_encoder_t encoder;
4401
4402 if (!ipa_node_agg_replacements)
4403 return;
4404
4405 ob = create_output_block (LTO_section_ipcp_transform);
4406 encoder = ob->decl_state->symtab_node_encoder;
4407 ob->cgraph_node = NULL;
4408 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4409 lsei_next_function_in_partition (&lsei))
4410 {
4411 node = lsei_cgraph_node (lsei);
4412 if (cgraph_function_with_gimple_body_p (node)
4413 && ipa_get_agg_replacements_for_node (node) != NULL)
4414 count++;
4415 }
4416
4417 streamer_write_uhwi (ob, count);
4418
4419 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4420 lsei_next_function_in_partition (&lsei))
4421 {
4422 node = lsei_cgraph_node (lsei);
4423 if (cgraph_function_with_gimple_body_p (node)
4424 && ipa_get_agg_replacements_for_node (node) != NULL)
4425 write_agg_replacement_chain (ob, node);
4426 }
4427 streamer_write_char_stream (ob->main_stream, 0);
4428 produce_asm (ob, NULL);
4429 destroy_output_block (ob);
4430}
4431
4432/* Read replacements section in file FILE_DATA of length LEN with data
4433 DATA. */
4434
4435static void
4436read_replacements_section (struct lto_file_decl_data *file_data,
4437 const char *data,
4438 size_t len)
4439{
4440 const struct lto_function_header *header =
4441 (const struct lto_function_header *) data;
4442 const int cfg_offset = sizeof (struct lto_function_header);
4443 const int main_offset = cfg_offset + header->cfg_size;
4444 const int string_offset = main_offset + header->main_size;
4445 struct data_in *data_in;
4446 struct lto_input_block ib_main;
4447 unsigned int i;
4448 unsigned int count;
4449
4450 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4451 header->main_size);
4452
4453 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4454 header->string_size, vNULL);
4455 count = streamer_read_uhwi (&ib_main);
4456
4457 for (i = 0; i < count; i++)
4458 {
4459 unsigned int index;
4460 struct cgraph_node *node;
4461 lto_symtab_encoder_t encoder;
4462
4463 index = streamer_read_uhwi (&ib_main);
4464 encoder = file_data->symtab_node_encoder;
4465 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4466 gcc_assert (node->definition);
4467 read_agg_replacement_chain (&ib_main, node, data_in);
4468 }
4469 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4470 len);
4471 lto_data_in_delete (data_in);
4472}
4473
4474/* Read IPA-CP aggregate replacements. */
4475
4476void
4477ipa_prop_read_all_agg_replacement (void)
4478{
4479 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4480 struct lto_file_decl_data *file_data;
4481 unsigned int j = 0;
4482
4483 while ((file_data = file_data_vec[j++]))
4484 {
4485 size_t len;
4486 const char *data = lto_get_section_data (file_data,
4487 LTO_section_ipcp_transform,
4488 NULL, &len);
4489 if (data)
4490 read_replacements_section (file_data, data, len);
4491 }
4492}
4493
4494/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4495 NODE. */
4496
4497static void
4498adjust_agg_replacement_values (struct cgraph_node *node,
4499 struct ipa_agg_replacement_value *aggval)
4500{
4501 struct ipa_agg_replacement_value *v;
4502 int i, c = 0, d = 0, *adj;
4503
4504 if (!node->clone.combined_args_to_skip)
4505 return;
4506
4507 for (v = aggval; v; v = v->next)
4508 {
4509 gcc_assert (v->index >= 0);
4510 if (c < v->index)
4511 c = v->index;
4512 }
4513 c++;
4514
4515 adj = XALLOCAVEC (int, c);
4516 for (i = 0; i < c; i++)
4517 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4518 {
4519 adj[i] = -1;
4520 d++;
4521 }
4522 else
4523 adj[i] = i - d;
4524
4525 for (v = aggval; v; v = v->next)
4526 v->index = adj[v->index];
4527}
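/* Editor's illustration, not part of the original source: if the clone of
   NODE skipped original parameter 1 (combined_args_to_skip = {1}), the
   remapping array built above is adj = {0, -1, 1, 2, ...}, so a replacement
   recorded for original index 2 is renumbered to index 1 in the clone.  */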
4528
4529
4530/* Function body transformation phase. */
4531
4532unsigned int
4533ipcp_transform_function (struct cgraph_node *node)
4534{
4535 vec<ipa_param_descriptor_t> descriptors = vNULL;
4536 struct param_analysis_info *parms_ainfo;
4537 struct ipa_agg_replacement_value *aggval;
4538 gimple_stmt_iterator gsi;
4539 basic_block bb;
4540 int param_count;
4541 bool cfg_changed = false, something_changed = false;
4542
4543 gcc_checking_assert (cfun);
4544 gcc_checking_assert (current_function_decl);
4545
4546 if (dump_file)
4547 fprintf (dump_file, "Modification phase of node %s/%i\n",
4548 cgraph_node_name (node), node->order);
4549
4550 aggval = ipa_get_agg_replacements_for_node (node);
4551 if (!aggval)
4552 return 0;
4553 param_count = count_formal_params (node->decl);
4554 if (param_count == 0)
4555 return 0;
4556 adjust_agg_replacement_values (node, aggval);
4557 if (dump_file)
4558 ipa_dump_agg_replacement_values (dump_file, aggval);
4559 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4560 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4561 descriptors.safe_grow_cleared (param_count);
4562 ipa_populate_param_decls (node, descriptors);
4563
4564 FOR_EACH_BB (bb)
4565 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4566 {
4567 struct ipa_agg_replacement_value *v;
4568 gimple stmt = gsi_stmt (gsi);
4569 tree rhs, val, t;
4570 HOST_WIDE_INT offset;
4571 int index;
4572 bool by_ref, vce;
4573
4574 if (!gimple_assign_load_p (stmt))
4575 continue;
4576 rhs = gimple_assign_rhs1 (stmt);
4577 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4578 continue;
4579
4580 vce = false;
4581 t = rhs;
4582 while (handled_component_p (t))
4583 {
4584 /* V_C_E can do things like convert an array of integers to one
4585 bigger integer and similar things we do not handle below. */
4586 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4587 {
4588 vce = true;
4589 break;
4590 }
4591 t = TREE_OPERAND (t, 0);
4592 }
4593 if (vce)
4594 continue;
4595
4596 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4597 rhs, &index, &offset, &by_ref))
4598 continue;
4599 for (v = aggval; v; v = v->next)
4600 if (v->index == index
4601 && v->offset == offset)
4602 break;
4603 if (!v || v->by_ref != by_ref)
4604 continue;
4605
4606 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4607 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4608 {
4609 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4610 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4611 else if (TYPE_SIZE (TREE_TYPE (rhs))
4612 == TYPE_SIZE (TREE_TYPE (v->value)))
4613 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4614 else
4615 {
4616 if (dump_file)
4617 {
4618 fprintf (dump_file, " const ");
4619 print_generic_expr (dump_file, v->value, 0);
4620 fprintf (dump_file, " can't be converted to type of ");
4621 print_generic_expr (dump_file, rhs, 0);
4622 fprintf (dump_file, "\n");
4623 }
4624 continue;
4625 }
4626 }
4627 else
4628 val = v->value;
4629
4630 if (dump_file && (dump_flags & TDF_DETAILS))
4631 {
4632 fprintf (dump_file, "Modifying stmt:\n ");
4633 print_gimple_stmt (dump_file, stmt, 0, 0);
4634 }
4635 gimple_assign_set_rhs_from_tree (&gsi, val);
4636 update_stmt (stmt);
4637
4638 if (dump_file && (dump_flags & TDF_DETAILS))
4639 {
4640 fprintf (dump_file, "into:\n ");
4641 print_gimple_stmt (dump_file, stmt, 0, 0);
4642 fprintf (dump_file, "\n");
4643 }
4644
4645 something_changed = true;
4646 if (maybe_clean_eh_stmt (stmt)
4647 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4648 cfg_changed = true;
4649 }
4650
4651 (*ipa_node_agg_replacements)[node->uid] = NULL;
4652 free_parms_ainfo (parms_ainfo, param_count);
4653 descriptors.release ();
4654
4655 if (!something_changed)
4656 return 0;
4657 else if (cfg_changed)
4658 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4659 else
4660 return TODO_update_ssa_only_virtuals;
4661}
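/* Editor's illustration, not part of the original source: if the aggregate
   replacements of NODE record that the memory pointed to by parameter p holds
   the constant 4 at offset 0 (with by_ref set), the loop above rewrites a load
   such as

     tmp_1 = p_2(D)->f;

   into

     tmp_1 = 4;

   inserting a NOP_EXPR or VIEW_CONVERT_EXPR around the value when the recorded
   constant and the loaded type differ but are convertible or size-equal.  */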