]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
Indirect inlining of targets from references of global constants
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
818ab71a 2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
0f378cb5 48#include "ipa-inline.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
56/* Function summary where the parameter infos are actually stored. */
57ipa_node_params_t *ipa_node_params_sum = NULL;
04be694e
MJ
58/* Vector of IPA-CP transformation data for each clone. */
59vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
771578a0 60/* Vector where the parameter infos are actually stored. */
84562394 61vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
771578a0
MJ
62
63/* Holders of ipa cgraph hooks: */
e2c9111c 64static struct cgraph_edge_hook_list *edge_removal_hook_holder;
e2c9111c 65static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
40982661 66static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 67
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
79
80/* Allocation pool for reference descriptions. */
81
fb0b2914 82static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
fcb87c50 83 ("IPA-PROP ref descriptions");
4502fe8d 84
5fe8e757
MJ
85/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88static bool
89ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90{
67348ccc 91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
92
93 if (!fs_opts)
94 return false;
2bf86c84 95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
96}
97
be95e2b9
MJ
98/* Return index of the formal whose tree is PTREE in function which corresponds
99 to INFO. */
100
d044dd17 101static int
84562394 102ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
103{
104 int i, count;
105
9771b263 106 count = descriptors.length ();
518dc859 107 for (i = 0; i < count; i++)
9771b263 108 if (descriptors[i].decl == ptree)
518dc859
RL
109 return i;
110
111 return -1;
112}
113
d044dd17
MJ
114/* Return index of the formal whose tree is PTREE in function which corresponds
115 to INFO. */
116
117int
118ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119{
120 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121}
122
123/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
124 NODE. */
be95e2b9 125
f8e2a1ed
MJ
126static void
127ipa_populate_param_decls (struct cgraph_node *node,
84562394 128 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
129{
130 tree fndecl;
131 tree fnargs;
132 tree parm;
133 int param_num;
3e293154 134
67348ccc 135 fndecl = node->decl;
0e8853ee 136 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
137 fnargs = DECL_ARGUMENTS (fndecl);
138 param_num = 0;
910ad8de 139 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 140 {
9771b263 141 descriptors[param_num].decl = parm;
b4c9af96
RB
142 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
143 true);
518dc859
RL
144 param_num++;
145 }
146}
147
3f84bf08
MJ
148/* Return how many formal parameters FNDECL has. */
149
fd29c024 150int
310bc633 151count_formal_params (tree fndecl)
3f84bf08
MJ
152{
153 tree parm;
154 int count = 0;
0e8853ee 155 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 156
910ad8de 157 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
158 count++;
159
160 return count;
161}
162
0e8853ee
JH
163/* Return the declaration of Ith formal parameter of the function corresponding
164 to INFO. Note there is no setter function as this array is built just once
165 using ipa_initialize_node_params. */
166
167void
168ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
169{
170 fprintf (file, "param #%i", i);
171 if (info->descriptors[i].decl)
172 {
173 fprintf (file, " ");
174 print_generic_expr (file, info->descriptors[i].decl, 0);
175 }
176}
177
178/* Initialize the ipa_node_params structure associated with NODE
179 to hold PARAM_COUNT parameters. */
180
181void
182ipa_alloc_node_params (struct cgraph_node *node, int param_count)
183{
184 struct ipa_node_params *info = IPA_NODE_REF (node);
185
186 if (!info->descriptors.exists () && param_count)
187 info->descriptors.safe_grow_cleared (param_count);
188}
189
f8e2a1ed
MJ
190/* Initialize the ipa_node_params structure associated with NODE by counting
191 the function parameters, creating the descriptors and populating their
192 param_decls. */
be95e2b9 193
f8e2a1ed
MJ
194void
195ipa_initialize_node_params (struct cgraph_node *node)
196{
197 struct ipa_node_params *info = IPA_NODE_REF (node);
198
9771b263 199 if (!info->descriptors.exists ())
f8e2a1ed 200 {
67348ccc 201 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 202 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 203 }
518dc859
RL
204}
205
749aa96d
MJ
206/* Print the jump functions associated with call graph edge CS to file F. */
207
208static void
209ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
210{
211 int i, count;
212
213 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
214 for (i = 0; i < count; i++)
215 {
216 struct ipa_jump_func *jump_func;
217 enum jump_func_type type;
218
219 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
220 type = jump_func->type;
221
222 fprintf (f, " param %d: ", i);
223 if (type == IPA_JF_UNKNOWN)
224 fprintf (f, "UNKNOWN\n");
749aa96d
MJ
225 else if (type == IPA_JF_CONST)
226 {
4502fe8d 227 tree val = jump_func->value.constant.value;
749aa96d
MJ
228 fprintf (f, "CONST: ");
229 print_generic_expr (f, val, 0);
230 if (TREE_CODE (val) == ADDR_EXPR
231 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
232 {
233 fprintf (f, " -> ");
234 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
235 0);
236 }
237 fprintf (f, "\n");
238 }
749aa96d
MJ
239 else if (type == IPA_JF_PASS_THROUGH)
240 {
241 fprintf (f, "PASS THROUGH: ");
8b7773a4 242 fprintf (f, "%d, op %s",
749aa96d 243 jump_func->value.pass_through.formal_id,
5806f481 244 get_tree_code_name(jump_func->value.pass_through.operation));
749aa96d 245 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
246 {
247 fprintf (f, " ");
248 print_generic_expr (f,
249 jump_func->value.pass_through.operand, 0);
250 }
251 if (jump_func->value.pass_through.agg_preserved)
252 fprintf (f, ", agg_preserved");
3ea6239f 253 fprintf (f, "\n");
749aa96d
MJ
254 }
255 else if (type == IPA_JF_ANCESTOR)
256 {
257 fprintf (f, "ANCESTOR: ");
16998094 258 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
749aa96d
MJ
259 jump_func->value.ancestor.formal_id,
260 jump_func->value.ancestor.offset);
8b7773a4
MJ
261 if (jump_func->value.ancestor.agg_preserved)
262 fprintf (f, ", agg_preserved");
3ea6239f 263 fprintf (f, "\n");
749aa96d 264 }
8b7773a4
MJ
265
266 if (jump_func->agg.items)
267 {
268 struct ipa_agg_jf_item *item;
269 int j;
270
271 fprintf (f, " Aggregate passed by %s:\n",
272 jump_func->agg.by_ref ? "reference" : "value");
9771b263 273 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
274 {
275 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
276 item->offset);
277 if (TYPE_P (item->value))
278 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
ae7e9ddd 279 tree_to_uhwi (TYPE_SIZE (item->value)));
8b7773a4
MJ
280 else
281 {
282 fprintf (f, "cst: ");
283 print_generic_expr (f, item->value, 0);
284 }
285 fprintf (f, "\n");
286 }
287 }
44210a96
MJ
288
289 struct ipa_polymorphic_call_context *ctx
290 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
291 if (ctx && !ctx->useless_p ())
292 {
293 fprintf (f, " Context: ");
294 ctx->dump (dump_file);
295 }
04be694e
MJ
296
297 if (jump_func->alignment.known)
298 {
299 fprintf (f, " Alignment: %u, misalignment: %u\n",
300 jump_func->alignment.align,
301 jump_func->alignment.misalign);
302 }
303 else
304 fprintf (f, " Unknown alignment\n");
749aa96d
MJ
305 }
306}
307
308
be95e2b9
MJ
309/* Print the jump functions of all arguments on all call graph edges going from
310 NODE to file F. */
311
518dc859 312void
3e293154 313ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 314{
3e293154 315 struct cgraph_edge *cs;
518dc859 316
fec39fa6 317 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 318 node->order);
3e293154
MJ
319 for (cs = node->callees; cs; cs = cs->next_callee)
320 {
321 if (!ipa_edge_args_info_available_for_edge_p (cs))
322 continue;
323
749aa96d 324 fprintf (f, " callsite %s/%i -> %s/%i : \n",
2a72a953
DM
325 xstrdup_for_dump (node->name ()), node->order,
326 xstrdup_for_dump (cs->callee->name ()),
67348ccc 327 cs->callee->order);
749aa96d
MJ
328 ipa_print_node_jump_functions_for_edge (f, cs);
329 }
518dc859 330
9de04252 331 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 332 {
9de04252 333 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
3e293154 336
9de04252
MJ
337 ii = cs->indirect_info;
338 if (ii->agg_contents)
c13bc3d9 339 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 340 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 341 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
342 ii->param_index, ii->offset,
343 ii->by_ref ? "by reference" : "by_value");
344 else
85942f45
JH
345 fprintf (f, " indirect %s callsite, calling param %i, "
346 "offset " HOST_WIDE_INT_PRINT_DEC,
347 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
348 ii->offset);
9de04252 349
749aa96d
MJ
350 if (cs->call_stmt)
351 {
9de04252 352 fprintf (f, ", for stmt ");
749aa96d 353 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 354 }
749aa96d 355 else
9de04252 356 fprintf (f, "\n");
ba392339
JH
357 if (ii->polymorphic)
358 ii->context.dump (f);
749aa96d 359 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
360 }
361}
362
363/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 364
3e293154
MJ
365void
366ipa_print_all_jump_functions (FILE *f)
367{
368 struct cgraph_node *node;
369
ca30a539 370 fprintf (f, "\nJump functions:\n");
65c70e6b 371 FOR_EACH_FUNCTION (node)
3e293154
MJ
372 {
373 ipa_print_node_jump_functions (f, node);
374 }
375}
376
04be694e
MJ
377/* Set jfunc to be a know-really nothing jump function. */
378
379static void
380ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
381{
382 jfunc->type = IPA_JF_UNKNOWN;
383 jfunc->alignment.known = false;
384}
385
b8f6e610
MJ
386/* Set JFUNC to be a copy of another jmp (to be used by jump function
387 combination code). The two functions will share their rdesc. */
388
389static void
390ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
391 struct ipa_jump_func *src)
392
393{
394 gcc_checking_assert (src->type == IPA_JF_CONST);
395 dst->type = IPA_JF_CONST;
396 dst->value.constant = src->value.constant;
397}
398
7b872d9e
MJ
399/* Set JFUNC to be a constant jmp function. */
400
401static void
4502fe8d
MJ
402ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
403 struct cgraph_edge *cs)
7b872d9e
MJ
404{
405 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
406 jfunc->value.constant.value = unshare_expr_without_location (constant);
407
408 if (TREE_CODE (constant) == ADDR_EXPR
409 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
410 {
411 struct ipa_cst_ref_desc *rdesc;
4502fe8d 412
601f3293 413 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
414 rdesc->cs = cs;
415 rdesc->next_duplicate = NULL;
416 rdesc->refcount = 1;
417 jfunc->value.constant.rdesc = rdesc;
418 }
419 else
420 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
421}
422
423/* Set JFUNC to be a simple pass-through jump function. */
424static void
8b7773a4 425ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 426 bool agg_preserved)
7b872d9e
MJ
427{
428 jfunc->type = IPA_JF_PASS_THROUGH;
429 jfunc->value.pass_through.operand = NULL_TREE;
430 jfunc->value.pass_through.formal_id = formal_id;
431 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 432 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
433}
434
435/* Set JFUNC to be an arithmetic pass through jump function. */
436
437static void
438ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
439 tree operand, enum tree_code operation)
440{
441 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 442 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
443 jfunc->value.pass_through.formal_id = formal_id;
444 jfunc->value.pass_through.operation = operation;
8b7773a4 445 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
446}
447
448/* Set JFUNC to be an ancestor jump function. */
449
450static void
451ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 452 int formal_id, bool agg_preserved)
7b872d9e
MJ
453{
454 jfunc->type = IPA_JF_ANCESTOR;
455 jfunc->value.ancestor.formal_id = formal_id;
456 jfunc->value.ancestor.offset = offset;
8b7773a4 457 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
458}
459
8aab5218
MJ
460/* Get IPA BB information about the given BB. FBI is the context of analyzis
461 of this function body. */
462
463static struct ipa_bb_info *
56b40062 464ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
465{
466 gcc_checking_assert (fbi);
467 return &fbi->bb_infos[bb->index];
468}
469
f65cf2b7
MJ
470/* Structure to be passed in between detect_type_change and
471 check_stmt_for_type_change. */
472
11478306 473struct prop_type_change_info
f65cf2b7 474{
290ebcb7
MJ
475 /* Offset into the object where there is the virtual method pointer we are
476 looking for. */
477 HOST_WIDE_INT offset;
478 /* The declaration or SSA_NAME pointer of the base that we are checking for
479 type change. */
480 tree object;
f65cf2b7
MJ
481 /* Set to true if dynamic type change has been detected. */
482 bool type_maybe_changed;
483};
484
485/* Return true if STMT can modify a virtual method table pointer.
486
487 This function makes special assumptions about both constructors and
488 destructors which are all the functions that are allowed to alter the VMT
489 pointers. It assumes that destructors begin with assignment into all VMT
490 pointers and that constructors essentially look in the following way:
491
492 1) The very first thing they do is that they call constructors of ancestor
493 sub-objects that have them.
494
495 2) Then VMT pointers of this and all its ancestors is set to new values
496 corresponding to the type corresponding to the constructor.
497
498 3) Only afterwards, other stuff such as constructor of member sub-objects
499 and the code written by the user is run. Only this may include calling
500 virtual functions, directly or indirectly.
501
502 There is no way to call a constructor of an ancestor sub-object in any
503 other way.
504
505 This means that we do not have to care whether constructors get the correct
506 type information because they will always change it (in fact, if we define
507 the type to be given by the VMT pointer, it is undefined).
508
509 The most important fact to derive from the above is that if, for some
510 statement in the section 3, we try to detect whether the dynamic type has
511 changed, we can safely ignore all calls as we examine the function body
512 backwards until we reach statements in section 2 because these calls cannot
513 be ancestor constructors or destructors (if the input is not bogus) and so
514 do not change the dynamic type (this holds true only for automatically
515 allocated objects but at the moment we devirtualize only these). We then
516 must detect that statements in section 2 change the dynamic type and can try
517 to derive the new type. That is enough and we can stop, we will never see
518 the calls into constructors of sub-objects in this code. Therefore we can
519 safely ignore all call statements that we traverse.
520 */
521
522static bool
355fe088 523stmt_may_be_vtbl_ptr_store (gimple *stmt)
f65cf2b7
MJ
524{
525 if (is_gimple_call (stmt))
526 return false;
70f633c5
JH
527 if (gimple_clobber_p (stmt))
528 return false;
f65cf2b7
MJ
529 else if (is_gimple_assign (stmt))
530 {
531 tree lhs = gimple_assign_lhs (stmt);
532
0004f992
MJ
533 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
534 {
535 if (flag_strict_aliasing
536 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
537 return false;
538
539 if (TREE_CODE (lhs) == COMPONENT_REF
540 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 541 return false;
0004f992
MJ
542 /* In the future we might want to use get_base_ref_and_offset to find
543 if there is a field corresponding to the offset and if so, proceed
544 almost like if it was a component ref. */
545 }
f65cf2b7
MJ
546 }
547 return true;
548}
549
3b97a5c7
MJ
550/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
551 to check whether a particular statement may modify the virtual table
552 pointerIt stores its result into DATA, which points to a
11478306 553 prop_type_change_info structure. */
f65cf2b7
MJ
554
555static bool
556check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
557{
355fe088 558 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 559 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
560
561 if (stmt_may_be_vtbl_ptr_store (stmt))
562 {
563 tci->type_maybe_changed = true;
564 return true;
565 }
566 else
567 return false;
568}
569
058d0a90
JH
570/* See if ARG is PARAM_DECl describing instance passed by pointer
571 or reference in FUNCTION. Return false if the dynamic type may change
572 in between beggining of the function until CALL is invoked.
290ebcb7 573
058d0a90
JH
574 Generally functions are not allowed to change type of such instances,
575 but they call destructors. We assume that methods can not destroy the THIS
576 pointer. Also as a special cases, constructor and destructors may change
577 type of the THIS pointer. */
578
579static bool
355fe088 580param_type_may_change_p (tree function, tree arg, gimple *call)
058d0a90
JH
581{
582 /* Pure functions can not do any changes on the dynamic type;
583 that require writting to memory. */
584 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
585 return false;
586 /* We need to check if we are within inlined consturctor
587 or destructor (ideally we would have way to check that the
588 inline cdtor is actually working on ARG, but we don't have
589 easy tie on this, so punt on all non-pure cdtors.
590 We may also record the types of cdtors and once we know type
591 of the instance match them.
592
593 Also code unification optimizations may merge calls from
594 different blocks making return values unreliable. So
595 do nothing during late optimization. */
596 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
597 return true;
598 if (TREE_CODE (arg) == SSA_NAME
599 && SSA_NAME_IS_DEFAULT_DEF (arg)
600 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
601 {
602 /* Normal (non-THIS) argument. */
603 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
604 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
026c3cfd 605 /* THIS pointer of an method - here we want to watch constructors
058d0a90
JH
606 and destructors as those definitely may change the dynamic
607 type. */
608 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
609 && !DECL_CXX_CONSTRUCTOR_P (function)
610 && !DECL_CXX_DESTRUCTOR_P (function)
611 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
612 {
613 /* Walk the inline stack and watch out for ctors/dtors. */
614 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
615 block = BLOCK_SUPERCONTEXT (block))
00a0ea64
JJ
616 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
617 return true;
058d0a90
JH
618 return false;
619 }
620 }
621 return true;
622}
290ebcb7 623
06d65050
JH
624/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
625 callsite CALL) by looking for assignments to its virtual table pointer. If
626 it is, return true and fill in the jump function JFUNC with relevant type
627 information or set it to unknown. ARG is the object itself (not a pointer
628 to it, unless dereferenced). BASE is the base of the memory access as
058d0a90
JH
629 returned by get_ref_base_and_extent, as is the offset.
630
631 This is helper function for detect_type_change and detect_type_change_ssa
632 that does the heavy work which is usually unnecesary. */
f65cf2b7
MJ
633
634static bool
058d0a90 635detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
538dd0b7 636 gcall *call, struct ipa_jump_func *jfunc,
058d0a90 637 HOST_WIDE_INT offset)
f65cf2b7 638{
11478306 639 struct prop_type_change_info tci;
f65cf2b7 640 ao_ref ao;
70f633c5 641 bool entry_reached = false;
f65cf2b7
MJ
642
643 gcc_checking_assert (DECL_P (arg)
644 || TREE_CODE (arg) == MEM_REF
645 || handled_component_p (arg));
f65cf2b7 646
b49407f8
JH
647 comp_type = TYPE_MAIN_VARIANT (comp_type);
648
d570d364
JH
649 /* Const calls cannot call virtual methods through VMT and so type changes do
650 not matter. */
651 if (!flag_devirtualize || !gimple_vuse (call)
652 /* Be sure expected_type is polymorphic. */
653 || !comp_type
654 || TREE_CODE (comp_type) != RECORD_TYPE
655 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
656 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
657 return true;
4bf2a588 658
dd887943 659 ao_ref_init (&ao, arg);
f65cf2b7
MJ
660 ao.base = base;
661 ao.offset = offset;
662 ao.size = POINTER_SIZE;
663 ao.max_size = ao.size;
f65cf2b7 664
290ebcb7
MJ
665 tci.offset = offset;
666 tci.object = get_base_address (arg);
290ebcb7 667 tci.type_maybe_changed = false;
290ebcb7 668
f65cf2b7 669 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
70f633c5 670 &tci, NULL, &entry_reached);
f65cf2b7
MJ
671 if (!tci.type_maybe_changed)
672 return false;
673
04be694e 674 ipa_set_jf_unknown (jfunc);
f65cf2b7
MJ
675 return true;
676}
677
058d0a90
JH
678/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
679 If it is, return true and fill in the jump function JFUNC with relevant type
680 information or set it to unknown. ARG is the object itself (not a pointer
681 to it, unless dereferenced). BASE is the base of the memory access as
682 returned by get_ref_base_and_extent, as is the offset. */
683
684static bool
538dd0b7 685detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
686 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
687{
688 if (!flag_devirtualize)
689 return false;
690
691 if (TREE_CODE (base) == MEM_REF
692 && !param_type_may_change_p (current_function_decl,
693 TREE_OPERAND (base, 0),
694 call))
695 return false;
696 return detect_type_change_from_memory_writes (arg, base, comp_type,
697 call, jfunc, offset);
698}
699
f65cf2b7
MJ
700/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
701 SSA name (its dereference will become the base and the offset is assumed to
702 be zero). */
703
704static bool
06d65050 705detect_type_change_ssa (tree arg, tree comp_type,
538dd0b7 706 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
707{
708 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 709 if (!flag_devirtualize
06d65050 710 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
711 return false;
712
058d0a90
JH
713 if (!param_type_may_change_p (current_function_decl, arg, call))
714 return false;
715
f65cf2b7 716 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 717 build_int_cst (ptr_type_node, 0));
f65cf2b7 718
058d0a90
JH
719 return detect_type_change_from_memory_writes (arg, arg, comp_type,
720 call, jfunc, 0);
f65cf2b7
MJ
721}
722
fdb0e1b4
MJ
723/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
724 boolean variable pointed to by DATA. */
725
726static bool
727mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
728 void *data)
729{
730 bool *b = (bool *) data;
731 *b = true;
732 return true;
733}
734
8aab5218
MJ
735/* Return true if we have already walked so many statements in AA that we
736 should really just start giving up. */
737
738static bool
56b40062 739aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
740{
741 gcc_checking_assert (fbi);
742 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
743}
744
745/* Find the nearest valid aa status for parameter specified by INDEX that
746 dominates BB. */
747
56b40062
MJ
748static struct ipa_param_aa_status *
749find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
750 int index)
751{
752 while (true)
753 {
754 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
755 if (!bb)
756 return NULL;
757 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
758 if (!bi->param_aa_statuses.is_empty ()
759 && bi->param_aa_statuses[index].valid)
760 return &bi->param_aa_statuses[index];
761 }
762}
763
764/* Get AA status structure for the given BB and parameter with INDEX. Allocate
765 structures and/or intialize the result with a dominating description as
766 necessary. */
767
56b40062
MJ
768static struct ipa_param_aa_status *
769parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
770 int index)
771{
772 gcc_checking_assert (fbi);
773 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
774 if (bi->param_aa_statuses.is_empty ())
775 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
56b40062 776 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
8aab5218
MJ
777 if (!paa->valid)
778 {
779 gcc_checking_assert (!paa->parm_modified
780 && !paa->ref_modified
781 && !paa->pt_modified);
56b40062 782 struct ipa_param_aa_status *dom_paa;
8aab5218
MJ
783 dom_paa = find_dominating_aa_status (fbi, bb, index);
784 if (dom_paa)
785 *paa = *dom_paa;
786 else
787 paa->valid = true;
788 }
789
790 return paa;
791}
792
688010ba 793/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4 794 a value known not to be modified in this function before reaching the
8aab5218
MJ
795 statement STMT. FBI holds information about the function we have so far
796 gathered but do not survive the summary building stage. */
fdb0e1b4
MJ
797
798static bool
56b40062 799parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
355fe088 800 gimple *stmt, tree parm_load)
fdb0e1b4 801{
56b40062 802 struct ipa_param_aa_status *paa;
fdb0e1b4
MJ
803 bool modified = false;
804 ao_ref refd;
805
8aab5218
MJ
806 /* FIXME: FBI can be NULL if we are being called from outside
807 ipa_node_analysis or ipcp_transform_function, which currently happens
808 during inlining analysis. It would be great to extend fbi's lifetime and
809 always have it. Currently, we are just not afraid of too much walking in
810 that case. */
811 if (fbi)
812 {
813 if (aa_overwalked (fbi))
814 return false;
815 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
816 if (paa->parm_modified)
817 return false;
818 }
819 else
820 paa = NULL;
fdb0e1b4
MJ
821
822 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4 823 ao_ref_init (&refd, parm_load);
8aab5218
MJ
824 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
825 &modified, NULL);
826 if (fbi)
827 fbi->aa_walked += walked;
828 if (paa && modified)
829 paa->parm_modified = true;
8b7773a4 830 return !modified;
fdb0e1b4
MJ
831}
832
833/* If STMT is an assignment that loads a value from an parameter declaration,
834 return the index of the parameter in ipa_node_params which has not been
835 modified. Otherwise return -1. */
836
837static int
56b40062 838load_from_unmodified_param (struct ipa_func_body_info *fbi,
8aab5218 839 vec<ipa_param_descriptor> descriptors,
355fe088 840 gimple *stmt)
fdb0e1b4
MJ
841{
842 int index;
843 tree op1;
844
845 if (!gimple_assign_single_p (stmt))
846 return -1;
847
848 op1 = gimple_assign_rhs1 (stmt);
849 if (TREE_CODE (op1) != PARM_DECL)
850 return -1;
851
d044dd17 852 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 853 if (index < 0
8aab5218 854 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
fdb0e1b4
MJ
855 return -1;
856
857 return index;
858}
f65cf2b7 859
8aab5218
MJ
860/* Return true if memory reference REF (which must be a load through parameter
861 with INDEX) loads data that are known to be unmodified in this function
862 before reaching statement STMT. */
8b7773a4
MJ
863
864static bool
56b40062 865parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
355fe088 866 int index, gimple *stmt, tree ref)
8b7773a4 867{
56b40062 868 struct ipa_param_aa_status *paa;
8b7773a4
MJ
869 bool modified = false;
870 ao_ref refd;
871
8aab5218
MJ
872 /* FIXME: FBI can be NULL if we are being called from outside
873 ipa_node_analysis or ipcp_transform_function, which currently happens
874 during inlining analysis. It would be great to extend fbi's lifetime and
875 always have it. Currently, we are just not afraid of too much walking in
876 that case. */
877 if (fbi)
878 {
879 if (aa_overwalked (fbi))
880 return false;
881 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
882 if (paa->ref_modified)
883 return false;
884 }
885 else
886 paa = NULL;
8b7773a4 887
8aab5218 888 gcc_checking_assert (gimple_vuse (stmt));
8b7773a4 889 ao_ref_init (&refd, ref);
8aab5218
MJ
890 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
891 &modified, NULL);
892 if (fbi)
893 fbi->aa_walked += walked;
894 if (paa && modified)
895 paa->ref_modified = true;
8b7773a4
MJ
896 return !modified;
897}
898
8aab5218
MJ
899/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
900 is known to be unmodified in this function before reaching call statement
901 CALL into which it is passed. FBI describes the function body. */
8b7773a4
MJ
902
903static bool
56b40062 904parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
355fe088 905 gimple *call, tree parm)
8b7773a4
MJ
906{
907 bool modified = false;
908 ao_ref refd;
909
910 /* It's unnecessary to calculate anything about memory contnets for a const
911 function because it is not goin to use it. But do not cache the result
912 either. Also, no such calculations for non-pointers. */
913 if (!gimple_vuse (call)
8aab5218
MJ
914 || !POINTER_TYPE_P (TREE_TYPE (parm))
915 || aa_overwalked (fbi))
8b7773a4
MJ
916 return false;
917
56b40062
MJ
918 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
919 gimple_bb (call),
920 index);
8aab5218 921 if (paa->pt_modified)
8b7773a4
MJ
922 return false;
923
924 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
8aab5218
MJ
925 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
926 &modified, NULL);
927 fbi->aa_walked += walked;
8b7773a4 928 if (modified)
8aab5218 929 paa->pt_modified = true;
8b7773a4
MJ
930 return !modified;
931}
932
91bb9f80
MJ
933/* Return true if we can prove that OP is a memory reference loading
934 data from an aggregate passed as a parameter.
935
936 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
937 false if it cannot prove that the value has not been modified before the
938 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
939 if it cannot prove the value has not been modified, in that case it will
940 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
941
8b7773a4
MJ
942 INFO and PARMS_AINFO describe parameters of the current function (but the
943 latter can be NULL), STMT is the load statement. If function returns true,
944 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
945 within the aggregate and whether it is a load from a value passed by
946 reference respectively. */
947
ff302741 948bool
56b40062 949ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
ff302741 950 vec<ipa_param_descriptor> descriptors,
355fe088 951 gimple *stmt, tree op, int *index_p,
ff302741 952 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
91bb9f80 953 bool *by_ref_p, bool *guaranteed_unmodified)
8b7773a4
MJ
954{
955 int index;
956 HOST_WIDE_INT size, max_size;
ee45a32d
EB
957 bool reverse;
958 tree base
959 = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
8b7773a4
MJ
960
961 if (max_size == -1 || max_size != size || *offset_p < 0)
962 return false;
963
964 if (DECL_P (base))
965 {
d044dd17 966 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 967 if (index >= 0
8aab5218 968 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
969 {
970 *index_p = index;
971 *by_ref_p = false;
3ff2ca23
JJ
972 if (size_p)
973 *size_p = size;
91bb9f80
MJ
974 if (guaranteed_unmodified)
975 *guaranteed_unmodified = true;
8b7773a4
MJ
976 return true;
977 }
978 return false;
979 }
980
981 if (TREE_CODE (base) != MEM_REF
982 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
983 || !integer_zerop (TREE_OPERAND (base, 1)))
984 return false;
985
986 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
987 {
988 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 989 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
990 }
991 else
992 {
993 /* This branch catches situations where a pointer parameter is not a
994 gimple register, for example:
995
996 void hip7(S*) (struct S * p)
997 {
998 void (*<T2e4>) (struct S *) D.1867;
999 struct S * p.1;
1000
1001 <bb 2>:
1002 p.1_1 = p;
1003 D.1867_2 = p.1_1->f;
1004 D.1867_2 ();
1005 gdp = &p;
1006 */
1007
355fe088 1008 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1009 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1010 }
1011
91bb9f80 1012 if (index >= 0)
8b7773a4 1013 {
91bb9f80
MJ
1014 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1015 if (!data_preserved && !guaranteed_unmodified)
1016 return false;
1017
8b7773a4
MJ
1018 *index_p = index;
1019 *by_ref_p = true;
3ff2ca23
JJ
1020 if (size_p)
1021 *size_p = size;
91bb9f80
MJ
1022 if (guaranteed_unmodified)
1023 *guaranteed_unmodified = data_preserved;
8b7773a4
MJ
1024 return true;
1025 }
1026 return false;
1027}
1028
b258210c 1029/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
1030 of an assignment statement STMT, try to determine whether we are actually
1031 handling any of the following cases and construct an appropriate jump
1032 function into JFUNC if so:
1033
1034 1) The passed value is loaded from a formal parameter which is not a gimple
1035 register (most probably because it is addressable, the value has to be
1036 scalar) and we can guarantee the value has not changed. This case can
1037 therefore be described by a simple pass-through jump function. For example:
1038
1039 foo (int a)
1040 {
1041 int a.0;
1042
1043 a.0_2 = a;
1044 bar (a.0_2);
1045
1046 2) The passed value can be described by a simple arithmetic pass-through
1047 jump function. E.g.
1048
1049 foo (int a)
1050 {
1051 int D.2064;
1052
1053 D.2064_4 = a.1(D) + 4;
1054 bar (D.2064_4);
1055
1056 This case can also occur in combination of the previous one, e.g.:
1057
1058 foo (int a, int z)
1059 {
1060 int a.0;
1061 int D.2064;
1062
1063 a.0_3 = a;
1064 D.2064_4 = a.0_3 + 4;
1065 foo (D.2064_4);
1066
1067 3) The passed value is an address of an object within another one (which
1068 also passed by reference). Such situations are described by an ancestor
1069 jump function and describe situations such as:
1070
1071 B::foo() (struct B * const this)
1072 {
1073 struct A * D.1845;
1074
1075 D.1845_2 = &this_1(D)->D.1748;
1076 A::bar (D.1845_2);
1077
1078 INFO is the structure describing individual parameters access different
1079 stages of IPA optimizations. PARMS_AINFO contains the information that is
1080 only needed for intraprocedural analysis. */
685b0d13
MJ
1081
1082static void
56b40062 1083compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1084 struct ipa_node_params *info,
b258210c 1085 struct ipa_jump_func *jfunc,
355fe088 1086 gcall *call, gimple *stmt, tree name,
06d65050 1087 tree param_type)
685b0d13
MJ
1088{
1089 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 1090 tree op1, tc_ssa, base, ssa;
ee45a32d 1091 bool reverse;
685b0d13 1092 int index;
685b0d13 1093
685b0d13 1094 op1 = gimple_assign_rhs1 (stmt);
685b0d13 1095
fdb0e1b4 1096 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 1097 {
fdb0e1b4
MJ
1098 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1099 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1100 else
8aab5218 1101 index = load_from_unmodified_param (fbi, info->descriptors,
fdb0e1b4
MJ
1102 SSA_NAME_DEF_STMT (op1));
1103 tc_ssa = op1;
1104 }
1105 else
1106 {
8aab5218 1107 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
fdb0e1b4
MJ
1108 tc_ssa = gimple_assign_lhs (stmt);
1109 }
1110
1111 if (index >= 0)
1112 {
1113 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1114
b258210c 1115 if (op2)
685b0d13 1116 {
b258210c
MJ
1117 if (!is_gimple_ip_invariant (op2)
1118 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1119 && !useless_type_conversion_p (TREE_TYPE (name),
1120 TREE_TYPE (op1))))
1121 return;
1122
7b872d9e
MJ
1123 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1124 gimple_assign_rhs_code (stmt));
685b0d13 1125 }
b8f6e610 1126 else if (gimple_assign_single_p (stmt))
8b7773a4 1127 {
8aab5218 1128 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
3b97a5c7 1129 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
8b7773a4 1130 }
685b0d13
MJ
1131 return;
1132 }
1133
1134 if (TREE_CODE (op1) != ADDR_EXPR)
1135 return;
1136 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1137 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1138 return;
ee45a32d 1139 base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
32aa622c 1140 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1141 /* If this is a varying address, punt. */
1142 || max_size == -1
1143 || max_size != size)
685b0d13 1144 return;
807e902e 1145 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
f65cf2b7
MJ
1146 ssa = TREE_OPERAND (base, 0);
1147 if (TREE_CODE (ssa) != SSA_NAME
1148 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1149 || offset < 0)
685b0d13
MJ
1150 return;
1151
b8f6e610 1152 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1153 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1154 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
3b97a5c7
MJ
1155 ipa_set_ancestor_jf (jfunc, offset, index,
1156 parm_ref_data_pass_through_p (fbi, index, call, ssa));
685b0d13
MJ
1157}
1158
40591473
MJ
1159/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1160 it looks like:
1161
1162 iftmp.1_3 = &obj_2(D)->D.1762;
1163
1164 The base of the MEM_REF must be a default definition SSA NAME of a
1165 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1166 whole MEM_REF expression is returned and the offset calculated from any
1167 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1168 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1169
1170static tree
355fe088 1171get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473
MJ
1172{
1173 HOST_WIDE_INT size, max_size;
1174 tree expr, parm, obj;
ee45a32d 1175 bool reverse;
40591473
MJ
1176
1177 if (!gimple_assign_single_p (assign))
1178 return NULL_TREE;
1179 expr = gimple_assign_rhs1 (assign);
1180
1181 if (TREE_CODE (expr) != ADDR_EXPR)
1182 return NULL_TREE;
1183 expr = TREE_OPERAND (expr, 0);
1184 obj = expr;
ee45a32d 1185 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
40591473
MJ
1186
1187 if (TREE_CODE (expr) != MEM_REF
1188 /* If this is a varying address, punt. */
1189 || max_size == -1
1190 || max_size != size
1191 || *offset < 0)
1192 return NULL_TREE;
1193 parm = TREE_OPERAND (expr, 0);
1194 if (TREE_CODE (parm) != SSA_NAME
1195 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1196 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1197 return NULL_TREE;
1198
807e902e 1199 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1200 *obj_p = obj;
1201 return expr;
1202}
1203
685b0d13 1204
b258210c
MJ
1205/* Given that an actual argument is an SSA_NAME that is a result of a phi
1206 statement PHI, try to find out whether NAME is in fact a
1207 multiple-inheritance typecast from a descendant into an ancestor of a formal
1208 parameter and thus can be described by an ancestor jump function and if so,
1209 write the appropriate function into JFUNC.
1210
1211 Essentially we want to match the following pattern:
1212
1213 if (obj_2(D) != 0B)
1214 goto <bb 3>;
1215 else
1216 goto <bb 4>;
1217
1218 <bb 3>:
1219 iftmp.1_3 = &obj_2(D)->D.1762;
1220
1221 <bb 4>:
1222 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1223 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1224 return D.1879_6; */
1225
1226static void
56b40062 1227compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
8aab5218 1228 struct ipa_node_params *info,
b258210c 1229 struct ipa_jump_func *jfunc,
538dd0b7 1230 gcall *call, gphi *phi)
b258210c 1231{
40591473 1232 HOST_WIDE_INT offset;
355fe088 1233 gimple *assign, *cond;
b258210c 1234 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1235 tree tmp, parm, expr, obj;
b258210c
MJ
1236 int index, i;
1237
54e348cb 1238 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1239 return;
1240
54e348cb
MJ
1241 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1242 tmp = PHI_ARG_DEF (phi, 0);
1243 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1244 tmp = PHI_ARG_DEF (phi, 1);
1245 else
1246 return;
b258210c
MJ
1247 if (TREE_CODE (tmp) != SSA_NAME
1248 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1249 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1250 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1251 return;
1252
1253 assign = SSA_NAME_DEF_STMT (tmp);
1254 assign_bb = gimple_bb (assign);
40591473 1255 if (!single_pred_p (assign_bb))
b258210c 1256 return;
40591473
MJ
1257 expr = get_ancestor_addr_info (assign, &obj, &offset);
1258 if (!expr)
b258210c
MJ
1259 return;
1260 parm = TREE_OPERAND (expr, 0);
b258210c 1261 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
20afe640
EB
1262 if (index < 0)
1263 return;
b258210c
MJ
1264
1265 cond_bb = single_pred (assign_bb);
1266 cond = last_stmt (cond_bb);
69610617
SB
1267 if (!cond
1268 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1269 || gimple_cond_code (cond) != NE_EXPR
1270 || gimple_cond_lhs (cond) != parm
1271 || !integer_zerop (gimple_cond_rhs (cond)))
1272 return;
1273
b258210c
MJ
1274 phi_bb = gimple_bb (phi);
1275 for (i = 0; i < 2; i++)
1276 {
1277 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1278 if (pred != assign_bb && pred != cond_bb)
1279 return;
1280 }
1281
3b97a5c7
MJ
1282 ipa_set_ancestor_jf (jfunc, offset, index,
1283 parm_ref_data_pass_through_p (fbi, index, call, parm));
b258210c
MJ
1284}
1285
be95e2b9
MJ
1286/* Inspect the given TYPE and return true iff it has the same structure (the
1287 same number of fields of the same types) as a C++ member pointer. If
1288 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1289 corresponding fields there. */
1290
3e293154
MJ
1291static bool
1292type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1293{
1294 tree fld;
1295
1296 if (TREE_CODE (type) != RECORD_TYPE)
1297 return false;
1298
1299 fld = TYPE_FIELDS (type);
1300 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1301 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1302 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1303 return false;
1304
1305 if (method_ptr)
1306 *method_ptr = fld;
1307
910ad8de 1308 fld = DECL_CHAIN (fld);
8b7773a4 1309 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1310 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1311 return false;
1312 if (delta)
1313 *delta = fld;
1314
910ad8de 1315 if (DECL_CHAIN (fld))
3e293154
MJ
1316 return false;
1317
1318 return true;
1319}
1320
61502ca8 1321/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1322 return the rhs of its defining statement. Otherwise return RHS as it
1323 is. */
7ec49257
MJ
1324
1325static inline tree
1326get_ssa_def_if_simple_copy (tree rhs)
1327{
1328 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1329 {
355fe088 1330 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1331
1332 if (gimple_assign_single_p (def_stmt))
1333 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1334 else
1335 break;
7ec49257
MJ
1336 }
1337 return rhs;
1338}
1339
8b7773a4
MJ
1340/* Simple linked list, describing known contents of an aggregate beforere
1341 call. */
1342
1343struct ipa_known_agg_contents_list
1344{
1345 /* Offset and size of the described part of the aggregate. */
1346 HOST_WIDE_INT offset, size;
1347 /* Known constant value or NULL if the contents is known to be unknown. */
1348 tree constant;
1349 /* Pointer to the next structure in the list. */
1350 struct ipa_known_agg_contents_list *next;
1351};
3e293154 1352
0d48ee34
MJ
1353/* Find the proper place in linked list of ipa_known_agg_contents_list
1354 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1355 unless there is a partial overlap, in which case return NULL, or such
1356 element is already there, in which case set *ALREADY_THERE to true. */
1357
1358static struct ipa_known_agg_contents_list **
1359get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1360 HOST_WIDE_INT lhs_offset,
1361 HOST_WIDE_INT lhs_size,
1362 bool *already_there)
1363{
1364 struct ipa_known_agg_contents_list **p = list;
1365 while (*p && (*p)->offset < lhs_offset)
1366 {
1367 if ((*p)->offset + (*p)->size > lhs_offset)
1368 return NULL;
1369 p = &(*p)->next;
1370 }
1371
1372 if (*p && (*p)->offset < lhs_offset + lhs_size)
1373 {
1374 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1375 /* We already know this value is subsequently overwritten with
1376 something else. */
1377 *already_there = true;
1378 else
1379 /* Otherwise this is a partial overlap which we cannot
1380 represent. */
1381 return NULL;
1382 }
1383 return p;
1384}
1385
1386/* Build aggregate jump function from LIST, assuming there are exactly
1387 CONST_COUNT constant entries there and that th offset of the passed argument
1388 is ARG_OFFSET and store it into JFUNC. */
1389
1390static void
1391build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1392 int const_count, HOST_WIDE_INT arg_offset,
1393 struct ipa_jump_func *jfunc)
1394{
1395 vec_alloc (jfunc->agg.items, const_count);
1396 while (list)
1397 {
1398 if (list->constant)
1399 {
1400 struct ipa_agg_jf_item item;
1401 item.offset = list->offset - arg_offset;
1402 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1403 item.value = unshare_expr_without_location (list->constant);
1404 jfunc->agg.items->quick_push (item);
1405 }
1406 list = list->next;
1407 }
1408}
1409
8b7773a4
MJ
1410/* Traverse statements from CALL backwards, scanning whether an aggregate given
1411 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1412 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1413 aggregate. JFUNC is the jump function into which the constants are
1414 subsequently stored. */
be95e2b9 1415
3e293154 1416static void
538dd0b7
DM
1417determine_locally_known_aggregate_parts (gcall *call, tree arg,
1418 tree arg_type,
0d48ee34 1419 struct ipa_jump_func *jfunc)
3e293154 1420{
8b7773a4
MJ
1421 struct ipa_known_agg_contents_list *list = NULL;
1422 int item_count = 0, const_count = 0;
1423 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1424 gimple_stmt_iterator gsi;
8b7773a4
MJ
1425 tree arg_base;
1426 bool check_ref, by_ref;
1427 ao_ref r;
3e293154 1428
29799e9d
MJ
1429 if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1430 return;
1431
8b7773a4
MJ
1432 /* The function operates in three stages. First, we prepare check_ref, r,
1433 arg_base and arg_offset based on what is actually passed as an actual
1434 argument. */
3e293154 1435
85942f45 1436 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1437 {
1438 by_ref = true;
1439 if (TREE_CODE (arg) == SSA_NAME)
1440 {
1441 tree type_size;
85942f45 1442 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1443 return;
1444 check_ref = true;
1445 arg_base = arg;
1446 arg_offset = 0;
85942f45 1447 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1448 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1449 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1450 }
1451 else if (TREE_CODE (arg) == ADDR_EXPR)
1452 {
1453 HOST_WIDE_INT arg_max_size;
ee45a32d 1454 bool reverse;
8b7773a4
MJ
1455
1456 arg = TREE_OPERAND (arg, 0);
1457 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
ee45a32d 1458 &arg_max_size, &reverse);
8b7773a4
MJ
1459 if (arg_max_size == -1
1460 || arg_max_size != arg_size
1461 || arg_offset < 0)
1462 return;
1463 if (DECL_P (arg_base))
1464 {
8b7773a4 1465 check_ref = false;
0d48ee34 1466 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1467 }
1468 else
1469 return;
1470 }
1471 else
1472 return;
1473 }
1474 else
1475 {
1476 HOST_WIDE_INT arg_max_size;
ee45a32d 1477 bool reverse;
8b7773a4
MJ
1478
1479 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1480
1481 by_ref = false;
1482 check_ref = false;
1483 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
ee45a32d 1484 &arg_max_size, &reverse);
8b7773a4
MJ
1485 if (arg_max_size == -1
1486 || arg_max_size != arg_size
1487 || arg_offset < 0)
1488 return;
1489
1490 ao_ref_init (&r, arg);
1491 }
1492
1493 /* Second stage walks back the BB, looks at individual statements and as long
1494 as it is confident of how the statements affect contents of the
1495 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1496 describing it. */
1497 gsi = gsi_for_stmt (call);
726a989a
RB
1498 gsi_prev (&gsi);
1499 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1500 {
8b7773a4 1501 struct ipa_known_agg_contents_list *n, **p;
355fe088 1502 gimple *stmt = gsi_stmt (gsi);
8b7773a4
MJ
1503 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1504 tree lhs, rhs, lhs_base;
ee45a32d 1505 bool reverse;
3e293154 1506
8b7773a4 1507 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1508 continue;
8b75fc9b 1509 if (!gimple_assign_single_p (stmt))
8b7773a4 1510 break;
3e293154 1511
726a989a
RB
1512 lhs = gimple_assign_lhs (stmt);
1513 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1514 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1515 || TREE_CODE (lhs) == BIT_FIELD_REF
1516 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1517 break;
3e293154 1518
8b7773a4 1519 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
ee45a32d 1520 &lhs_max_size, &reverse);
8b7773a4 1521 if (lhs_max_size == -1
0d48ee34 1522 || lhs_max_size != lhs_size)
8b7773a4 1523 break;
3e293154 1524
8b7773a4 1525 if (check_ref)
518dc859 1526 {
8b7773a4
MJ
1527 if (TREE_CODE (lhs_base) != MEM_REF
1528 || TREE_OPERAND (lhs_base, 0) != arg_base
1529 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1530 break;
3e293154 1531 }
8b7773a4 1532 else if (lhs_base != arg_base)
774b8a55
MJ
1533 {
1534 if (DECL_P (lhs_base))
1535 continue;
1536 else
1537 break;
1538 }
3e293154 1539
0d48ee34
MJ
1540 bool already_there = false;
1541 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1542 &already_there);
1543 if (!p)
8b7773a4 1544 break;
0d48ee34
MJ
1545 if (already_there)
1546 continue;
3e293154 1547
8b7773a4
MJ
1548 rhs = get_ssa_def_if_simple_copy (rhs);
1549 n = XALLOCA (struct ipa_known_agg_contents_list);
1550 n->size = lhs_size;
1551 n->offset = lhs_offset;
1552 if (is_gimple_ip_invariant (rhs))
1553 {
1554 n->constant = rhs;
1555 const_count++;
1556 }
1557 else
1558 n->constant = NULL_TREE;
1559 n->next = *p;
1560 *p = n;
3e293154 1561
8b7773a4 1562 item_count++;
dfea20f1
MJ
1563 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1564 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1565 break;
1566 }
be95e2b9 1567
8b7773a4
MJ
1568 /* Third stage just goes over the list and creates an appropriate vector of
1569 ipa_agg_jf_item structures out of it, of sourse only if there are
1570 any known constants to begin with. */
3e293154 1571
8b7773a4 1572 if (const_count)
3e293154 1573 {
8b7773a4 1574 jfunc->agg.by_ref = by_ref;
0d48ee34 1575 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1576 }
1577}
1578
06d65050
JH
1579static tree
1580ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1581{
1582 int n;
1583 tree type = (e->callee
67348ccc 1584 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1585 : gimple_call_fntype (e->call_stmt));
1586 tree t = TYPE_ARG_TYPES (type);
1587
1588 for (n = 0; n < i; n++)
1589 {
1590 if (!t)
1591 break;
1592 t = TREE_CHAIN (t);
1593 }
1594 if (t)
1595 return TREE_VALUE (t);
1596 if (!e->callee)
1597 return NULL;
67348ccc 1598 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1599 for (n = 0; n < i; n++)
1600 {
1601 if (!t)
1602 return NULL;
1603 t = TREE_CHAIN (t);
1604 }
1605 if (t)
1606 return TREE_TYPE (t);
1607 return NULL;
1608}
1609
3e293154
MJ
1610/* Compute jump function for all arguments of callsite CS and insert the
1611 information in the jump_functions array in the ipa_edge_args corresponding
1612 to this callsite. */
be95e2b9 1613
749aa96d 1614static void
56b40062 1615ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
062c604f 1616 struct cgraph_edge *cs)
3e293154
MJ
1617{
1618 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09 1619 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
538dd0b7 1620 gcall *call = cs->call_stmt;
8b7773a4 1621 int n, arg_num = gimple_call_num_args (call);
5ce97055 1622 bool useful_context = false;
3e293154 1623
606d9a09 1624 if (arg_num == 0 || args->jump_functions)
3e293154 1625 return;
9771b263 1626 vec_safe_grow_cleared (args->jump_functions, arg_num);
5ce97055
JH
1627 if (flag_devirtualize)
1628 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
3e293154 1629
96e24d49
JJ
1630 if (gimple_call_internal_p (call))
1631 return;
5fe8e757
MJ
1632 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1633 return;
1634
8b7773a4
MJ
1635 for (n = 0; n < arg_num; n++)
1636 {
1637 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1638 tree arg = gimple_call_arg (call, n);
06d65050 1639 tree param_type = ipa_get_callee_param_type (cs, n);
5ce97055
JH
1640 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1641 {
049e6d36 1642 tree instance;
5ce97055
JH
1643 struct ipa_polymorphic_call_context context (cs->caller->decl,
1644 arg, cs->call_stmt,
049e6d36
JH
1645 &instance);
1646 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
5ce97055
JH
1647 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1648 if (!context.useless_p ())
1649 useful_context = true;
1650 }
3e293154 1651
04be694e
MJ
1652 if (POINTER_TYPE_P (TREE_TYPE(arg)))
1653 {
1654 unsigned HOST_WIDE_INT hwi_bitpos;
1655 unsigned align;
1656
5f9a167b
RB
1657 get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
1658 if (align > BITS_PER_UNIT
fda3e285
MJ
1659 && align % BITS_PER_UNIT == 0
1660 && hwi_bitpos % BITS_PER_UNIT == 0)
04be694e
MJ
1661 {
1662 jfunc->alignment.known = true;
fda3e285 1663 jfunc->alignment.align = align / BITS_PER_UNIT;
04be694e
MJ
1664 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1665 }
1666 else
1667 gcc_assert (!jfunc->alignment.known);
1668 }
1669 else
1670 gcc_assert (!jfunc->alignment.known);
1671
8b7773a4 1672 if (is_gimple_ip_invariant (arg))
4502fe8d 1673 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1674 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1675 && TREE_CODE (arg) == PARM_DECL)
1676 {
1677 int index = ipa_get_param_decl_index (info, arg);
1678
1679 gcc_assert (index >=0);
1680 /* Aggregate passed by value, check for pass-through, otherwise we
1681 will attempt to fill in aggregate contents later in this
1682 for cycle. */
8aab5218 1683 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
8b7773a4 1684 {
3b97a5c7 1685 ipa_set_jf_simple_pass_through (jfunc, index, false);
8b7773a4
MJ
1686 continue;
1687 }
1688 }
1689 else if (TREE_CODE (arg) == SSA_NAME)
1690 {
1691 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1692 {
1693 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1694 if (index >= 0)
8b7773a4 1695 {
3b97a5c7 1696 bool agg_p;
8aab5218 1697 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
3b97a5c7 1698 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
8b7773a4
MJ
1699 }
1700 }
1701 else
1702 {
355fe088 1703 gimple *stmt = SSA_NAME_DEF_STMT (arg);
8b7773a4 1704 if (is_gimple_assign (stmt))
8aab5218 1705 compute_complex_assign_jump_func (fbi, info, jfunc,
06d65050 1706 call, stmt, arg, param_type);
8b7773a4 1707 else if (gimple_code (stmt) == GIMPLE_PHI)
8aab5218 1708 compute_complex_ancestor_jump_func (fbi, info, jfunc,
538dd0b7
DM
1709 call,
1710 as_a <gphi *> (stmt));
8b7773a4
MJ
1711 }
1712 }
3e293154 1713
85942f45
JH
1714 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1715 passed (because type conversions are ignored in gimple). Usually we can
1716 safely get type from function declaration, but in case of K&R prototypes or
1717 variadic functions we can try our luck with type of the pointer passed.
1718 TODO: Since we look for actual initialization of the memory object, we may better
1719 work out the type based on the memory stores we find. */
1720 if (!param_type)
1721 param_type = TREE_TYPE (arg);
1722
8b7773a4
MJ
1723 if ((jfunc->type != IPA_JF_PASS_THROUGH
1724 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1725 && (jfunc->type != IPA_JF_ANCESTOR
1726 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1727 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
85942f45 1728 || POINTER_TYPE_P (param_type)))
0d48ee34 1729 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
8b7773a4 1730 }
5ce97055
JH
1731 if (!useful_context)
1732 vec_free (args->polymorphic_call_contexts);
3e293154
MJ
1733}
1734
749aa96d 1735/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 1736 from BB. */
749aa96d 1737
062c604f 1738static void
56b40062 1739ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
749aa96d 1740{
8aab5218
MJ
1741 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1742 int i;
749aa96d
MJ
1743 struct cgraph_edge *cs;
1744
8aab5218 1745 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 1746 {
8aab5218 1747 struct cgraph_node *callee = cs->callee;
749aa96d 1748
8aab5218
MJ
1749 if (callee)
1750 {
d52f5295 1751 callee->ultimate_alias_target ();
8aab5218
MJ
1752 /* We do not need to bother analyzing calls to unknown functions
1753 unless they may become known during lto/whopr. */
1754 if (!callee->definition && !flag_lto)
1755 continue;
1756 }
1757 ipa_compute_jump_functions_for_edge (fbi, cs);
1758 }
749aa96d
MJ
1759}
1760
8b7773a4
MJ
1761/* If STMT looks like a statement loading a value from a member pointer formal
1762 parameter, return that parameter and store the offset of the field to
1763 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1764 might be clobbered). If USE_DELTA, then we look for a use of the delta
1765 field rather than the pfn. */
be95e2b9 1766
3e293154 1767static tree
355fe088 1768ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
8b7773a4 1769 HOST_WIDE_INT *offset_p)
3e293154 1770{
8b7773a4
MJ
1771 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1772
1773 if (!gimple_assign_single_p (stmt))
1774 return NULL_TREE;
3e293154 1775
8b7773a4 1776 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1777 if (TREE_CODE (rhs) == COMPONENT_REF)
1778 {
1779 ref_field = TREE_OPERAND (rhs, 1);
1780 rhs = TREE_OPERAND (rhs, 0);
1781 }
1782 else
1783 ref_field = NULL_TREE;
d242d063 1784 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1785 return NULL_TREE;
3e293154 1786 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1787 if (TREE_CODE (rec) != ADDR_EXPR)
1788 return NULL_TREE;
1789 rec = TREE_OPERAND (rec, 0);
3e293154 1790 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1791 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1792 return NULL_TREE;
d242d063 1793 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1794
8b7773a4
MJ
1795 if (use_delta)
1796 fld = delta_field;
1797 else
1798 fld = ptr_field;
1799 if (offset_p)
1800 *offset_p = int_bit_position (fld);
1801
ae788515
EB
1802 if (ref_field)
1803 {
1804 if (integer_nonzerop (ref_offset))
1805 return NULL_TREE;
ae788515
EB
1806 return ref_field == fld ? rec : NULL_TREE;
1807 }
3e293154 1808 else
8b7773a4
MJ
1809 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1810 : NULL_TREE;
3e293154
MJ
1811}
1812
1813/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1814
3e293154
MJ
1815static bool
1816ipa_is_ssa_with_stmt_def (tree t)
1817{
1818 if (TREE_CODE (t) == SSA_NAME
1819 && !SSA_NAME_IS_DEFAULT_DEF (t))
1820 return true;
1821 else
1822 return false;
1823}
1824
40591473
MJ
1825/* Find the indirect call graph edge corresponding to STMT and mark it as a
1826 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1827 indirect call graph edge. */
be95e2b9 1828
40591473 1829static struct cgraph_edge *
538dd0b7
DM
1830ipa_note_param_call (struct cgraph_node *node, int param_index,
1831 gcall *stmt)
3e293154 1832{
e33c6cd6 1833 struct cgraph_edge *cs;
3e293154 1834
d52f5295 1835 cs = node->get_edge (stmt);
b258210c 1836 cs->indirect_info->param_index = param_index;
8b7773a4 1837 cs->indirect_info->agg_contents = 0;
c13bc3d9 1838 cs->indirect_info->member_ptr = 0;
91bb9f80 1839 cs->indirect_info->guaranteed_unmodified = 0;
40591473 1840 return cs;
3e293154
MJ
1841}
1842
e33c6cd6 1843/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1844 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1845 intermediate information about each formal parameter. Currently it checks
1846 whether the call calls a pointer that is a formal parameter and if so, the
1847 parameter is marked with the called flag and an indirect call graph edge
1848 describing the call is created. This is very simple for ordinary pointers
1849 represented in SSA but not-so-nice when it comes to member pointers. The
1850 ugly part of this function does nothing more than trying to match the
1851 pattern of such a call. An example of such a pattern is the gimple dump
1852 below, the call is on the last line:
3e293154 1853
ae788515
EB
1854 <bb 2>:
1855 f$__delta_5 = f.__delta;
1856 f$__pfn_24 = f.__pfn;
1857
1858 or
3e293154 1859 <bb 2>:
d242d063
MJ
1860 f$__delta_5 = MEM[(struct *)&f];
1861 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1862
ae788515 1863 and a few lines below:
8aa29647
MJ
1864
1865 <bb 5>
3e293154
MJ
1866 D.2496_3 = (int) f$__pfn_24;
1867 D.2497_4 = D.2496_3 & 1;
1868 if (D.2497_4 != 0)
1869 goto <bb 3>;
1870 else
1871 goto <bb 4>;
1872
8aa29647 1873 <bb 6>:
3e293154
MJ
1874 D.2500_7 = (unsigned int) f$__delta_5;
1875 D.2501_8 = &S + D.2500_7;
1876 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1877 D.2503_10 = *D.2502_9;
1878 D.2504_12 = f$__pfn_24 + -1;
1879 D.2505_13 = (unsigned int) D.2504_12;
1880 D.2506_14 = D.2503_10 + D.2505_13;
1881 D.2507_15 = *D.2506_14;
1882 iftmp.11_16 = (String:: *) D.2507_15;
1883
8aa29647 1884 <bb 7>:
3e293154
MJ
1885 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1886 D.2500_19 = (unsigned int) f$__delta_5;
1887 D.2508_20 = &S + D.2500_19;
1888 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1889
1890 Such patterns are results of simple calls to a member pointer:
1891
1892 int doprinting (int (MyString::* f)(int) const)
1893 {
1894 MyString S ("somestring");
1895
1896 return (S.*f)(4);
1897 }
8b7773a4
MJ
1898
1899 Moreover, the function also looks for called pointers loaded from aggregates
1900 passed by value or reference. */
3e293154
MJ
1901
1902static void
56b40062 1903ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
8aab5218 1904 tree target)
3e293154 1905{
8aab5218 1906 struct ipa_node_params *info = fbi->info;
8b7773a4
MJ
1907 HOST_WIDE_INT offset;
1908 bool by_ref;
3e293154 1909
3e293154
MJ
1910 if (SSA_NAME_IS_DEFAULT_DEF (target))
1911 {
b258210c 1912 tree var = SSA_NAME_VAR (target);
8aab5218 1913 int index = ipa_get_param_decl_index (info, var);
3e293154 1914 if (index >= 0)
8aab5218 1915 ipa_note_param_call (fbi->node, index, call);
3e293154
MJ
1916 return;
1917 }
1918
8aab5218 1919 int index;
355fe088 1920 gimple *def = SSA_NAME_DEF_STMT (target);
91bb9f80 1921 bool guaranteed_unmodified;
8b7773a4 1922 if (gimple_assign_single_p (def)
ff302741
PB
1923 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1924 gimple_assign_rhs1 (def), &index, &offset,
91bb9f80 1925 NULL, &by_ref, &guaranteed_unmodified))
8b7773a4 1926 {
8aab5218 1927 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
1928 cs->indirect_info->offset = offset;
1929 cs->indirect_info->agg_contents = 1;
1930 cs->indirect_info->by_ref = by_ref;
91bb9f80 1931 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
8b7773a4
MJ
1932 return;
1933 }
1934
3e293154
MJ
1935 /* Now we need to try to match the complex pattern of calling a member
1936 pointer. */
8b7773a4
MJ
1937 if (gimple_code (def) != GIMPLE_PHI
1938 || gimple_phi_num_args (def) != 2
1939 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1940 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1941 return;
1942
3e293154
MJ
1943 /* First, we need to check whether one of these is a load from a member
1944 pointer that is a parameter to this function. */
8aab5218
MJ
1945 tree n1 = PHI_ARG_DEF (def, 0);
1946 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1947 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 1948 return;
355fe088
TS
1949 gimple *d1 = SSA_NAME_DEF_STMT (n1);
1950 gimple *d2 = SSA_NAME_DEF_STMT (n2);
3e293154 1951
8aab5218
MJ
1952 tree rec;
1953 basic_block bb, virt_bb;
1954 basic_block join = gimple_bb (def);
8b7773a4 1955 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1956 {
8b7773a4 1957 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1958 return;
1959
8aa29647 1960 bb = EDGE_PRED (join, 0)->src;
726a989a 1961 virt_bb = gimple_bb (d2);
3e293154 1962 }
8b7773a4 1963 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1964 {
8aa29647 1965 bb = EDGE_PRED (join, 1)->src;
726a989a 1966 virt_bb = gimple_bb (d1);
3e293154
MJ
1967 }
1968 else
1969 return;
1970
1971 /* Second, we need to check that the basic blocks are laid out in the way
1972 corresponding to the pattern. */
1973
3e293154
MJ
1974 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1975 || single_pred (virt_bb) != bb
1976 || single_succ (virt_bb) != join)
1977 return;
1978
1979 /* Third, let's see that the branching is done depending on the least
1980 significant bit of the pfn. */
1981
355fe088 1982 gimple *branch = last_stmt (bb);
8aa29647 1983 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1984 return;
1985
12430896
RG
1986 if ((gimple_cond_code (branch) != NE_EXPR
1987 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1988 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1989 return;
3e293154 1990
8aab5218 1991 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
1992 if (!ipa_is_ssa_with_stmt_def (cond))
1993 return;
1994
726a989a 1995 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1996 if (!is_gimple_assign (def)
726a989a
RB
1997 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1998 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1999 return;
726a989a
RB
2000
2001 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2002 if (!ipa_is_ssa_with_stmt_def (cond))
2003 return;
2004
726a989a 2005 def = SSA_NAME_DEF_STMT (cond);
3e293154 2006
8b75fc9b
MJ
2007 if (is_gimple_assign (def)
2008 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2009 {
726a989a 2010 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2011 if (!ipa_is_ssa_with_stmt_def (cond))
2012 return;
726a989a 2013 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2014 }
2015
8aab5218 2016 tree rec2;
6f7b8b70
RE
2017 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2018 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2019 == ptrmemfunc_vbit_in_delta),
2020 NULL);
3e293154
MJ
2021 if (rec != rec2)
2022 return;
2023
2024 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2025 if (index >= 0
8aab5218 2026 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2027 {
8aab5218 2028 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2029 cs->indirect_info->offset = offset;
2030 cs->indirect_info->agg_contents = 1;
c13bc3d9 2031 cs->indirect_info->member_ptr = 1;
91bb9f80 2032 cs->indirect_info->guaranteed_unmodified = 1;
8b7773a4 2033 }
3e293154
MJ
2034
2035 return;
2036}
2037
b258210c
MJ
2038/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2039 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2040 FBI->node (described by FBI->info), create a call note for the
2041 statement. */
b258210c
MJ
2042
2043static void
56b40062 2044ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
538dd0b7 2045 gcall *call, tree target)
b258210c
MJ
2046{
2047 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2048 int index;
40591473 2049 HOST_WIDE_INT anc_offset;
b258210c 2050
05842ff5
MJ
2051 if (!flag_devirtualize)
2052 return;
2053
40591473 2054 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2055 return;
2056
8aab5218 2057 struct ipa_node_params *info = fbi->info;
40591473
MJ
2058 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2059 {
8aab5218 2060 struct ipa_jump_func jfunc;
40591473
MJ
2061 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2062 return;
b258210c 2063
40591473
MJ
2064 anc_offset = 0;
2065 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2066 gcc_assert (index >= 0);
06d65050
JH
2067 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2068 call, &jfunc))
40591473
MJ
2069 return;
2070 }
2071 else
2072 {
8aab5218 2073 struct ipa_jump_func jfunc;
355fe088 2074 gimple *stmt = SSA_NAME_DEF_STMT (obj);
40591473
MJ
2075 tree expr;
2076
2077 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2078 if (!expr)
2079 return;
2080 index = ipa_get_param_decl_index (info,
2081 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2082 gcc_assert (index >= 0);
06d65050
JH
2083 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2084 call, &jfunc, anc_offset))
40591473
MJ
2085 return;
2086 }
2087
8aab5218
MJ
2088 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2089 struct cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2090 ii->offset = anc_offset;
ae7e9ddd 2091 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2092 ii->otr_type = obj_type_ref_class (target);
40591473 2093 ii->polymorphic = 1;
b258210c
MJ
2094}
2095
2096/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2097 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2098 containing intermediate information about each formal parameter. */
b258210c
MJ
2099
2100static void
56b40062 2101ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2102{
2103 tree target = gimple_call_fn (call);
b786d31f
JH
2104
2105 if (!target
2106 || (TREE_CODE (target) != SSA_NAME
2107 && !virtual_method_call_p (target)))
2108 return;
b258210c 2109
7d0aa05b 2110 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2111 /* If we previously turned the call into a direct call, there is
2112 no need to analyze. */
b786d31f 2113 if (cs && !cs->indirect_unknown_callee)
25583c4f 2114 return;
7d0aa05b 2115
a5b58b28 2116 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2117 {
7d0aa05b
JH
2118 tree instance;
2119 tree target = gimple_call_fn (call);
6f8091fc
JH
2120 ipa_polymorphic_call_context context (current_function_decl,
2121 target, call, &instance);
7d0aa05b 2122
ba392339
JH
2123 gcc_checking_assert (cs->indirect_info->otr_type
2124 == obj_type_ref_class (target));
2125 gcc_checking_assert (cs->indirect_info->otr_token
2126 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2127
29c43c83
JH
2128 cs->indirect_info->vptr_changed
2129 = !context.get_dynamic_type (instance,
2130 OBJ_TYPE_REF_OBJECT (target),
2131 obj_type_ref_class (target), call);
0127c169 2132 cs->indirect_info->context = context;
7d0aa05b
JH
2133 }
2134
b258210c 2135 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2136 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2137 else if (virtual_method_call_p (target))
8aab5218 2138 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2139}
2140
2141
e33c6cd6 2142/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2143 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2144 formal parameters are called. */
be95e2b9 2145
3e293154 2146static void
355fe088 2147ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2148{
726a989a 2149 if (is_gimple_call (stmt))
538dd0b7 2150 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2151}
2152
2153/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2154 If OP is a parameter declaration, mark it as used in the info structure
2155 passed in DATA. */
2156
2157static bool
355fe088 2158visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2159{
2160 struct ipa_node_params *info = (struct ipa_node_params *) data;
2161
2162 op = get_base_address (op);
2163 if (op
2164 && TREE_CODE (op) == PARM_DECL)
2165 {
2166 int index = ipa_get_param_decl_index (info, op);
2167 gcc_assert (index >= 0);
310bc633 2168 ipa_set_param_used (info, index, true);
062c604f
MJ
2169 }
2170
2171 return false;
3e293154
MJ
2172}
2173
8aab5218
MJ
2174/* Scan the statements in BB and inspect the uses of formal parameters. Store
2175 the findings in various structures of the associated ipa_node_params
2176 structure, such as parameter flags, notes etc. FBI holds various data about
2177 the function being analyzed. */
be95e2b9 2178
062c604f 2179static void
56b40062 2180ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2181{
726a989a 2182 gimple_stmt_iterator gsi;
8aab5218
MJ
2183 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2184 {
355fe088 2185 gimple *stmt = gsi_stmt (gsi);
3e293154 2186
8aab5218
MJ
2187 if (is_gimple_debug (stmt))
2188 continue;
3e293154 2189
8aab5218
MJ
2190 ipa_analyze_stmt_uses (fbi, stmt);
2191 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2192 visit_ref_for_mod_analysis,
2193 visit_ref_for_mod_analysis,
2194 visit_ref_for_mod_analysis);
5fe8e757 2195 }
8aab5218
MJ
2196 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2197 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2198 visit_ref_for_mod_analysis,
2199 visit_ref_for_mod_analysis,
2200 visit_ref_for_mod_analysis);
2201}
2202
2203/* Calculate controlled uses of parameters of NODE. */
2204
2205static void
2206ipa_analyze_controlled_uses (struct cgraph_node *node)
2207{
2208 struct ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2209
8aab5218 2210 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2211 {
2212 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2213 int controlled_uses = 0;
2214
062c604f
MJ
2215 /* For SSA regs see if parameter is used. For non-SSA we compute
2216 the flag during modification analysis. */
4502fe8d
MJ
2217 if (is_gimple_reg (parm))
2218 {
67348ccc 2219 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2220 parm);
2221 if (ddef && !has_zero_uses (ddef))
2222 {
2223 imm_use_iterator imm_iter;
2224 use_operand_p use_p;
2225
2226 ipa_set_param_used (info, i, true);
2227 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2228 if (!is_gimple_call (USE_STMT (use_p)))
2229 {
c6de6665
JJ
2230 if (!is_gimple_debug (USE_STMT (use_p)))
2231 {
2232 controlled_uses = IPA_UNDESCRIBED_USE;
2233 break;
2234 }
4502fe8d
MJ
2235 }
2236 else
2237 controlled_uses++;
2238 }
2239 else
2240 controlled_uses = 0;
2241 }
2242 else
2243 controlled_uses = IPA_UNDESCRIBED_USE;
2244 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2245 }
8aab5218 2246}
062c604f 2247
8aab5218 2248/* Free stuff in BI. */
062c604f 2249
8aab5218
MJ
2250static void
2251free_ipa_bb_info (struct ipa_bb_info *bi)
2252{
2253 bi->cg_edges.release ();
2254 bi->param_aa_statuses.release ();
3e293154
MJ
2255}
2256
8aab5218 2257/* Dominator walker driving the analysis. */
2c9561b5 2258
8aab5218 2259class analysis_dom_walker : public dom_walker
2c9561b5 2260{
8aab5218 2261public:
56b40062 2262 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2263 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2264
3daacdcd 2265 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2266
2267private:
56b40062 2268 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2269};
2270
3daacdcd 2271edge
8aab5218
MJ
2272analysis_dom_walker::before_dom_children (basic_block bb)
2273{
2274 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2275 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2276 return NULL;
2c9561b5
MJ
2277}
2278
c3431191
ML
2279/* Release body info FBI. */
2280
2281void
2282ipa_release_body_info (struct ipa_func_body_info *fbi)
2283{
2284 int i;
2285 struct ipa_bb_info *bi;
2286
2287 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2288 free_ipa_bb_info (bi);
2289 fbi->bb_infos.release ();
2290}
2291
026c3cfd 2292/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2293 of NODE, analyze their uses and compute jump functions associated
2294 with actual arguments of calls from within NODE. */
062c604f
MJ
2295
2296void
2297ipa_analyze_node (struct cgraph_node *node)
2298{
56b40062 2299 struct ipa_func_body_info fbi;
57dbdc5a 2300 struct ipa_node_params *info;
062c604f 2301
57dbdc5a
MJ
2302 ipa_check_create_node_params ();
2303 ipa_check_create_edge_args ();
2304 info = IPA_NODE_REF (node);
8aab5218
MJ
2305
2306 if (info->analysis_done)
2307 return;
2308 info->analysis_done = 1;
2309
2310 if (ipa_func_spec_opts_forbid_analysis_p (node))
2311 {
2312 for (int i = 0; i < ipa_get_param_count (info); i++)
2313 {
2314 ipa_set_param_used (info, i, true);
2315 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2316 }
2317 return;
2318 }
2319
2320 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2321 push_cfun (func);
2322 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2323 ipa_initialize_node_params (node);
8aab5218 2324 ipa_analyze_controlled_uses (node);
062c604f 2325
8aab5218
MJ
2326 fbi.node = node;
2327 fbi.info = IPA_NODE_REF (node);
2328 fbi.bb_infos = vNULL;
2329 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2330 fbi.param_count = ipa_get_param_count (info);
2331 fbi.aa_walked = 0;
062c604f 2332
8aab5218
MJ
2333 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2334 {
2335 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2336 bi->cg_edges.safe_push (cs);
2337 }
062c604f 2338
8aab5218
MJ
2339 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2340 {
2341 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2342 bi->cg_edges.safe_push (cs);
2343 }
2344
2345 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2346
c3431191 2347 ipa_release_body_info (&fbi);
8aab5218 2348 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2349 pop_cfun ();
062c604f 2350}
062c604f 2351
be95e2b9 2352/* Update the jump functions associated with call graph edge E when the call
3e293154 2353 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2354 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2355
3e293154
MJ
2356static void
2357update_jump_functions_after_inlining (struct cgraph_edge *cs,
2358 struct cgraph_edge *e)
2359{
2360 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2361 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2362 int count = ipa_get_cs_argument_count (args);
2363 int i;
2364
2365 for (i = 0; i < count; i++)
2366 {
b258210c 2367 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
5ce97055
JH
2368 struct ipa_polymorphic_call_context *dst_ctx
2369 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2370
685b0d13
MJ
2371 if (dst->type == IPA_JF_ANCESTOR)
2372 {
b258210c 2373 struct ipa_jump_func *src;
8b7773a4 2374 int dst_fid = dst->value.ancestor.formal_id;
5ce97055
JH
2375 struct ipa_polymorphic_call_context *src_ctx
2376 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2377
b258210c
MJ
2378 /* Variable number of arguments can cause havoc if we try to access
2379 one that does not exist in the inlined edge. So make sure we
2380 don't. */
8b7773a4 2381 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2382 {
04be694e 2383 ipa_set_jf_unknown (dst);
b258210c
MJ
2384 continue;
2385 }
2386
8b7773a4
MJ
2387 src = ipa_get_ith_jump_func (top, dst_fid);
2388
5ce97055
JH
2389 if (src_ctx && !src_ctx->useless_p ())
2390 {
2391 struct ipa_polymorphic_call_context ctx = *src_ctx;
2392
2393 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2394 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2395 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2396 ctx.offset_by (dst->value.ancestor.offset);
2397 if (!ctx.useless_p ())
2398 {
a7d1f3fe
ML
2399 if (!dst_ctx)
2400 {
2401 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2402 count);
2403 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2404 }
2405
2406 dst_ctx->combine_with (ctx);
5ce97055
JH
2407 }
2408 }
2409
8b7773a4
MJ
2410 if (src->agg.items
2411 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2412 {
2413 struct ipa_agg_jf_item *item;
2414 int j;
2415
2416 /* Currently we do not produce clobber aggregate jump functions,
2417 replace with merging when we do. */
2418 gcc_assert (!dst->agg.items);
2419
9771b263 2420 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2421 dst->agg.by_ref = src->agg.by_ref;
9771b263 2422 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2423 item->offset -= dst->value.ancestor.offset;
2424 }
2425
3b97a5c7
MJ
2426 if (src->type == IPA_JF_PASS_THROUGH
2427 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2428 {
2429 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2430 dst->value.ancestor.agg_preserved &=
2431 src->value.pass_through.agg_preserved;
2432 }
b258210c
MJ
2433 else if (src->type == IPA_JF_ANCESTOR)
2434 {
2435 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2436 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2437 dst->value.ancestor.agg_preserved &=
2438 src->value.ancestor.agg_preserved;
b258210c
MJ
2439 }
2440 else
04be694e 2441 ipa_set_jf_unknown (dst);
b258210c
MJ
2442 }
2443 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2444 {
b258210c
MJ
2445 struct ipa_jump_func *src;
2446 /* We must check range due to calls with variable number of arguments
2447 and we cannot combine jump functions with operations. */
2448 if (dst->value.pass_through.operation == NOP_EXPR
2449 && (dst->value.pass_through.formal_id
2450 < ipa_get_cs_argument_count (top)))
2451 {
8b7773a4
MJ
2452 int dst_fid = dst->value.pass_through.formal_id;
2453 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2454 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
5ce97055
JH
2455 struct ipa_polymorphic_call_context *src_ctx
2456 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2457
5ce97055
JH
2458 if (src_ctx && !src_ctx->useless_p ())
2459 {
2460 struct ipa_polymorphic_call_context ctx = *src_ctx;
2461
2462 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2463 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2464 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2465 if (!ctx.useless_p ())
2466 {
2467 if (!dst_ctx)
2468 {
2469 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2470 count);
2471 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2472 }
2473 dst_ctx->combine_with (ctx);
2474 }
2475 }
b8f6e610
MJ
2476 switch (src->type)
2477 {
2478 case IPA_JF_UNKNOWN:
04be694e 2479 ipa_set_jf_unknown (dst);
b8f6e610 2480 break;
b8f6e610
MJ
2481 case IPA_JF_CONST:
2482 ipa_set_jf_cst_copy (dst, src);
2483 break;
2484
2485 case IPA_JF_PASS_THROUGH:
2486 {
2487 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2488 enum tree_code operation;
2489 operation = ipa_get_jf_pass_through_operation (src);
2490
2491 if (operation == NOP_EXPR)
2492 {
3b97a5c7 2493 bool agg_p;
b8f6e610
MJ
2494 agg_p = dst_agg_p
2495 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2496 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610
MJ
2497 }
2498 else
2499 {
2500 tree operand = ipa_get_jf_pass_through_operand (src);
2501 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2502 operation);
2503 }
2504 break;
2505 }
2506 case IPA_JF_ANCESTOR:
2507 {
3b97a5c7 2508 bool agg_p;
b8f6e610
MJ
2509 agg_p = dst_agg_p
2510 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2511 ipa_set_ancestor_jf (dst,
2512 ipa_get_jf_ancestor_offset (src),
b8f6e610 2513 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2514 agg_p);
b8f6e610
MJ
2515 break;
2516 }
2517 default:
2518 gcc_unreachable ();
2519 }
8b7773a4
MJ
2520
2521 if (src->agg.items
b8f6e610 2522 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2523 {
2524 /* Currently we do not produce clobber aggregate jump
2525 functions, replace with merging when we do. */
2526 gcc_assert (!dst->agg.items);
2527
2528 dst->agg.by_ref = src->agg.by_ref;
9771b263 2529 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2530 }
b258210c
MJ
2531 }
2532 else
04be694e 2533 ipa_set_jf_unknown (dst);
3e293154 2534 }
b258210c
MJ
2535 }
2536}
2537
5ce97055
JH
2538/* If TARGET is an addr_expr of a function declaration, make it the
2539 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2540 Otherwise, return NULL. */
b258210c 2541
3949c4a7 2542struct cgraph_edge *
5ce97055
JH
2543ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2544 bool speculative)
b258210c
MJ
2545{
2546 struct cgraph_node *callee;
0f378cb5 2547 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2548 bool unreachable = false;
b258210c 2549
ceeffab0
MJ
2550 if (TREE_CODE (target) == ADDR_EXPR)
2551 target = TREE_OPERAND (target, 0);
b258210c 2552 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2553 {
2554 target = canonicalize_constructor_val (target, NULL);
2555 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2556 {
db66bf68
JH
2557 /* Member pointer call that goes through a VMT lookup. */
2558 if (ie->indirect_info->member_ptr
2559 /* Or if target is not an invariant expression and we do not
2560 know if it will evaulate to function at runtime.
2561 This can happen when folding through &VAR, where &VAR
2562 is IP invariant, but VAR itself is not.
2563
2564 TODO: Revisit this when GCC 5 is branched. It seems that
2565 member_ptr check is not needed and that we may try to fold
2566 the expression and see if VAR is readonly. */
2567 || !is_gimple_ip_invariant (target))
2568 {
2569 if (dump_enabled_p ())
2570 {
2571 location_t loc = gimple_location_safe (ie->call_stmt);
2572 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2573 "discovered direct call non-invariant "
2574 "%s/%i\n",
2575 ie->caller->name (), ie->caller->order);
2576 }
2577 return NULL;
2578 }
2579
c13bc3d9 2580
2b5f0895
XDL
2581 if (dump_enabled_p ())
2582 {
807b7d62
ML
2583 location_t loc = gimple_location_safe (ie->call_stmt);
2584 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2585 "discovered direct call to non-function in %s/%i, "
2586 "making it __builtin_unreachable\n",
2587 ie->caller->name (), ie->caller->order);
2b5f0895 2588 }
3c9e6fca 2589
48b1474e 2590 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2591 callee = cgraph_node::get_create (target);
48b1474e 2592 unreachable = true;
a0a7b611 2593 }
48b1474e 2594 else
d52f5295 2595 callee = cgraph_node::get (target);
a0a7b611 2596 }
48b1474e 2597 else
d52f5295 2598 callee = cgraph_node::get (target);
a0a7b611
JH
2599
2600 /* Because may-edges are not explicitely represented and vtable may be external,
2601 we may create the first reference to the object in the unit. */
2602 if (!callee || callee->global.inlined_to)
2603 {
a0a7b611
JH
2604
2605 /* We are better to ensure we can refer to it.
2606 In the case of static functions we are out of luck, since we already
2607 removed its body. In the case of public functions we may or may
2608 not introduce the reference. */
2609 if (!canonicalize_constructor_val (target, NULL)
2610 || !TREE_PUBLIC (target))
2611 {
2612 if (dump_file)
2613 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2614 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2a72a953 2615 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2616 ie->caller->order,
2a72a953 2617 xstrdup_for_dump (ie->callee->name ()),
67348ccc 2618 ie->callee->order);
a0a7b611
JH
2619 return NULL;
2620 }
d52f5295 2621 callee = cgraph_node::get_create (target);
a0a7b611 2622 }
2b5f0895 2623
0127c169
JH
2624 /* If the edge is already speculated. */
2625 if (speculative && ie->speculative)
2626 {
2627 struct cgraph_edge *e2;
2628 struct ipa_ref *ref;
2629 ie->speculative_call_info (e2, ie, ref);
2630 if (e2->callee->ultimate_alias_target ()
2631 != callee->ultimate_alias_target ())
2632 {
2633 if (dump_file)
2634 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2635 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2a72a953 2636 xstrdup_for_dump (ie->caller->name ()),
0127c169 2637 ie->caller->order,
2a72a953 2638 xstrdup_for_dump (callee->name ()),
0127c169 2639 callee->order,
2a72a953 2640 xstrdup_for_dump (e2->callee->name ()),
0127c169
JH
2641 e2->callee->order);
2642 }
2643 else
2644 {
2645 if (dump_file)
2646 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2647 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2a72a953 2648 xstrdup_for_dump (ie->caller->name ()),
0127c169 2649 ie->caller->order,
2a72a953 2650 xstrdup_for_dump (callee->name ()),
0127c169
JH
2651 callee->order);
2652 }
2653 return NULL;
2654 }
2655
2b5f0895
XDL
2656 if (!dbg_cnt (devirt))
2657 return NULL;
2658
1dbee8c9 2659 ipa_check_create_node_params ();
ceeffab0 2660
81fa35bd
MJ
2661 /* We can not make edges to inline clones. It is bug that someone removed
2662 the cgraph node too early. */
17afc0fe
JH
2663 gcc_assert (!callee->global.inlined_to);
2664
48b1474e 2665 if (dump_file && !unreachable)
b258210c 2666 {
5ce97055 2667 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
ceeffab0 2668 "(%s/%i -> %s/%i), for stmt ",
b258210c 2669 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2670 speculative ? "speculative" : "known",
2a72a953 2671 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2672 ie->caller->order,
2a72a953 2673 xstrdup_for_dump (callee->name ()),
67348ccc 2674 callee->order);
b258210c
MJ
2675 if (ie->call_stmt)
2676 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2677 else
2678 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2679 }
2b5f0895
XDL
2680 if (dump_enabled_p ())
2681 {
807b7d62 2682 location_t loc = gimple_location_safe (ie->call_stmt);
3c9e6fca 2683
807b7d62
ML
2684 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2685 "converting indirect call in %s to direct call to %s\n",
2686 ie->caller->name (), callee->name ());
2b5f0895 2687 }
5ce97055 2688 if (!speculative)
d8d5aef1
JH
2689 {
2690 struct cgraph_edge *orig = ie;
2691 ie = ie->make_direct (callee);
2692 /* If we resolved speculative edge the cost is already up to date
2693 for direct call (adjusted by inline_edge_duplication_hook). */
2694 if (ie == orig)
2695 {
2696 es = inline_edge_summary (ie);
2697 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2698 - eni_size_weights.call_cost);
2699 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2700 - eni_time_weights.call_cost);
2701 }
2702 }
5ce97055
JH
2703 else
2704 {
2705 if (!callee->can_be_discarded_p ())
2706 {
2707 cgraph_node *alias;
2708 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2709 if (alias)
2710 callee = alias;
2711 }
d8d5aef1 2712 /* make_speculative will update ie's cost to direct call cost. */
5ce97055
JH
2713 ie = ie->make_speculative
2714 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2715 }
749aa96d 2716
b258210c 2717 return ie;
3e293154
MJ
2718}
2719
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  REQ_OFFSET is in bits, relative to the start of CONSTRUCTOR.
   Only ARRAY_TYPE and RECORD_TYPE constructors are searched; anything else
   yields NULL.  Recurses into nested constructors.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  /* Bias the index by the array's lower bound so that the
		     first element sits at offset zero.  */
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	    }
	  else
	    /* Elements without an explicit index are positioned by their
	       ordinal number IX within the constructor.  */
	    off = wi::to_offset (unit_size) * ix;

	  /* Convert the byte offset to bits.  */
	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  /* Skip elements whose bit offset does not fit a signed
	     HOST_WIDE_INT or is negative; they can never match.  */
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-fields are not individually addressable; give up on them.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      /* Elements are visited in ascending offset order, so once we are past
	 REQ_OFFSET there can be no match.  */
      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
2787
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  OFFSET is the bit offset of the sought value; BY_REF says whether
   SCALAR is expected to be the address of the aggregate (ADDR_EXPR) rather
   than the aggregate itself.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      /* When passed by reference, SCALAR must be an address; strip it to
	 get at the underlying declaration.  */
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  /* Only read-only global variables with a constructor initializer are
     guaranteed to keep their initial value, so anything else is unusable.  */
  if (TREE_CODE (scalar) != VAR_DECL
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
2811
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  /* A constant found in a global constructor takes precedence over the
     aggregate jump function contents.  */
  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  /* The jump function is only usable if its by-reference-ness matches what
     the caller asked for.  */
  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
2853
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  /* A cleared originating edge means the reference was already dealt with
     (e.g. when the edge itself was removed).  */
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
2879
2880/* If JFUNC has a reference description with refcount different from
2881 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2882 NULL. JFUNC must be a constant jump function. */
2883
2884static struct ipa_cst_ref_desc *
2885jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2886{
2887 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2888 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2889 return rdesc;
2890 else
2891 return NULL;
2892}
2893
568cda29
MJ
2894/* If the value of constant jump function JFUNC is an address of a function
2895 declaration, return the associated call graph node. Otherwise return
2896 NULL. */
2897
2898static cgraph_node *
2899cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2900{
2901 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2902 tree cst = ipa_get_jf_constant (jfunc);
2903 if (TREE_CODE (cst) != ADDR_EXPR
2904 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2905 return NULL;
2906
d52f5295 2907 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
2908}
2909
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  /* Note: the pre-decrement is intentional; the refcount drops on every
     call with a usable rdesc, not just when it reaches zero.  */
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
2932
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      /* A value loaded from memory is only trustworthy if it comes from a
	 global constant or the location is known not to be modified before
	 the call.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* Sanity check: for a non-aggregate constant jump function the callee
	 we resolved to should agree with the function the jfunc names.  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
2980
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.
   The returned target is __builtin_unreachable, for which a cgraph node is
   created if it does not exist yet.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  /* Make sure the builtin has a call graph node so the new direct edge has
     somewhere to point.  */
  cgraph_node::get_create (new_target);
  return new_target;
}
3003
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  The resulting edge
   may be speculative when the vtable pointer could have changed or when
   speculative devirtualization is enabled.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* If the vptr may have changed, the vtable lookup is only
		     a guess, hence speculative.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second approach: walk the set of possible polymorphic call targets
     derived from the combined polymorphic context.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A final, unique (or empty) target list overrides any earlier
	 speculative guess with a certain answer.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      /* Last resort: pick the single likely target, if any, and speculate.  */
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  /* Do not turn an already-speculative guess into a certainly-wrong
	     direct call.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3111
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* Fetch the successor now; IE may be turned into a direct edge
	 below.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      /* Remember the current speculative callee so we can recognize when the
	 resolution below merely confirms the existing speculation.  */
      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Re-fetch TOP; edge manipulation above may have invalidated the
	     previously cached pointer into the edge-args vector.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The call was not resolved (or only speculatively); translate the
	 parameter index through the jump function so the information
	 survives the inlining.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3241
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Recurse into edges already inlined into NODE; update jump functions on
     edges that remain real calls.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
3270
4502fe8d
MJ
3271/* Combine two controlled uses counts as done during inlining. */
3272
3273static int
3274combine_controlled_uses_counters (int c, int d)
3275{
3276 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3277 return IPA_UNDESCRIBED_USE;
3278 else
3279 return c + d - 1;
3280}
3281
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  Also drop references that become unnecessary once
   their controlled-use counts reach zero.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only parameters that both have a jump function and exist in the callee
     can be combined.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  /* Only simple (NOP_EXPR) pass-throughs can carry a described
	     use count.  */
	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* No uses remain in an IPA-CP clone; remove the reference the
		 cloning machinery created for the known constant.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline clones and drop the
		     corresponding cloning-created references as well.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameter count cannot be tracked;
     mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3399
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
3422
/* Frees all dynamically allocated structures that the argument info points
   to.  The struct itself is zeroed so it can be safely reused.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
771578a0 3433/* Free all ipa_edge structures. */
be95e2b9 3434
518dc859 3435void
771578a0 3436ipa_free_all_edge_args (void)
518dc859 3437{
771578a0
MJ
3438 int i;
3439 struct ipa_edge_args *args;
518dc859 3440
9771b263
DN
3441 if (!ipa_edge_args_vector)
3442 return;
3443
3444 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3445 ipa_free_edge_args_substructures (args);
3446
9771b263 3447 vec_free (ipa_edge_args_vector);
518dc859
RL
3448}
3449
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  /* Reset remaining fields so that a stale summary is recognizable.  */
  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
3470
/* Free all ipa_node_params structures by deleting the whole summary.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
3479
/* Grow ipcp_transformations if necessary so that it can be indexed by any
   current cgraph node uid.  Newly added slots are zero-initialized.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}
3489
/* Set the aggregate replacements of NODE to be AGGVALS.  The transformation
   vector is grown first so NODE's uid is a valid index.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
3499
/* Hook that is called by cgraph.c when an edge is removed.  Drops reference
   descriptions held by the edge's jump functions and frees its argument
   info.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* Clear the rdesc's back-pointer to the dying edge so later users
	     do not dereference it.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
3529
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions of SRC to DST and sets up fresh or shared reference
   descriptions for constant jump functions as appropriate.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow vec copy above shared the aggregate items; make them
	 a deep copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Duplication within the same caller (e.g. a recursive
		 clone): clone the underlying reference and give the new
		 edge its own rdesc.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC owns its rdesc; create a new one for DST and chain it
	         into the duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* Duplicating an edge within one caller adds one more controlled
	     use of the passed-through parameter.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
3628
dd912cb8 3629/* Analyze newly added function into callgraph. */
be95e2b9 3630
771578a0 3631static void
dd912cb8 3632ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3633{
dd912cb8
ML
3634 if (node->has_gimple_body_p ())
3635 ipa_analyze_node (node);
3636}
771578a0 3637
dd912cb8
ML
3638/* Hook that is called by summary when a node is duplicated. */
3639
3640void
3641ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3642 ipa_node_params *old_info,
3643 ipa_node_params *new_info)
3644{
3645 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3646
9771b263 3647 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3648 new_info->lattices = NULL;
771578a0 3649 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3650
8aab5218 3651 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3652 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3653 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3654
3655 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3656 if (old_av)
2c9561b5 3657 {
04be694e
MJ
3658 new_av = NULL;
3659 while (old_av)
3660 {
3661 struct ipa_agg_replacement_value *v;
2c9561b5 3662
04be694e
MJ
3663 v = ggc_alloc<ipa_agg_replacement_value> ();
3664 memcpy (v, old_av, sizeof (*v));
3665 v->next = new_av;
3666 new_av = v;
3667 old_av = old_av->next;
3668 }
3669 ipa_set_node_agg_value_chain (dst, new_av);
3670 }
3671
3672 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3673
3674 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3675 {
3676 ipcp_grow_transformations_if_necessary ();
3677 src_trans = ipcp_get_transformation_summary (src);
3678 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3679 vec<ipa_alignment, va_gc> *&dst_alignments
3680 = ipcp_get_transformation_summary (dst)->alignments;
3681 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3682 for (unsigned i = 0; i < src_alignments->length (); ++i)
3683 dst_alignments->quick_push ((*src_alignments)[i]);
2c9561b5 3684 }
771578a0
MJ
3685}
3686
3687/* Register our cgraph hooks if they are not already there. */
be95e2b9 3688
518dc859 3689void
771578a0 3690ipa_register_cgraph_hooks (void)
518dc859 3691{
dd912cb8
ML
3692 ipa_check_create_node_params ();
3693
771578a0
MJ
3694 if (!edge_removal_hook_holder)
3695 edge_removal_hook_holder =
3dafb85c 3696 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
771578a0
MJ
3697 if (!edge_duplication_hook_holder)
3698 edge_duplication_hook_holder =
3dafb85c 3699 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
dd912cb8 3700 function_insertion_hook_holder =
3dafb85c 3701 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3702}
518dc859 3703
771578a0 3704/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3705
771578a0
MJ
3706static void
3707ipa_unregister_cgraph_hooks (void)
3708{
3dafb85c 3709 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
771578a0 3710 edge_removal_hook_holder = NULL;
3dafb85c 3711 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
771578a0 3712 edge_duplication_hook_holder = NULL;
3dafb85c 3713 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3714 function_insertion_hook_holder = NULL;
771578a0
MJ
3715}
3716
3717/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3718 longer needed after ipa-cp. */
be95e2b9 3719
771578a0 3720void
e33c6cd6 3721ipa_free_all_structures_after_ipa_cp (void)
3e293154 3722{
2bf86c84 3723 if (!optimize && !in_lto_p)
3e293154
MJ
3724 {
3725 ipa_free_all_edge_args ();
3726 ipa_free_all_node_params ();
2651e637
ML
3727 ipcp_sources_pool.release ();
3728 ipcp_cst_values_pool.release ();
3729 ipcp_poly_ctx_values_pool.release ();
3730 ipcp_agg_lattice_pool.release ();
3e293154 3731 ipa_unregister_cgraph_hooks ();
601f3293 3732 ipa_refdesc_pool.release ();
3e293154
MJ
3733 }
3734}
3735
3736/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3737 longer needed after indirect inlining. */
be95e2b9 3738
3e293154 3739void
e33c6cd6 3740ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3741{
3742 ipa_free_all_edge_args ();
3743 ipa_free_all_node_params ();
3744 ipa_unregister_cgraph_hooks ();
2651e637
ML
3745 ipcp_sources_pool.release ();
3746 ipcp_cst_values_pool.release ();
3747 ipcp_poly_ctx_values_pool.release ();
3748 ipcp_agg_lattice_pool.release ();
601f3293 3749 ipa_refdesc_pool.release ();
518dc859
RL
3750}
3751
dcd416e3 3752/* Print ipa_tree_map data structures of all functions in the
518dc859 3753 callgraph to F. */
be95e2b9 3754
518dc859 3755void
2c9561b5 3756ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3757{
3758 int i, count;
3e293154 3759 struct ipa_node_params *info;
518dc859 3760
67348ccc 3761 if (!node->definition)
3e293154
MJ
3762 return;
3763 info = IPA_NODE_REF (node);
9de04252 3764 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3765 node->name (), node->order);
3e293154
MJ
3766 count = ipa_get_param_count (info);
3767 for (i = 0; i < count; i++)
518dc859 3768 {
4502fe8d
MJ
3769 int c;
3770
a4e33812 3771 fprintf (f, " ");
e067bd43 3772 ipa_dump_param (f, info, i);
339f49ec
JH
3773 if (ipa_is_param_used (info, i))
3774 fprintf (f, " used");
4502fe8d
MJ
3775 c = ipa_get_controlled_uses (info, i);
3776 if (c == IPA_UNDESCRIBED_USE)
3777 fprintf (f, " undescribed_use");
3778 else
3779 fprintf (f, " controlled_uses=%i", c);
3e293154 3780 fprintf (f, "\n");
518dc859
RL
3781 }
3782}
dcd416e3 3783
ca30a539 3784/* Print ipa_tree_map data structures of all functions in the
3e293154 3785 callgraph to F. */
be95e2b9 3786
3e293154 3787void
ca30a539 3788ipa_print_all_params (FILE * f)
3e293154
MJ
3789{
3790 struct cgraph_node *node;
3791
ca30a539 3792 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3793 FOR_EACH_FUNCTION (node)
ca30a539 3794 ipa_print_node_params (f, node);
3e293154 3795}
3f84bf08
MJ
3796
3797/* Return a heap allocated vector containing formal parameters of FNDECL. */
3798
9771b263 3799vec<tree>
3f84bf08
MJ
3800ipa_get_vector_of_formal_parms (tree fndecl)
3801{
9771b263 3802 vec<tree> args;
3f84bf08
MJ
3803 int count;
3804 tree parm;
3805
0e8853ee 3806 gcc_assert (!flag_wpa);
310bc633 3807 count = count_formal_params (fndecl);
9771b263 3808 args.create (count);
910ad8de 3809 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3810 args.quick_push (parm);
3f84bf08
MJ
3811
3812 return args;
3813}
3814
3815/* Return a heap allocated vector containing types of formal parameters of
3816 function type FNTYPE. */
3817
31519c38
AH
3818vec<tree>
3819ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3820{
9771b263 3821 vec<tree> types;
3f84bf08
MJ
3822 int count = 0;
3823 tree t;
3824
3825 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3826 count++;
3827
9771b263 3828 types.create (count);
3f84bf08 3829 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3830 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3831
3832 return types;
3833}
3834
3835/* Modify the function declaration FNDECL and its type according to the plan in
3836 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3837 to reflect the actual parameters being modified which are determined by the
3838 base_index field. */
3839
3840void
31519c38 3841ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 3842{
31519c38
AH
3843 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3844 tree orig_type = TREE_TYPE (fndecl);
3845 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
3846
3847 /* The following test is an ugly hack, some functions simply don't have any
3848 arguments in their type. This is probably a bug but well... */
31519c38
AH
3849 bool care_for_types = (old_arg_types != NULL_TREE);
3850 bool last_parm_void;
3851 vec<tree> otypes;
3f84bf08
MJ
3852 if (care_for_types)
3853 {
3854 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3855 == void_type_node);
31519c38 3856 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 3857 if (last_parm_void)
9771b263 3858 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3859 else
9771b263 3860 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3861 }
3862 else
3863 {
3864 last_parm_void = false;
9771b263 3865 otypes.create (0);
3f84bf08
MJ
3866 }
3867
31519c38
AH
3868 int len = adjustments.length ();
3869 tree *link = &DECL_ARGUMENTS (fndecl);
3870 tree new_arg_types = NULL;
3871 for (int i = 0; i < len; i++)
3f84bf08
MJ
3872 {
3873 struct ipa_parm_adjustment *adj;
3874 gcc_assert (link);
3875
9771b263 3876 adj = &adjustments[i];
31519c38
AH
3877 tree parm;
3878 if (adj->op == IPA_PARM_OP_NEW)
3879 parm = NULL;
3880 else
3881 parm = oparms[adj->base_index];
3f84bf08
MJ
3882 adj->base = parm;
3883
31519c38 3884 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3885 {
3886 if (care_for_types)
9771b263 3887 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3888 new_arg_types);
3889 *link = parm;
910ad8de 3890 link = &DECL_CHAIN (parm);
3f84bf08 3891 }
31519c38 3892 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3893 {
3894 tree new_parm;
3895 tree ptype;
3896
3897 if (adj->by_ref)
3898 ptype = build_pointer_type (adj->type);
3899 else
e69dbe37
MJ
3900 {
3901 ptype = adj->type;
3902 if (is_gimple_reg_type (ptype))
3903 {
3904 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3905 if (TYPE_ALIGN (ptype) < malign)
3906 ptype = build_aligned_type (ptype, malign);
3907 }
3908 }
3f84bf08
MJ
3909
3910 if (care_for_types)
3911 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3912
3913 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3914 ptype);
31519c38
AH
3915 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3916 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
3917 DECL_ARTIFICIAL (new_parm) = 1;
3918 DECL_ARG_TYPE (new_parm) = ptype;
3919 DECL_CONTEXT (new_parm) = fndecl;
3920 TREE_USED (new_parm) = 1;
3921 DECL_IGNORED_P (new_parm) = 1;
3922 layout_decl (new_parm, 0);
3923
31519c38
AH
3924 if (adj->op == IPA_PARM_OP_NEW)
3925 adj->base = NULL;
3926 else
3927 adj->base = parm;
3928 adj->new_decl = new_parm;
3f84bf08
MJ
3929
3930 *link = new_parm;
910ad8de 3931 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3932 }
3933 }
3934
3935 *link = NULL_TREE;
3936
31519c38 3937 tree new_reversed = NULL;
3f84bf08
MJ
3938 if (care_for_types)
3939 {
3940 new_reversed = nreverse (new_arg_types);
3941 if (last_parm_void)
3942 {
3943 if (new_reversed)
3944 TREE_CHAIN (new_arg_types) = void_list_node;
3945 else
3946 new_reversed = void_list_node;
3947 }
3948 }
3949
3950 /* Use copy_node to preserve as much as possible from original type
3951 (debug info, attribute lists etc.)
3952 Exception is METHOD_TYPEs must have THIS argument.
3953 When we are asked to remove it, we need to build new FUNCTION_TYPE
3954 instead. */
31519c38 3955 tree new_type = NULL;
3f84bf08 3956 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 3957 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 3958 && adjustments[0].base_index == 0))
3f84bf08 3959 {
4eb3f32c 3960 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3961 TYPE_ARG_TYPES (new_type) = new_reversed;
3962 }
3963 else
3964 {
3965 new_type
3966 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3967 new_reversed));
3968 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3969 DECL_VINDEX (fndecl) = NULL_TREE;
3970 }
3971
d402c33d
JH
3972 /* When signature changes, we need to clear builtin info. */
3973 if (DECL_BUILT_IN (fndecl))
3974 {
3975 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3976 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3977 }
3978
3f84bf08 3979 TREE_TYPE (fndecl) = new_type;
9b389a5e 3980 DECL_VIRTUAL_P (fndecl) = 0;
70d6d5c1 3981 DECL_LANG_SPECIFIC (fndecl) = NULL;
9771b263
DN
3982 otypes.release ();
3983 oparms.release ();
3f84bf08
MJ
3984}
3985
3986/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3987 If this is a directly recursive call, CS must be NULL. Otherwise it must
3988 contain the corresponding call graph edge. */
3989
3990void
538dd0b7 3991ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3f84bf08
MJ
3992 ipa_parm_adjustment_vec adjustments)
3993{
d52f5295 3994 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
9771b263
DN
3995 vec<tree> vargs;
3996 vec<tree, va_gc> **debug_args = NULL;
538dd0b7 3997 gcall *new_stmt;
82338059 3998 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3999 tree callee_decl;
4000 int i, len;
4001
9771b263
DN
4002 len = adjustments.length ();
4003 vargs.create (len);
67348ccc 4004 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
d122681a 4005 current_node->remove_stmt_references (stmt);
3f84bf08
MJ
4006
4007 gsi = gsi_for_stmt (stmt);
82338059
MJ
4008 prev_gsi = gsi;
4009 gsi_prev (&prev_gsi);
3f84bf08
MJ
4010 for (i = 0; i < len; i++)
4011 {
4012 struct ipa_parm_adjustment *adj;
4013
9771b263 4014 adj = &adjustments[i];
3f84bf08 4015
31519c38 4016 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4017 {
4018 tree arg = gimple_call_arg (stmt, adj->base_index);
4019
9771b263 4020 vargs.quick_push (arg);
3f84bf08 4021 }
31519c38 4022 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 4023 {
fffe1e40
MJ
4024 tree expr, base, off;
4025 location_t loc;
f43245d1 4026 unsigned int deref_align = 0;
c1ed6a01 4027 bool deref_base = false;
fffe1e40
MJ
4028
4029 /* We create a new parameter out of the value of the old one, we can
4030 do the following kind of transformations:
4031
4032 - A scalar passed by reference is converted to a scalar passed by
4033 value. (adj->by_ref is false and the type of the original
4034 actual argument is a pointer to a scalar).
4035
4036 - A part of an aggregate is passed instead of the whole aggregate.
4037 The part can be passed either by value or by reference, this is
4038 determined by value of adj->by_ref. Moreover, the code below
4039 handles both situations when the original aggregate is passed by
4040 value (its type is not a pointer) and when it is passed by
4041 reference (it is a pointer to an aggregate).
4042
4043 When the new argument is passed by reference (adj->by_ref is true)
4044 it must be a part of an aggregate and therefore we form it by
4045 simply taking the address of a reference inside the original
4046 aggregate. */
4047
4048 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4049 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
4050 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4051 : EXPR_LOCATION (base);
fffe1e40 4052
82d49829
MJ
4053 if (TREE_CODE (base) != ADDR_EXPR
4054 && POINTER_TYPE_P (TREE_TYPE (base)))
4055 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 4056 adj->offset / BITS_PER_UNIT);
3f84bf08 4057 else
3f84bf08 4058 {
fffe1e40
MJ
4059 HOST_WIDE_INT base_offset;
4060 tree prev_base;
c1ed6a01 4061 bool addrof;
fffe1e40
MJ
4062
4063 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
4064 {
4065 base = TREE_OPERAND (base, 0);
4066 addrof = true;
4067 }
4068 else
4069 addrof = false;
fffe1e40
MJ
4070 prev_base = base;
4071 base = get_addr_base_and_unit_offset (base, &base_offset);
4072 /* Aggregate arguments can have non-invariant addresses. */
4073 if (!base)
4074 {
4075 base = build_fold_addr_expr (prev_base);
82d49829 4076 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4077 adj->offset / BITS_PER_UNIT);
4078 }
4079 else if (TREE_CODE (base) == MEM_REF)
4080 {
c1ed6a01
MJ
4081 if (!addrof)
4082 {
4083 deref_base = true;
4084 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4085 }
82d49829 4086 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4087 base_offset
4088 + adj->offset / BITS_PER_UNIT);
4089 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 4090 off);
fffe1e40
MJ
4091 base = TREE_OPERAND (base, 0);
4092 }
4093 else
4094 {
82d49829 4095 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4096 base_offset
4097 + adj->offset / BITS_PER_UNIT);
4098 base = build_fold_addr_expr (base);
4099 }
3f84bf08 4100 }
fffe1e40 4101
3a5a825a
RG
4102 if (!adj->by_ref)
4103 {
4104 tree type = adj->type;
4105 unsigned int align;
4106 unsigned HOST_WIDE_INT misalign;
644ffefd 4107
c1ed6a01
MJ
4108 if (deref_base)
4109 {
4110 align = deref_align;
4111 misalign = 0;
4112 }
4113 else
4114 {
4115 get_pointer_alignment_1 (base, &align, &misalign);
4116 if (TYPE_ALIGN (type) > align)
4117 align = TYPE_ALIGN (type);
4118 }
807e902e 4119 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3a5a825a
RG
4120 * BITS_PER_UNIT);
4121 misalign = misalign & (align - 1);
4122 if (misalign != 0)
4123 align = (misalign & -misalign);
4124 if (align < TYPE_ALIGN (type))
4125 type = build_aligned_type (type, align);
4df65a85
RB
4126 base = force_gimple_operand_gsi (&gsi, base,
4127 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4128 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
ee45a32d 4129 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4df65a85
RB
4130 /* If expr is not a valid gimple call argument emit
4131 a load into a temporary. */
4132 if (is_gimple_reg_type (TREE_TYPE (expr)))
4133 {
355fe088 4134 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4df65a85
RB
4135 if (gimple_in_ssa_p (cfun))
4136 {
4137 gimple_set_vuse (tem, gimple_vuse (stmt));
4138 expr = make_ssa_name (TREE_TYPE (expr), tem);
4139 }
4140 else
b731b390 4141 expr = create_tmp_reg (TREE_TYPE (expr));
4df65a85
RB
4142 gimple_assign_set_lhs (tem, expr);
4143 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4144 }
3a5a825a
RG
4145 }
4146 else
4147 {
4148 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
ee45a32d 4149 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3a5a825a 4150 expr = build_fold_addr_expr (expr);
4df65a85
RB
4151 expr = force_gimple_operand_gsi (&gsi, expr,
4152 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4153 }
9771b263 4154 vargs.quick_push (expr);
3f84bf08 4155 }
31519c38 4156 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
4157 {
4158 unsigned int ix;
4159 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
355fe088 4160 gimple *def_temp;
ddb555ed
JJ
4161
4162 arg = gimple_call_arg (stmt, adj->base_index);
4163 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4164 {
4165 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4166 continue;
4167 arg = fold_convert_loc (gimple_location (stmt),
4168 TREE_TYPE (origin), arg);
4169 }
4170 if (debug_args == NULL)
4171 debug_args = decl_debug_args_insert (callee_decl);
9771b263 4172 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
4173 if (ddecl == origin)
4174 {
9771b263 4175 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
4176 break;
4177 }
4178 if (ddecl == NULL)
4179 {
4180 ddecl = make_node (DEBUG_EXPR_DECL);
4181 DECL_ARTIFICIAL (ddecl) = 1;
4182 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4183 DECL_MODE (ddecl) = DECL_MODE (origin);
4184
9771b263
DN
4185 vec_safe_push (*debug_args, origin);
4186 vec_safe_push (*debug_args, ddecl);
ddb555ed 4187 }
9771b263 4188 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
4189 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4190 }
3f84bf08
MJ
4191 }
4192
4193 if (dump_file && (dump_flags & TDF_DETAILS))
4194 {
4195 fprintf (dump_file, "replacing stmt:");
4196 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4197 }
4198
3f84bf08 4199 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 4200 vargs.release ();
3f84bf08
MJ
4201 if (gimple_call_lhs (stmt))
4202 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4203
4204 gimple_set_block (new_stmt, gimple_block (stmt));
4205 if (gimple_has_location (stmt))
4206 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 4207 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 4208 gimple_call_copy_flags (new_stmt, stmt);
4df65a85
RB
4209 if (gimple_in_ssa_p (cfun))
4210 {
4211 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4212 if (gimple_vdef (stmt))
4213 {
4214 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4215 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4216 }
4217 }
3f84bf08
MJ
4218
4219 if (dump_file && (dump_flags & TDF_DETAILS))
4220 {
4221 fprintf (dump_file, "with stmt:");
4222 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4223 fprintf (dump_file, "\n");
4224 }
4225 gsi_replace (&gsi, new_stmt, true);
4226 if (cs)
3dafb85c 4227 cs->set_call_stmt (new_stmt);
82338059
MJ
4228 do
4229 {
d52f5295 4230 current_node->record_stmt_references (gsi_stmt (gsi));
82338059
MJ
4231 gsi_prev (&gsi);
4232 }
3d354792 4233 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3f84bf08
MJ
4234}
4235
31519c38
AH
4236/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4237 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4238 specifies whether the function should care about type incompatibility the
4239 current and new expressions. If it is false, the function will leave
4240 incompatibility issues to the caller. Return true iff the expression
4241 was modified. */
4242
4243bool
4244ipa_modify_expr (tree *expr, bool convert,
4245 ipa_parm_adjustment_vec adjustments)
4246{
4247 struct ipa_parm_adjustment *cand
4248 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4249 if (!cand)
4250 return false;
4251
4252 tree src;
4253 if (cand->by_ref)
ee45a32d
EB
4254 {
4255 src = build_simple_mem_ref (cand->new_decl);
4256 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4257 }
31519c38
AH
4258 else
4259 src = cand->new_decl;
4260
4261 if (dump_file && (dump_flags & TDF_DETAILS))
4262 {
4263 fprintf (dump_file, "About to replace expr ");
4264 print_generic_expr (dump_file, *expr, 0);
4265 fprintf (dump_file, " with ");
4266 print_generic_expr (dump_file, src, 0);
4267 fprintf (dump_file, "\n");
4268 }
4269
4270 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4271 {
4272 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4273 *expr = vce;
4274 }
4275 else
4276 *expr = src;
4277 return true;
4278}
4279
4280/* If T is an SSA_NAME, return NULL if it is not a default def or
4281 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4282 the base variable is always returned, regardless if it is a default
4283 def. Return T if it is not an SSA_NAME. */
4284
4285static tree
4286get_ssa_base_param (tree t, bool ignore_default_def)
4287{
4288 if (TREE_CODE (t) == SSA_NAME)
4289 {
4290 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4291 return SSA_NAME_VAR (t);
4292 else
4293 return NULL_TREE;
4294 }
4295 return t;
4296}
4297
4298/* Given an expression, return an adjustment entry specifying the
4299 transformation to be done on EXPR. If no suitable adjustment entry
4300 was found, returns NULL.
4301
4302 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4303 default def, otherwise bail on them.
4304
4305 If CONVERT is non-NULL, this function will set *CONVERT if the
4306 expression provided is a component reference. ADJUSTMENTS is the
4307 adjustments vector. */
4308
4309ipa_parm_adjustment *
4310ipa_get_adjustment_candidate (tree **expr, bool *convert,
4311 ipa_parm_adjustment_vec adjustments,
4312 bool ignore_default_def)
4313{
4314 if (TREE_CODE (**expr) == BIT_FIELD_REF
4315 || TREE_CODE (**expr) == IMAGPART_EXPR
4316 || TREE_CODE (**expr) == REALPART_EXPR)
4317 {
4318 *expr = &TREE_OPERAND (**expr, 0);
4319 if (convert)
4320 *convert = true;
4321 }
4322
4323 HOST_WIDE_INT offset, size, max_size;
ee45a32d
EB
4324 bool reverse;
4325 tree base
4326 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
31519c38
AH
4327 if (!base || size == -1 || max_size == -1)
4328 return NULL;
4329
4330 if (TREE_CODE (base) == MEM_REF)
4331 {
807e902e 4332 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
31519c38
AH
4333 base = TREE_OPERAND (base, 0);
4334 }
4335
4336 base = get_ssa_base_param (base, ignore_default_def);
4337 if (!base || TREE_CODE (base) != PARM_DECL)
4338 return NULL;
4339
4340 struct ipa_parm_adjustment *cand = NULL;
4341 unsigned int len = adjustments.length ();
4342 for (unsigned i = 0; i < len; i++)
4343 {
4344 struct ipa_parm_adjustment *adj = &adjustments[i];
4345
4346 if (adj->base == base
4347 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4348 {
4349 cand = adj;
4350 break;
4351 }
4352 }
4353
4354 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4355 return NULL;
4356 return cand;
4357}
4358
3f84bf08
MJ
4359/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4360
4361static bool
4362index_in_adjustments_multiple_times_p (int base_index,
4363 ipa_parm_adjustment_vec adjustments)
4364{
9771b263 4365 int i, len = adjustments.length ();
3f84bf08
MJ
4366 bool one = false;
4367
4368 for (i = 0; i < len; i++)
4369 {
4370 struct ipa_parm_adjustment *adj;
9771b263 4371 adj = &adjustments[i];
3f84bf08
MJ
4372
4373 if (adj->base_index == base_index)
4374 {
4375 if (one)
4376 return true;
4377 else
4378 one = true;
4379 }
4380 }
4381 return false;
4382}
4383
4384
4385/* Return adjustments that should have the same effect on function parameters
4386 and call arguments as if they were first changed according to adjustments in
4387 INNER and then by adjustments in OUTER. */
4388
4389ipa_parm_adjustment_vec
4390ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4391 ipa_parm_adjustment_vec outer)
4392{
9771b263
DN
4393 int i, outlen = outer.length ();
4394 int inlen = inner.length ();
3f84bf08
MJ
4395 int removals = 0;
4396 ipa_parm_adjustment_vec adjustments, tmp;
4397
9771b263 4398 tmp.create (inlen);
3f84bf08
MJ
4399 for (i = 0; i < inlen; i++)
4400 {
4401 struct ipa_parm_adjustment *n;
9771b263 4402 n = &inner[i];
3f84bf08 4403
31519c38 4404 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4405 removals++;
4406 else
31519c38
AH
4407 {
4408 /* FIXME: Handling of new arguments are not implemented yet. */
4409 gcc_assert (n->op != IPA_PARM_OP_NEW);
4410 tmp.quick_push (*n);
4411 }
3f84bf08
MJ
4412 }
4413
9771b263 4414 adjustments.create (outlen + removals);
3f84bf08
MJ
4415 for (i = 0; i < outlen; i++)
4416 {
f32682ca 4417 struct ipa_parm_adjustment r;
9771b263
DN
4418 struct ipa_parm_adjustment *out = &outer[i];
4419 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 4420
f32682ca 4421 memset (&r, 0, sizeof (r));
31519c38
AH
4422 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4423 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4424 {
4425 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4426 {
31519c38 4427 r.op = IPA_PARM_OP_REMOVE;
9771b263 4428 adjustments.quick_push (r);
3f84bf08
MJ
4429 }
4430 continue;
4431 }
31519c38
AH
4432 else
4433 {
4434 /* FIXME: Handling of new arguments are not implemented yet. */
4435 gcc_assert (out->op != IPA_PARM_OP_NEW);
4436 }
3f84bf08 4437
f32682ca
DN
4438 r.base_index = in->base_index;
4439 r.type = out->type;
3f84bf08
MJ
4440
4441 /* FIXME: Create nonlocal value too. */
4442
31519c38
AH
4443 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4444 r.op = IPA_PARM_OP_COPY;
4445 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 4446 r.offset = out->offset;
31519c38 4447 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 4448 r.offset = in->offset;
3f84bf08 4449 else
f32682ca 4450 r.offset = in->offset + out->offset;
9771b263 4451 adjustments.quick_push (r);
3f84bf08
MJ
4452 }
4453
4454 for (i = 0; i < inlen; i++)
4455 {
9771b263 4456 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 4457
31519c38 4458 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 4459 adjustments.quick_push (*n);
3f84bf08
MJ
4460 }
4461
9771b263 4462 tmp.release ();
3f84bf08
MJ
4463 return adjustments;
4464}
4465
4466/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4467 friendly way, assuming they are meant to be applied to FNDECL. */
4468
4469void
4470ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4471 tree fndecl)
4472{
9771b263 4473 int i, len = adjustments.length ();
3f84bf08 4474 bool first = true;
9771b263 4475 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
4476
4477 fprintf (file, "IPA param adjustments: ");
4478 for (i = 0; i < len; i++)
4479 {
4480 struct ipa_parm_adjustment *adj;
9771b263 4481 adj = &adjustments[i];
3f84bf08
MJ
4482
4483 if (!first)
4484 fprintf (file, " ");
4485 else
4486 first = false;
4487
4488 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 4489 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
4490 if (adj->base)
4491 {
4492 fprintf (file, ", base: ");
4493 print_generic_expr (file, adj->base, 0);
4494 }
31519c38 4495 if (adj->new_decl)
3f84bf08 4496 {
31519c38
AH
4497 fprintf (file, ", new_decl: ");
4498 print_generic_expr (file, adj->new_decl, 0);
3f84bf08
MJ
4499 }
4500 if (adj->new_ssa_base)
4501 {
4502 fprintf (file, ", new_ssa_base: ");
4503 print_generic_expr (file, adj->new_ssa_base, 0);
4504 }
4505
31519c38 4506 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08 4507 fprintf (file, ", copy_param");
31519c38 4508 else if (adj->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4509 fprintf (file, ", remove_param");
4510 else
4511 fprintf (file, ", offset %li", (long) adj->offset);
4512 if (adj->by_ref)
4513 fprintf (file, ", by_ref");
4514 print_node_brief (file, ", type: ", adj->type, 0);
4515 fprintf (file, "\n");
4516 }
9771b263 4517 parms.release ();
3f84bf08
MJ
4518}
4519
2c9561b5
MJ
4520/* Dump the AV linked list. */
4521
4522void
4523ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4524{
4525 bool comma = false;
4526 fprintf (f, " Aggregate replacements:");
4527 for (; av; av = av->next)
4528 {
4529 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4530 av->index, av->offset);
4531 print_generic_expr (f, av->value, 0);
4532 comma = true;
4533 }
4534 fprintf (f, "\n");
4535}
4536
fb3f88cc
JH
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must be
   kept exactly in sync with what ipa_read_jump_function expects.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* Nothing beyond the type tag to stream.  */
      break;
    case IPA_JF_CONST:
      /* Constants must not carry locations; streaming a location would be
	 meaningless in another translation unit.  */
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: only formal id and agg_preserved flag.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: the second operand is also needed.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function items: count first, then (only when non-empty)
     the by_ref flag, then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-alignment information, if any.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
4605
/* Read in jump function JUMP_FUNC from IB.  CS is the call graph edge the
   jump function belongs to (needed so constants can register references) and
   DATA_IN holds the section's string and tree tables.  Every read must
   mirror ipa_write_jump_function exactly.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree precedes the formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  /* Aggregate items: count, optional by_ref flag, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  /* Known-alignment information, if it was streamed.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
4681
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* All boolean flags go into a single bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful for aggregate-based and polymorphic
     calls; for everything else it must be zero and is not streamed.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4713
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Must mirror
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* The offset was only streamed when meaningful; see the writer.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4744
/* Stream out NODE info to OB.  Per-parameter data comes first, followed by
   the jump functions of all direct and then all indirect call edges.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed, non-clone summaries may be streamed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* The argument count is doubled and its low bit flags whether
	 polymorphic call contexts are present for this edge.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Same count-plus-flag encoding as for direct callees above.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      /* Indirect edges additionally carry their indirect call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4804
/* Stream in NODE info from IB.  DATA_IN holds the section's string and tree
   tables.  Must mirror ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer only streams analyzed summaries, so mark the node analyzed
     whenever it has any parameters at all.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* Low bit of the streamed count flags the presence of polymorphic
	 call contexts; the actual argument count is the upper bits.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Indirect edge info is streamed even for edges with no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4873
/* Write jump functions for all analyzed nodes in the current LTO
   partition into an LTO_section_jump_functions section.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream if IPA analysis never ran.  */
  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes that will be streamed, so the reader knows
     how many records to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
4915
/* Read section in file FILE_DATA of length LEN with data DATA.  Decodes the
   jump-function records written by ipa_prop_write_jump_functions.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      /* Only nodes with bodies were streamed, so a definition must exist.  */
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
4956
4957/* Read ipcp jump functions. */
4958
4959void
4960ipa_prop_read_jump_functions (void)
4961{
4962 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4963 struct lto_file_decl_data *file_data;
4964 unsigned int j = 0;
4965
4966 ipa_check_create_node_params ();
4967 ipa_check_create_edge_args ();
4968 ipa_register_cgraph_hooks ();
4969
4970 while ((file_data = file_data_vec[j++]))
4971 {
4972 size_t len;
4973 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4974
4975 if (data)
4976 ipa_prop_read_section (file_data, data, len);
4977 }
4978}
4979
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Ensure the per-node and per-edge IPA summaries exist so later passes can
   rely on them.
   NOTE(review): an older version of this comment also mentioned computing
   called_with_variable_arg info, which this function no longer does.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
2c9561b5
MJ
4990
/* Stream out the aggregate replacement values and, when present, the
   parameter alignment information attached to NODE into OB.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* The chain length is streamed first so the reader can pre-count.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  /* Alignment records follow; a zero count means none are present.  */
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      /* Misalignment is always smaller than the alignment, so it can
		 be streamed in that range.  */
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
5046
/* Stream in the aggregate value replacement chain and the parameter
   alignment information for NODE from IB.  DATA_IN holds the section's
   string and tree tables.  Must mirror write_ipcp_transformation_info.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Push at the head; the resulting chain is reversed relative to the
	 order in which the values were written.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Alignment records follow; zero count means there are none.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
5098
/* Write all aggregate replacement for nodes in set.  Produces an
   LTO_section_ipcp_transform section for the current partition.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes that will be streamed.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second pass: stream the per-node transformation info.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5134
5135/* Read replacements section in file FILE_DATA of length LEN with data
5136 DATA. */
5137
5138static void
5139read_replacements_section (struct lto_file_decl_data *file_data,
5140 const char *data,
5141 size_t len)
5142{
5143 const struct lto_function_header *header =
5144 (const struct lto_function_header *) data;
5145 const int cfg_offset = sizeof (struct lto_function_header);
5146 const int main_offset = cfg_offset + header->cfg_size;
5147 const int string_offset = main_offset + header->main_size;
5148 struct data_in *data_in;
2c9561b5
MJ
5149 unsigned int i;
5150 unsigned int count;
5151
207c68cd 5152 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5153 header->main_size, file_data->mode_table);
2c9561b5
MJ
5154
5155 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5156 header->string_size, vNULL);
2c9561b5
MJ
5157 count = streamer_read_uhwi (&ib_main);
5158
5159 for (i = 0; i < count; i++)
5160 {
5161 unsigned int index;
5162 struct cgraph_node *node;
5163 lto_symtab_encoder_t encoder;
5164
5165 index = streamer_read_uhwi (&ib_main);
5166 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5167 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5168 index));
67348ccc 5169 gcc_assert (node->definition);
04be694e 5170 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5171 }
5172 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5173 len);
5174 lto_data_in_delete (data_in);
5175}
5176
5177/* Read IPA-CP aggregate replacements. */
5178
5179void
04be694e 5180ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5181{
5182 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5183 struct lto_file_decl_data *file_data;
5184 unsigned int j = 0;
5185
5186 while ((file_data = file_data_vec[j++]))
5187 {
5188 size_t len;
5189 const char *data = lto_get_section_data (file_data,
5190 LTO_section_ipcp_transform,
5191 NULL, &len);
5192 if (data)
5193 read_replacements_section (file_data, data, len);
5194 }
5195}
5196
5197/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5198 NODE. */
5199
5200static void
5201adjust_agg_replacement_values (struct cgraph_node *node,
5202 struct ipa_agg_replacement_value *aggval)
5203{
5204 struct ipa_agg_replacement_value *v;
5205 int i, c = 0, d = 0, *adj;
5206
5207 if (!node->clone.combined_args_to_skip)
5208 return;
5209
5210 for (v = aggval; v; v = v->next)
5211 {
5212 gcc_assert (v->index >= 0);
5213 if (c < v->index)
5214 c = v->index;
5215 }
5216 c++;
5217
5218 adj = XALLOCAVEC (int, c);
5219 for (i = 0; i < c; i++)
5220 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5221 {
5222 adj[i] = -1;
5223 d++;
5224 }
5225 else
5226 adj[i] = i - d;
5227
5228 for (v = aggval; v; v = v->next)
5229 v->index = adj[v->index];
5230}
5231
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI describes the function body being modified, DESCS are its parameter
     descriptors and AV the chain of aggregate replacement values.  SC and CC
     point to flags set when a statement is changed and when the CFG needs
     cleaning up, respectively.  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  /* Function body info of the function being transformed.  */
  struct ipa_func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor> m_descriptors;
  /* Chain of aggregate replacement values to substitute.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags; see the constructor comment.  */
  bool *m_something_changed, *m_cfg_changed;
};
5252
3daacdcd 5253edge
8aab5218
MJ
5254ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5255{
5256 gimple_stmt_iterator gsi;
5257 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5258 {
5259 struct ipa_agg_replacement_value *v;
355fe088 5260 gimple *stmt = gsi_stmt (gsi);
8aab5218
MJ
5261 tree rhs, val, t;
5262 HOST_WIDE_INT offset, size;
5263 int index;
5264 bool by_ref, vce;
5265
5266 if (!gimple_assign_load_p (stmt))
5267 continue;
5268 rhs = gimple_assign_rhs1 (stmt);
5269 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5270 continue;
2c9561b5 5271
8aab5218
MJ
5272 vce = false;
5273 t = rhs;
5274 while (handled_component_p (t))
5275 {
5276 /* V_C_E can do things like convert an array of integers to one
5277 bigger integer and similar things we do not handle below. */
5278 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5279 {
5280 vce = true;
5281 break;
5282 }
5283 t = TREE_OPERAND (t, 0);
5284 }
5285 if (vce)
5286 continue;
5287
ff302741
PB
5288 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5289 &offset, &size, &by_ref))
8aab5218
MJ
5290 continue;
5291 for (v = m_aggval; v; v = v->next)
5292 if (v->index == index
5293 && v->offset == offset)
5294 break;
5295 if (!v
5296 || v->by_ref != by_ref
5297 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5298 continue;
5299
5300 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5301 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5302 {
5303 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5304 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5305 else if (TYPE_SIZE (TREE_TYPE (rhs))
5306 == TYPE_SIZE (TREE_TYPE (v->value)))
5307 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5308 else
5309 {
5310 if (dump_file)
5311 {
5312 fprintf (dump_file, " const ");
5313 print_generic_expr (dump_file, v->value, 0);
5314 fprintf (dump_file, " can't be converted to type of ");
5315 print_generic_expr (dump_file, rhs, 0);
5316 fprintf (dump_file, "\n");
5317 }
5318 continue;
5319 }
5320 }
5321 else
5322 val = v->value;
5323
5324 if (dump_file && (dump_flags & TDF_DETAILS))
5325 {
5326 fprintf (dump_file, "Modifying stmt:\n ");
5327 print_gimple_stmt (dump_file, stmt, 0, 0);
5328 }
5329 gimple_assign_set_rhs_from_tree (&gsi, val);
5330 update_stmt (stmt);
5331
5332 if (dump_file && (dump_flags & TDF_DETAILS))
5333 {
5334 fprintf (dump_file, "into:\n ");
5335 print_gimple_stmt (dump_file, stmt, 0, 0);
5336 fprintf (dump_file, "\n");
5337 }
5338
5339 *m_something_changed = true;
5340 if (maybe_clean_eh_stmt (stmt)
5341 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5342 *m_cfg_changed = true;
5343 }
3daacdcd 5344 return NULL;
8aab5218
MJ
5345}
5346
/* Update alignment of formal parameters of NODE as described in its
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Parameters removed in this clone have summary entries but no
	 corresponding PARM_DECL, so do not advance PARM for them.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Alignment only makes sense for SSA-register pointers with a known
	 default definition.  */
      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      /* Never weaken alignment information that is already at least as
	 strong.  */
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
5398
/* IPCP transformation phase doing propagation of aggregate values.  Applies
   parameter alignment updates and substitutes recorded aggregate constants
   into NODE's body.  Returns the TODO flags required by the changes made.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  /* Without aggregate replacements there is nothing more to do.  */
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Map replacement indices past any parameters this clone dropped.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release per-BB info and dominance data created above.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  /* The transformation data has been consumed; drop it.  */
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}