]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-prop.c
runtime: copy lfstack code from Go 1.7 runtime
[thirdparty/gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
818ab71a 2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
40e23961 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
40e23961 33#include "fold-const.h"
2fb9a547
AM
34#include "gimple-fold.h"
35#include "tree-eh.h"
36566b39 36#include "calls.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "print-tree.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
18f429e2 41#include "gimplify-me.h"
5be5c238 42#include "gimple-walk.h"
dd912cb8 43#include "symbol-summary.h"
518dc859 44#include "ipa-prop.h"
442b4905 45#include "tree-cfg.h"
442b4905 46#include "tree-dfa.h"
771578a0 47#include "tree-inline.h"
0f378cb5 48#include "ipa-inline.h"
cf835838 49#include "gimple-pretty-print.h"
dfea20f1 50#include "params.h"
450ad0cd 51#include "ipa-utils.h"
2b5f0895 52#include "dbgcnt.h"
8aab5218 53#include "domwalk.h"
9b2b7279 54#include "builtins.h"
771578a0 55
dd912cb8
ML
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument infos (jump functions) are actually
   stored, indexed by call graph edge uid.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks, used to unregister the callbacks when the
   IPA structures are freed:  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 67
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
4502fe8d 84
5fe8e757
MJ
85/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
86 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
87
88static bool
89ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
90{
67348ccc 91 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
92
93 if (!fs_opts)
94 return false;
2bf86c84 95 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
5fe8e757
MJ
96}
97
be95e2b9
MJ
98/* Return index of the formal whose tree is PTREE in function which corresponds
99 to INFO. */
100
d044dd17 101static int
84562394 102ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
103{
104 int i, count;
105
9771b263 106 count = descriptors.length ();
518dc859 107 for (i = 0; i < count; i++)
209ca542 108 if (descriptors[i].decl_or_type == ptree)
518dc859
RL
109 return i;
110
111 return -1;
112}
113
d044dd17
MJ
114/* Return index of the formal whose tree is PTREE in function which corresponds
115 to INFO. */
116
117int
118ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
119{
120 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
121}
122
123/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
124 NODE. */
be95e2b9 125
f8e2a1ed
MJ
126static void
127ipa_populate_param_decls (struct cgraph_node *node,
84562394 128 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
129{
130 tree fndecl;
131 tree fnargs;
132 tree parm;
133 int param_num;
3e293154 134
67348ccc 135 fndecl = node->decl;
0e8853ee 136 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
137 fnargs = DECL_ARGUMENTS (fndecl);
138 param_num = 0;
910ad8de 139 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 140 {
209ca542 141 descriptors[param_num].decl_or_type = parm;
b4c9af96
RB
142 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
143 true);
518dc859
RL
144 param_num++;
145 }
146}
147
3f84bf08
MJ
148/* Return how many formal parameters FNDECL has. */
149
fd29c024 150int
310bc633 151count_formal_params (tree fndecl)
3f84bf08
MJ
152{
153 tree parm;
154 int count = 0;
0e8853ee 155 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 156
910ad8de 157 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
158 count++;
159
160 return count;
161}
162
0e8853ee
JH
163/* Return the declaration of Ith formal parameter of the function corresponding
164 to INFO. Note there is no setter function as this array is built just once
165 using ipa_initialize_node_params. */
166
167void
168ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
169{
170 fprintf (file, "param #%i", i);
209ca542 171 if (info->descriptors[i].decl_or_type)
0e8853ee
JH
172 {
173 fprintf (file, " ");
209ca542 174 print_generic_expr (file, info->descriptors[i].decl_or_type, 0);
0e8853ee
JH
175 }
176}
177
178/* Initialize the ipa_node_params structure associated with NODE
179 to hold PARAM_COUNT parameters. */
180
181void
182ipa_alloc_node_params (struct cgraph_node *node, int param_count)
183{
184 struct ipa_node_params *info = IPA_NODE_REF (node);
185
186 if (!info->descriptors.exists () && param_count)
187 info->descriptors.safe_grow_cleared (param_count);
188}
189
f8e2a1ed
MJ
190/* Initialize the ipa_node_params structure associated with NODE by counting
191 the function parameters, creating the descriptors and populating their
192 param_decls. */
be95e2b9 193
f8e2a1ed
MJ
194void
195ipa_initialize_node_params (struct cgraph_node *node)
196{
197 struct ipa_node_params *info = IPA_NODE_REF (node);
198
9771b263 199 if (!info->descriptors.exists ())
f8e2a1ed 200 {
67348ccc 201 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 202 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 203 }
518dc859
RL
204}
205
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.
   For every actual argument of the call this prints the jump function type
   and its payload, any aggregate jump functions, the polymorphic call
   context, known bits and the value range, if recorded.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For addresses of constants also print the initializer they
	     point to.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* Arithmetic pass-throughs carry an extra operand.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      /* Aggregate jump functions describing known parts of memory passed
	 by value or by reference.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  /* NOTE(review): the context is dumped to dump_file rather than F
	     like everything else here — looks like an oversight, confirm
	     before relying on F capturing the whole dump.  */
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      /* Known-bits information (CCP-style value/mask pair).  */
      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      /* Value range information; "~" marks an anti-range.  */
      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
320
321
be95e2b9
MJ
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  Both direct callees and indirect call sites are covered;
   edges with no argument information available are skipped.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      /* Calls loading the target from an aggregate (including member
	 pointers) are described differently from plain/polymorphic ones.  */
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
375
376/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 377
3e293154
MJ
378void
379ipa_print_all_jump_functions (FILE *f)
380{
381 struct cgraph_node *node;
382
ca30a539 383 fprintf (f, "\nJump functions:\n");
65c70e6b 384 FOR_EACH_FUNCTION (node)
3e293154
MJ
385 {
386 ipa_print_node_jump_functions (f, node);
387 }
388}
389
04be694e
MJ
390/* Set jfunc to be a know-really nothing jump function. */
391
392static void
393ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
394{
395 jfunc->type = IPA_JF_UNKNOWN;
209ca542 396 jfunc->bits.known = false;
8bc5448f 397 jfunc->vr_known = false;
04be694e
MJ
398}
399
b8f6e610
MJ
400/* Set JFUNC to be a copy of another jmp (to be used by jump function
401 combination code). The two functions will share their rdesc. */
402
403static void
404ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
405 struct ipa_jump_func *src)
406
407{
408 gcc_checking_assert (src->type == IPA_JF_CONST);
409 dst->type = IPA_JF_CONST;
410 dst->value.constant = src->value.constant;
411}
412
7b872d9e
MJ
413/* Set JFUNC to be a constant jmp function. */
414
415static void
4502fe8d
MJ
416ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
417 struct cgraph_edge *cs)
7b872d9e
MJ
418{
419 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
420 jfunc->value.constant.value = unshare_expr_without_location (constant);
421
422 if (TREE_CODE (constant) == ADDR_EXPR
423 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
424 {
425 struct ipa_cst_ref_desc *rdesc;
4502fe8d 426
601f3293 427 rdesc = ipa_refdesc_pool.allocate ();
4502fe8d
MJ
428 rdesc->cs = cs;
429 rdesc->next_duplicate = NULL;
430 rdesc->refcount = 1;
431 jfunc->value.constant.rdesc = rdesc;
432 }
433 else
434 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
435}
436
437/* Set JFUNC to be a simple pass-through jump function. */
438static void
8b7773a4 439ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
3b97a5c7 440 bool agg_preserved)
7b872d9e
MJ
441{
442 jfunc->type = IPA_JF_PASS_THROUGH;
443 jfunc->value.pass_through.operand = NULL_TREE;
444 jfunc->value.pass_through.formal_id = formal_id;
445 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 446 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
447}
448
449/* Set JFUNC to be an arithmetic pass through jump function. */
450
451static void
452ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
453 tree operand, enum tree_code operation)
454{
455 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 456 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
457 jfunc->value.pass_through.formal_id = formal_id;
458 jfunc->value.pass_through.operation = operation;
8b7773a4 459 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
460}
461
462/* Set JFUNC to be an ancestor jump function. */
463
464static void
465ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
3b97a5c7 466 int formal_id, bool agg_preserved)
7b872d9e
MJ
467{
468 jfunc->type = IPA_JF_ANCESTOR;
469 jfunc->value.ancestor.formal_id = formal_id;
470 jfunc->value.ancestor.offset = offset;
8b7773a4 471 jfunc->value.ancestor.agg_preserved = agg_preserved;
e248d83f
MJ
472}
473
8aab5218
MJ
474/* Get IPA BB information about the given BB. FBI is the context of analyzis
475 of this function body. */
476
477static struct ipa_bb_info *
56b40062 478ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
8aab5218
MJ
479{
480 gcc_checking_assert (fbi);
481 return &fbi->bb_infos[bb->index];
482}
483
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change (via the walk_aliased_vdefs DATA pointer).  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
498
499/* Return true if STMT can modify a virtual method table pointer.
500
501 This function makes special assumptions about both constructors and
502 destructors which are all the functions that are allowed to alter the VMT
503 pointers. It assumes that destructors begin with assignment into all VMT
504 pointers and that constructors essentially look in the following way:
505
506 1) The very first thing they do is that they call constructors of ancestor
507 sub-objects that have them.
508
509 2) Then VMT pointers of this and all its ancestors is set to new values
510 corresponding to the type corresponding to the constructor.
511
512 3) Only afterwards, other stuff such as constructor of member sub-objects
513 and the code written by the user is run. Only this may include calling
514 virtual functions, directly or indirectly.
515
516 There is no way to call a constructor of an ancestor sub-object in any
517 other way.
518
519 This means that we do not have to care whether constructors get the correct
520 type information because they will always change it (in fact, if we define
521 the type to be given by the VMT pointer, it is undefined).
522
523 The most important fact to derive from the above is that if, for some
524 statement in the section 3, we try to detect whether the dynamic type has
525 changed, we can safely ignore all calls as we examine the function body
526 backwards until we reach statements in section 2 because these calls cannot
527 be ancestor constructors or destructors (if the input is not bogus) and so
528 do not change the dynamic type (this holds true only for automatically
529 allocated objects but at the moment we devirtualize only these). We then
530 must detect that statements in section 2 change the dynamic type and can try
531 to derive the new type. That is enough and we can stop, we will never see
532 the calls into constructors of sub-objects in this code. Therefore we can
533 safely ignore all call statements that we traverse.
534 */
535
536static bool
355fe088 537stmt_may_be_vtbl_ptr_store (gimple *stmt)
f65cf2b7
MJ
538{
539 if (is_gimple_call (stmt))
540 return false;
70f633c5
JH
541 if (gimple_clobber_p (stmt))
542 return false;
f65cf2b7
MJ
543 else if (is_gimple_assign (stmt))
544 {
545 tree lhs = gimple_assign_lhs (stmt);
546
0004f992
MJ
547 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
548 {
549 if (flag_strict_aliasing
550 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
551 return false;
552
553 if (TREE_CODE (lhs) == COMPONENT_REF
554 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 555 return false;
0004f992
MJ
556 /* In the future we might want to use get_base_ref_and_offset to find
557 if there is a field corresponding to the offset and if so, proceed
558 almost like if it was a component ref. */
559 }
f65cf2b7
MJ
560 }
561 return true;
562}
563
3b97a5c7
MJ
564/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
565 to check whether a particular statement may modify the virtual table
566 pointerIt stores its result into DATA, which points to a
11478306 567 prop_type_change_info structure. */
f65cf2b7
MJ
568
569static bool
570check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
571{
355fe088 572 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
11478306 573 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
f65cf2b7
MJ
574
575 if (stmt_may_be_vtbl_ptr_store (stmt))
576 {
577 tci->type_maybe_changed = true;
578 return true;
579 }
580 else
581 return false;
582}
583
058d0a90
JH
/* See if ARG is PARM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
290ebcb7 637
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Set up an alias-oracle reference covering just the VMT pointer at
     OFFSET within the object.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  /* Walk virtual definitions backwards from CALL looking for a possible
     VMT pointer store.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
691
058d0a90
JH
692/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
693 If it is, return true and fill in the jump function JFUNC with relevant type
694 information or set it to unknown. ARG is the object itself (not a pointer
695 to it, unless dereferenced). BASE is the base of the memory access as
696 returned by get_ref_base_and_extent, as is the offset. */
697
698static bool
538dd0b7 699detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
058d0a90
JH
700 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
701{
702 if (!flag_devirtualize)
703 return false;
704
705 if (TREE_CODE (base) == MEM_REF
706 && !param_type_may_change_p (current_function_decl,
707 TREE_OPERAND (base, 0),
708 call))
709 return false;
710 return detect_type_change_from_memory_writes (arg, base, comp_type,
711 call, jfunc, offset);
712}
713
f65cf2b7
MJ
714/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
715 SSA name (its dereference will become the base and the offset is assumed to
716 be zero). */
717
718static bool
06d65050 719detect_type_change_ssa (tree arg, tree comp_type,
538dd0b7 720 gcall *call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
721{
722 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 723 if (!flag_devirtualize
06d65050 724 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
725 return false;
726
058d0a90
JH
727 if (!param_type_may_change_p (current_function_decl, arg, call))
728 return false;
729
f65cf2b7 730 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 731 build_int_cst (ptr_type_node, 0));
f65cf2b7 732
058d0a90
JH
733 return detect_type_change_from_memory_writes (arg, arg, comp_type,
734 call, jfunc, 0);
f65cf2b7
MJ
735}
736
fdb0e1b4
MJ
737/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
738 boolean variable pointed to by DATA. */
739
740static bool
741mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
742 void *data)
743{
744 bool *b = (bool *) data;
745 *b = true;
746 return true;
747}
748
8aab5218
MJ
749/* Return true if we have already walked so many statements in AA that we
750 should really just start giving up. */
751
752static bool
56b40062 753aa_overwalked (struct ipa_func_body_info *fbi)
8aab5218
MJ
754{
755 gcc_checking_assert (fbi);
756 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
757}
758
759/* Find the nearest valid aa status for parameter specified by INDEX that
760 dominates BB. */
761
56b40062
MJ
762static struct ipa_param_aa_status *
763find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
8aab5218
MJ
764 int index)
765{
766 while (true)
767 {
768 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
769 if (!bb)
770 return NULL;
771 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
772 if (!bi->param_aa_statuses.is_empty ()
773 && bi->param_aa_statuses[index].valid)
774 return &bi->param_aa_statuses[index];
775 }
776}
777
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  /* Lazily allocate the per-BB status vector on first use.  */
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      /* A freshly cleared entry must not claim any modification yet.  */
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
806
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  /* A TREE_READONLY parameter can never be modified.  */
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* Use the cached per-BB answer when available.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  /* Account the walk against the global AA budget and cache a positive
     modification result.  */
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
851
852/* If STMT is an assignment that loads a value from an parameter declaration,
853 return the index of the parameter in ipa_node_params which has not been
854 modified. Otherwise return -1. */
855
856static int
56b40062 857load_from_unmodified_param (struct ipa_func_body_info *fbi,
8aab5218 858 vec<ipa_param_descriptor> descriptors,
355fe088 859 gimple *stmt)
fdb0e1b4
MJ
860{
861 int index;
862 tree op1;
863
864 if (!gimple_assign_single_p (stmt))
865 return -1;
866
867 op1 = gimple_assign_rhs1 (stmt);
868 if (TREE_CODE (op1) != PARM_DECL)
869 return -1;
870
d044dd17 871 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 872 if (index < 0
8aab5218 873 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
fdb0e1b4
MJ
874 return -1;
875
876 return index;
877}
f65cf2b7 878
8aab5218
MJ
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* Use the cached per-BB answer when available.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  /* Account the walk against the AA budget and cache a positive result.  */
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
917
8aab5218
MJ
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  /* Unlike parm_ref_data_preserved_p, FBI is required here, so the per-BB
     cache is always consulted.  */
  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  /* Size is unknown (NULL_TREE): conservatively treat the whole pointed-to
     region as the reference.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
951
91bb9f80
MJ
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  /* Punt on variable-sized or negative-offset accesses.  */
  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  /* Case 1: the aggregate is the parameter itself (passed by value).  */
  if (DECL_P (base))
    {
      /* NOTE(review): this inner INDEX deliberately shadows the outer one;
	 the outer INDEX is only used on the by-reference path below.  */
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  /* Case 2: the aggregate is pointed to by the parameter; the access must be
     a zero-offset MEM_REF through an SSA name.  */
  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      /* The pointer is the unmodified incoming parameter value.  */
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      /* In strict mode (no GUARANTEED_UNMODIFIED out-param), an unproven
	 load is a failure; otherwise we report the uncertainty instead.  */
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
1047
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  /* First try to find the parameter behind the (possibly copied) rhs;
     TC_SSA is the SSA name whose aggregate contents we may later query.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2: binary operation with an IPA-invariant second operand.
	     For non-comparisons the result type must match the operand
	     type, otherwise the operation cannot be replayed verbatim.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* Case 1: plain copy - a simple pass-through.  */
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  /* Case 3: taking the address of a field within a record pointed to by a
     default-def parameter - an ancestor jump function.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
1177
40591473
MJ
1178/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1179 it looks like:
1180
1181 iftmp.1_3 = &obj_2(D)->D.1762;
1182
1183 The base of the MEM_REF must be a default definition SSA NAME of a
1184 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1185 whole MEM_REF expression is returned and the offset calculated from any
1186 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1187 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1188
1189static tree
355fe088 1190get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
40591473
MJ
1191{
1192 HOST_WIDE_INT size, max_size;
1193 tree expr, parm, obj;
ee45a32d 1194 bool reverse;
40591473
MJ
1195
1196 if (!gimple_assign_single_p (assign))
1197 return NULL_TREE;
1198 expr = gimple_assign_rhs1 (assign);
1199
1200 if (TREE_CODE (expr) != ADDR_EXPR)
1201 return NULL_TREE;
1202 expr = TREE_OPERAND (expr, 0);
1203 obj = expr;
ee45a32d 1204 expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
40591473
MJ
1205
1206 if (TREE_CODE (expr) != MEM_REF
1207 /* If this is a varying address, punt. */
1208 || max_size == -1
1209 || max_size != size
1210 || *offset < 0)
1211 return NULL_TREE;
1212 parm = TREE_OPERAND (expr, 0);
1213 if (TREE_CODE (parm) != SSA_NAME
1214 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1215 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1216 return NULL_TREE;
1217
807e902e 1218 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
40591473
MJ
1219 *obj_p = obj;
1220 return expr;
1221}
1222
685b0d13 1223
b258210c
MJ
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the literal zero, the other (TMP) the
     cast pointer computed in the conditional block.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  /* TMP must be computed by an &parm->field assignment in a block with a
     single predecessor (the conditional block).  */
  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The guarding condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come from the matched diamond - no other
     control flow may reach the PHI.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
1304
be95e2b9
MJ
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* The first field must be a pointer to a method.  */
  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  /* The second field is the "delta" adjustment.
     NOTE(review): INTEGRAL_TYPE_P is applied to FLD itself (a FIELD_DECL),
     where it is always false, so this sub-check never rejects anything;
     presumably TREE_TYPE (fld) was intended -- confirm before changing, as
     the current form is long-standing behavior.  */
  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  /* There must be exactly two fields, like in a pointer-to-member.  */
  if (DECL_CHAIN (fld))
    return false;

  return true;
}
1339
61502ca8 1340/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1341 return the rhs of its defining statement. Otherwise return RHS as it
1342 is. */
7ec49257
MJ
1343
1344static inline tree
1345get_ssa_def_if_simple_copy (tree rhs)
1346{
1347 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1348 {
355fe088 1349 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
7ec49257
MJ
1350
1351 if (gimple_assign_single_p (def_stmt))
1352 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1353 else
1354 break;
7ec49257
MJ
1355 }
1356 return rhs;
1357}
1358
8b7773a4
MJ
/* Simple linked list, describing known contents of an aggregate before
   call.  The list is kept sorted by OFFSET.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1371
0d48ee34
MJ
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  /* The list is sorted by offset; advance past all entries that end at or
     before LHS_OFFSET, rejecting any that straddle it.  */
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  /* The next entry (if any) starts within [LHS_OFFSET, LHS_OFFSET+LHS_SIZE):
     it must coincide exactly or the overlap is unrepresentable.  */
  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
1404
1405/* Build aggregate jump function from LIST, assuming there are exactly
1406 CONST_COUNT constant entries there and that th offset of the passed argument
1407 is ARG_OFFSET and store it into JFUNC. */
1408
1409static void
1410build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1411 int const_count, HOST_WIDE_INT arg_offset,
1412 struct ipa_jump_func *jfunc)
1413{
1414 vec_alloc (jfunc->agg.items, const_count);
1415 while (list)
1416 {
1417 if (list->constant)
1418 {
1419 struct ipa_agg_jf_item item;
1420 item.offset = list->offset - arg_offset;
1421 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1422 item.value = unshare_expr_without_location (list->constant);
1423 jfunc->agg.items->quick_push (item);
1424 }
1425 list = list->next;
1426 }
1427}
1428
8b7773a4
MJ
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* Analysis disabled altogether by parameter.  */
  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      /* Statements that cannot touch the tracked aggregate are ignored;
	 anything we cannot fully analyze ends the walk.  */
      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  /* By-reference mode: only zero-offset stores through the very
	     same pointer are analyzable.  */
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  /* Stores to other local decls cannot alias; anything else may.  */
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
1597
06d65050
JH
1598static tree
1599ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1600{
1601 int n;
1602 tree type = (e->callee
67348ccc 1603 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1604 : gimple_call_fntype (e->call_stmt));
1605 tree t = TYPE_ARG_TYPES (type);
1606
1607 for (n = 0; n < i; n++)
1608 {
1609 if (!t)
1610 break;
1611 t = TREE_CHAIN (t);
1612 }
1613 if (t)
1614 return TREE_VALUE (t);
1615 if (!e->callee)
1616 return NULL;
67348ccc 1617 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1618 for (n = 0; n < i; n++)
1619 {
1620 if (!t)
1621 return NULL;
1622 t = TREE_CHAIN (t);
1623 }
1624 if (t)
1625 return TREE_TYPE (t);
1626 return NULL;
1627}
1628
3e293154
MJ
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if already computed.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      /* Polymorphic call context for pointer arguments (devirt only).  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      /* Value range information: known-nonnull for pointers, range info
	 for integral SSA names.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    {
	      /* Nonnull pointer is encoded as the anti-range ~[0, 0].  */
	      jfunc->vr_known = true;
	      jfunc->m_vr.type = VR_ANTI_RANGE;
	      jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.equiv = NULL;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      /* Convert the range to the callee's parameter type.  */
	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}

      /* Known-bits information: non-zero bits for integers, alignment for
	 pointers.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  jfunc->bits.known = true;
	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  jfunc->bits.mask = wi::mask<widest_int>(TYPE_PRECISION (TREE_TYPE (arg)), false)
			     .and_not (align / BITS_PER_UNIT - 1);
	  jfunc->bits.value = bitpos / BITS_PER_UNIT;
	}
      else
	gcc_assert (!jfunc->bits.known);

      /* The main jump function: constant, pass-through, ancestor or
	 aggregate-unknown.  */
      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
1811
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  /* Walk the call edges recorded for this BB (in reverse order).  */
  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
1837
8b7773a4
MJ
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  /* The rhs may be either COMPONENT_REF of a MEM_REF or a bare MEM_REF;
     remember the accessed field in the former case.  */
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type is shaped like a C++
     pointer-to-member-function.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the field
	 must be the one we are after.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Bare MEM_REF form: the byte offset must match the field position.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
								: NULL_TREE;
}
1889
1890/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1891
3e293154
MJ
1892static bool
1893ipa_is_ssa_with_stmt_def (tree t)
1894{
1895 if (TREE_CODE (t) == SSA_NAME
1896 && !SSA_NAME_IS_DEFAULT_DEF (t))
1897 return true;
1898 else
1899 return false;
1900}
1901
40591473
MJ
1902/* Find the indirect call graph edge corresponding to STMT and mark it as a
1903 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1904 indirect call graph edge. */
be95e2b9 1905
40591473 1906static struct cgraph_edge *
538dd0b7
DM
1907ipa_note_param_call (struct cgraph_node *node, int param_index,
1908 gcall *stmt)
3e293154 1909{
e33c6cd6 1910 struct cgraph_edge *cs;
3e293154 1911
d52f5295 1912 cs = node->get_edge (stmt);
b258210c 1913 cs->indirect_info->param_index = param_index;
8b7773a4 1914 cs->indirect_info->agg_contents = 0;
c13bc3d9 1915 cs->indirect_info->member_ptr = 0;
91bb9f80 1916 cs->indirect_info->guaranteed_unmodified = 0;
40591473 1917 return cs;
3e293154
MJ
1918}
1919
e33c6cd6 1920/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1921 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1922 intermediate information about each formal parameter. Currently it checks
1923 whether the call calls a pointer that is a formal parameter and if so, the
1924 parameter is marked with the called flag and an indirect call graph edge
1925 describing the call is created. This is very simple for ordinary pointers
1926 represented in SSA but not-so-nice when it comes to member pointers. The
1927 ugly part of this function does nothing more than trying to match the
1928 pattern of such a call. An example of such a pattern is the gimple dump
1929 below, the call is on the last line:
3e293154 1930
ae788515
EB
1931 <bb 2>:
1932 f$__delta_5 = f.__delta;
1933 f$__pfn_24 = f.__pfn;
1934
1935 or
3e293154 1936 <bb 2>:
d242d063
MJ
1937 f$__delta_5 = MEM[(struct *)&f];
1938 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1939
ae788515 1940 and a few lines below:
8aa29647
MJ
1941
1942 <bb 5>
3e293154
MJ
1943 D.2496_3 = (int) f$__pfn_24;
1944 D.2497_4 = D.2496_3 & 1;
1945 if (D.2497_4 != 0)
1946 goto <bb 3>;
1947 else
1948 goto <bb 4>;
1949
8aa29647 1950 <bb 6>:
3e293154
MJ
1951 D.2500_7 = (unsigned int) f$__delta_5;
1952 D.2501_8 = &S + D.2500_7;
1953 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1954 D.2503_10 = *D.2502_9;
1955 D.2504_12 = f$__pfn_24 + -1;
1956 D.2505_13 = (unsigned int) D.2504_12;
1957 D.2506_14 = D.2503_10 + D.2505_13;
1958 D.2507_15 = *D.2506_14;
1959 iftmp.11_16 = (String:: *) D.2507_15;
1960
8aa29647 1961 <bb 7>:
3e293154
MJ
1962 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1963 D.2500_19 = (unsigned int) f$__delta_5;
1964 D.2508_20 = &S + D.2500_19;
1965 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1966
1967 Such patterns are results of simple calls to a member pointer:
1968
1969 int doprinting (int (MyString::* f)(int) const)
1970 {
1971 MyString S ("somestring");
1972
1973 return (S.*f)(4);
1974 }
8b7773a4
MJ
1975
1976 Moreover, the function also looks for called pointers loaded from aggregates
1977 passed by value or reference. */
3e293154
MJ
1978
1979static void
56b40062 1980ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
8aab5218 1981 tree target)
3e293154 1982{
8aab5218 1983 struct ipa_node_params *info = fbi->info;
8b7773a4
MJ
1984 HOST_WIDE_INT offset;
1985 bool by_ref;
3e293154 1986
3e293154
MJ
1987 if (SSA_NAME_IS_DEFAULT_DEF (target))
1988 {
b258210c 1989 tree var = SSA_NAME_VAR (target);
8aab5218 1990 int index = ipa_get_param_decl_index (info, var);
3e293154 1991 if (index >= 0)
8aab5218 1992 ipa_note_param_call (fbi->node, index, call);
3e293154
MJ
1993 return;
1994 }
1995
8aab5218 1996 int index;
355fe088 1997 gimple *def = SSA_NAME_DEF_STMT (target);
91bb9f80 1998 bool guaranteed_unmodified;
8b7773a4 1999 if (gimple_assign_single_p (def)
ff302741
PB
2000 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2001 gimple_assign_rhs1 (def), &index, &offset,
91bb9f80 2002 NULL, &by_ref, &guaranteed_unmodified))
8b7773a4 2003 {
8aab5218 2004 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2005 cs->indirect_info->offset = offset;
2006 cs->indirect_info->agg_contents = 1;
2007 cs->indirect_info->by_ref = by_ref;
91bb9f80 2008 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
8b7773a4
MJ
2009 return;
2010 }
2011
3e293154
MJ
2012 /* Now we need to try to match the complex pattern of calling a member
2013 pointer. */
8b7773a4
MJ
2014 if (gimple_code (def) != GIMPLE_PHI
2015 || gimple_phi_num_args (def) != 2
2016 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
2017 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2018 return;
2019
3e293154
MJ
2020 /* First, we need to check whether one of these is a load from a member
2021 pointer that is a parameter to this function. */
8aab5218
MJ
2022 tree n1 = PHI_ARG_DEF (def, 0);
2023 tree n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 2024 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154 2025 return;
355fe088
TS
2026 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2027 gimple *d2 = SSA_NAME_DEF_STMT (n2);
3e293154 2028
8aab5218
MJ
2029 tree rec;
2030 basic_block bb, virt_bb;
2031 basic_block join = gimple_bb (def);
8b7773a4 2032 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 2033 {
8b7773a4 2034 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
2035 return;
2036
8aa29647 2037 bb = EDGE_PRED (join, 0)->src;
726a989a 2038 virt_bb = gimple_bb (d2);
3e293154 2039 }
8b7773a4 2040 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 2041 {
8aa29647 2042 bb = EDGE_PRED (join, 1)->src;
726a989a 2043 virt_bb = gimple_bb (d1);
3e293154
MJ
2044 }
2045 else
2046 return;
2047
2048 /* Second, we need to check that the basic blocks are laid out in the way
2049 corresponding to the pattern. */
2050
3e293154
MJ
2051 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2052 || single_pred (virt_bb) != bb
2053 || single_succ (virt_bb) != join)
2054 return;
2055
2056 /* Third, let's see that the branching is done depending on the least
2057 significant bit of the pfn. */
2058
355fe088 2059 gimple *branch = last_stmt (bb);
8aa29647 2060 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
2061 return;
2062
12430896
RG
2063 if ((gimple_cond_code (branch) != NE_EXPR
2064 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 2065 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 2066 return;
3e293154 2067
8aab5218 2068 tree cond = gimple_cond_lhs (branch);
3e293154
MJ
2069 if (!ipa_is_ssa_with_stmt_def (cond))
2070 return;
2071
726a989a 2072 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 2073 if (!is_gimple_assign (def)
726a989a
RB
2074 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2075 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 2076 return;
726a989a
RB
2077
2078 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2079 if (!ipa_is_ssa_with_stmt_def (cond))
2080 return;
2081
726a989a 2082 def = SSA_NAME_DEF_STMT (cond);
3e293154 2083
8b75fc9b
MJ
2084 if (is_gimple_assign (def)
2085 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 2086 {
726a989a 2087 cond = gimple_assign_rhs1 (def);
3e293154
MJ
2088 if (!ipa_is_ssa_with_stmt_def (cond))
2089 return;
726a989a 2090 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
2091 }
2092
8aab5218 2093 tree rec2;
6f7b8b70
RE
2094 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2095 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
2096 == ptrmemfunc_vbit_in_delta),
2097 NULL);
3e293154
MJ
2098 if (rec != rec2)
2099 return;
2100
2101 index = ipa_get_param_decl_index (info, rec);
8b7773a4 2102 if (index >= 0
8aab5218 2103 && parm_preserved_before_stmt_p (fbi, index, call, rec))
8b7773a4 2104 {
8aab5218 2105 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
8b7773a4
MJ
2106 cs->indirect_info->offset = offset;
2107 cs->indirect_info->agg_contents = 1;
c13bc3d9 2108 cs->indirect_info->member_ptr = 1;
91bb9f80 2109 cs->indirect_info->guaranteed_unmodified = 1;
8b7773a4 2110 }
3e293154
MJ
2111
2112 return;
2113}
2114
b258210c
MJ
2115/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2116 object referenced in the expression is a formal parameter of the caller
8aab5218
MJ
2117 FBI->node (described by FBI->info), create a call note for the
2118 statement. */
b258210c
MJ
2119
2120static void
56b40062 2121ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
538dd0b7 2122 gcall *call, tree target)
b258210c
MJ
2123{
2124 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 2125 int index;
40591473 2126 HOST_WIDE_INT anc_offset;
b258210c 2127
05842ff5
MJ
2128 if (!flag_devirtualize)
2129 return;
2130
40591473 2131 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
2132 return;
2133
8aab5218 2134 struct ipa_node_params *info = fbi->info;
40591473
MJ
2135 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2136 {
8aab5218 2137 struct ipa_jump_func jfunc;
40591473
MJ
2138 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2139 return;
b258210c 2140
40591473
MJ
2141 anc_offset = 0;
2142 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2143 gcc_assert (index >= 0);
06d65050
JH
2144 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2145 call, &jfunc))
40591473
MJ
2146 return;
2147 }
2148 else
2149 {
8aab5218 2150 struct ipa_jump_func jfunc;
355fe088 2151 gimple *stmt = SSA_NAME_DEF_STMT (obj);
40591473
MJ
2152 tree expr;
2153
2154 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2155 if (!expr)
2156 return;
2157 index = ipa_get_param_decl_index (info,
2158 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2159 gcc_assert (index >= 0);
06d65050
JH
2160 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2161 call, &jfunc, anc_offset))
40591473
MJ
2162 return;
2163 }
2164
8aab5218
MJ
2165 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2166 struct cgraph_indirect_call_info *ii = cs->indirect_info;
8b7773a4 2167 ii->offset = anc_offset;
ae7e9ddd 2168 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2169 ii->otr_type = obj_type_ref_class (target);
40591473 2170 ii->polymorphic = 1;
b258210c
MJ
2171}
2172
2173/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2174 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2175 containing intermediate information about each formal parameter. */
b258210c
MJ
2176
2177static void
56b40062 2178ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
b258210c
MJ
2179{
2180 tree target = gimple_call_fn (call);
b786d31f
JH
2181
2182 if (!target
2183 || (TREE_CODE (target) != SSA_NAME
2184 && !virtual_method_call_p (target)))
2185 return;
b258210c 2186
7d0aa05b 2187 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2188 /* If we previously turned the call into a direct call, there is
2189 no need to analyze. */
b786d31f 2190 if (cs && !cs->indirect_unknown_callee)
25583c4f 2191 return;
7d0aa05b 2192
a5b58b28 2193 if (cs->indirect_info->polymorphic && flag_devirtualize)
7d0aa05b 2194 {
7d0aa05b
JH
2195 tree instance;
2196 tree target = gimple_call_fn (call);
6f8091fc
JH
2197 ipa_polymorphic_call_context context (current_function_decl,
2198 target, call, &instance);
7d0aa05b 2199
ba392339
JH
2200 gcc_checking_assert (cs->indirect_info->otr_type
2201 == obj_type_ref_class (target));
2202 gcc_checking_assert (cs->indirect_info->otr_token
2203 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2204
29c43c83
JH
2205 cs->indirect_info->vptr_changed
2206 = !context.get_dynamic_type (instance,
2207 OBJ_TYPE_REF_OBJECT (target),
2208 obj_type_ref_class (target), call);
0127c169 2209 cs->indirect_info->context = context;
7d0aa05b
JH
2210 }
2211
b258210c 2212 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2213 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2214 else if (virtual_method_call_p (target))
8aab5218 2215 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2216}
2217
2218
e33c6cd6 2219/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2220 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2221 formal parameters are called. */
be95e2b9 2222
3e293154 2223static void
355fe088 2224ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
3e293154 2225{
726a989a 2226 if (is_gimple_call (stmt))
538dd0b7 2227 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
062c604f
MJ
2228}
2229
2230/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2231 If OP is a parameter declaration, mark it as used in the info structure
2232 passed in DATA. */
2233
2234static bool
355fe088 2235visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
062c604f
MJ
2236{
2237 struct ipa_node_params *info = (struct ipa_node_params *) data;
2238
2239 op = get_base_address (op);
2240 if (op
2241 && TREE_CODE (op) == PARM_DECL)
2242 {
2243 int index = ipa_get_param_decl_index (info, op);
2244 gcc_assert (index >= 0);
310bc633 2245 ipa_set_param_used (info, index, true);
062c604f
MJ
2246 }
2247
2248 return false;
3e293154
MJ
2249}
2250
8aab5218
MJ
2251/* Scan the statements in BB and inspect the uses of formal parameters. Store
2252 the findings in various structures of the associated ipa_node_params
2253 structure, such as parameter flags, notes etc. FBI holds various data about
2254 the function being analyzed. */
be95e2b9 2255
062c604f 2256static void
56b40062 2257ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
3e293154 2258{
726a989a 2259 gimple_stmt_iterator gsi;
8aab5218
MJ
2260 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2261 {
355fe088 2262 gimple *stmt = gsi_stmt (gsi);
3e293154 2263
8aab5218
MJ
2264 if (is_gimple_debug (stmt))
2265 continue;
3e293154 2266
8aab5218
MJ
2267 ipa_analyze_stmt_uses (fbi, stmt);
2268 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2269 visit_ref_for_mod_analysis,
2270 visit_ref_for_mod_analysis,
2271 visit_ref_for_mod_analysis);
5fe8e757 2272 }
8aab5218
MJ
2273 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2274 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2275 visit_ref_for_mod_analysis,
2276 visit_ref_for_mod_analysis,
2277 visit_ref_for_mod_analysis);
2278}
2279
2280/* Calculate controlled uses of parameters of NODE. */
2281
2282static void
2283ipa_analyze_controlled_uses (struct cgraph_node *node)
2284{
2285 struct ipa_node_params *info = IPA_NODE_REF (node);
5fe8e757 2286
8aab5218 2287 for (int i = 0; i < ipa_get_param_count (info); i++)
062c604f
MJ
2288 {
2289 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2290 int controlled_uses = 0;
2291
062c604f
MJ
2292 /* For SSA regs see if parameter is used. For non-SSA we compute
2293 the flag during modification analysis. */
4502fe8d
MJ
2294 if (is_gimple_reg (parm))
2295 {
67348ccc 2296 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2297 parm);
2298 if (ddef && !has_zero_uses (ddef))
2299 {
2300 imm_use_iterator imm_iter;
2301 use_operand_p use_p;
2302
2303 ipa_set_param_used (info, i, true);
2304 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2305 if (!is_gimple_call (USE_STMT (use_p)))
2306 {
c6de6665
JJ
2307 if (!is_gimple_debug (USE_STMT (use_p)))
2308 {
2309 controlled_uses = IPA_UNDESCRIBED_USE;
2310 break;
2311 }
4502fe8d
MJ
2312 }
2313 else
2314 controlled_uses++;
2315 }
2316 else
2317 controlled_uses = 0;
2318 }
2319 else
2320 controlled_uses = IPA_UNDESCRIBED_USE;
2321 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f 2322 }
8aab5218 2323}
062c604f 2324
8aab5218 2325/* Free stuff in BI. */
062c604f 2326
8aab5218
MJ
2327static void
2328free_ipa_bb_info (struct ipa_bb_info *bi)
2329{
2330 bi->cg_edges.release ();
2331 bi->param_aa_statuses.release ();
3e293154
MJ
2332}
2333
8aab5218 2334/* Dominator walker driving the analysis. */
2c9561b5 2335
8aab5218 2336class analysis_dom_walker : public dom_walker
2c9561b5 2337{
8aab5218 2338public:
56b40062 2339 analysis_dom_walker (struct ipa_func_body_info *fbi)
8aab5218 2340 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2c9561b5 2341
3daacdcd 2342 virtual edge before_dom_children (basic_block);
8aab5218
MJ
2343
2344private:
56b40062 2345 struct ipa_func_body_info *m_fbi;
8aab5218
MJ
2346};
2347
3daacdcd 2348edge
8aab5218
MJ
2349analysis_dom_walker::before_dom_children (basic_block bb)
2350{
2351 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2352 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3daacdcd 2353 return NULL;
2c9561b5
MJ
2354}
2355
c3431191
ML
2356/* Release body info FBI. */
2357
2358void
2359ipa_release_body_info (struct ipa_func_body_info *fbi)
2360{
2361 int i;
2362 struct ipa_bb_info *bi;
2363
2364 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2365 free_ipa_bb_info (bi);
2366 fbi->bb_infos.release ();
2367}
2368
026c3cfd 2369/* Initialize the array describing properties of formal parameters
dd5a833e
MS
2370 of NODE, analyze their uses and compute jump functions associated
2371 with actual arguments of calls from within NODE. */
062c604f
MJ
2372
2373void
2374ipa_analyze_node (struct cgraph_node *node)
2375{
56b40062 2376 struct ipa_func_body_info fbi;
57dbdc5a 2377 struct ipa_node_params *info;
062c604f 2378
57dbdc5a
MJ
2379 ipa_check_create_node_params ();
2380 ipa_check_create_edge_args ();
2381 info = IPA_NODE_REF (node);
8aab5218
MJ
2382
2383 if (info->analysis_done)
2384 return;
2385 info->analysis_done = 1;
2386
2387 if (ipa_func_spec_opts_forbid_analysis_p (node))
2388 {
2389 for (int i = 0; i < ipa_get_param_count (info); i++)
2390 {
2391 ipa_set_param_used (info, i, true);
2392 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2393 }
2394 return;
2395 }
2396
2397 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2398 push_cfun (func);
2399 calculate_dominance_info (CDI_DOMINATORS);
062c604f 2400 ipa_initialize_node_params (node);
8aab5218 2401 ipa_analyze_controlled_uses (node);
062c604f 2402
8aab5218
MJ
2403 fbi.node = node;
2404 fbi.info = IPA_NODE_REF (node);
2405 fbi.bb_infos = vNULL;
2406 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2407 fbi.param_count = ipa_get_param_count (info);
2408 fbi.aa_walked = 0;
062c604f 2409
8aab5218
MJ
2410 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2411 {
2412 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2413 bi->cg_edges.safe_push (cs);
2414 }
062c604f 2415
8aab5218
MJ
2416 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2417 {
2418 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2419 bi->cg_edges.safe_push (cs);
2420 }
2421
2422 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2423
c3431191 2424 ipa_release_body_info (&fbi);
8aab5218 2425 free_dominance_info (CDI_DOMINATORS);
f65cf2b7 2426 pop_cfun ();
062c604f 2427}
062c604f 2428
be95e2b9 2429/* Update the jump functions associated with call graph edge E when the call
3e293154 2430 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2431 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2432
3e293154
MJ
2433static void
2434update_jump_functions_after_inlining (struct cgraph_edge *cs,
2435 struct cgraph_edge *e)
2436{
2437 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2438 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2439 int count = ipa_get_cs_argument_count (args);
2440 int i;
2441
2442 for (i = 0; i < count; i++)
2443 {
b258210c 2444 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
5ce97055
JH
2445 struct ipa_polymorphic_call_context *dst_ctx
2446 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2447
685b0d13
MJ
2448 if (dst->type == IPA_JF_ANCESTOR)
2449 {
b258210c 2450 struct ipa_jump_func *src;
8b7773a4 2451 int dst_fid = dst->value.ancestor.formal_id;
5ce97055
JH
2452 struct ipa_polymorphic_call_context *src_ctx
2453 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2454
b258210c
MJ
2455 /* Variable number of arguments can cause havoc if we try to access
2456 one that does not exist in the inlined edge. So make sure we
2457 don't. */
8b7773a4 2458 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c 2459 {
04be694e 2460 ipa_set_jf_unknown (dst);
b258210c
MJ
2461 continue;
2462 }
2463
8b7773a4
MJ
2464 src = ipa_get_ith_jump_func (top, dst_fid);
2465
5ce97055
JH
2466 if (src_ctx && !src_ctx->useless_p ())
2467 {
2468 struct ipa_polymorphic_call_context ctx = *src_ctx;
2469
2470 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2471 if (!ipa_get_jf_ancestor_type_preserved (dst))
f9bb202b 2472 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2473 ctx.offset_by (dst->value.ancestor.offset);
2474 if (!ctx.useless_p ())
2475 {
a7d1f3fe
ML
2476 if (!dst_ctx)
2477 {
2478 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2479 count);
2480 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2481 }
2482
2483 dst_ctx->combine_with (ctx);
5ce97055
JH
2484 }
2485 }
2486
8b7773a4
MJ
2487 if (src->agg.items
2488 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2489 {
2490 struct ipa_agg_jf_item *item;
2491 int j;
2492
2493 /* Currently we do not produce clobber aggregate jump functions,
2494 replace with merging when we do. */
2495 gcc_assert (!dst->agg.items);
2496
9771b263 2497 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2498 dst->agg.by_ref = src->agg.by_ref;
9771b263 2499 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2500 item->offset -= dst->value.ancestor.offset;
2501 }
2502
3b97a5c7
MJ
2503 if (src->type == IPA_JF_PASS_THROUGH
2504 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2505 {
2506 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2507 dst->value.ancestor.agg_preserved &=
2508 src->value.pass_through.agg_preserved;
2509 }
b258210c
MJ
2510 else if (src->type == IPA_JF_ANCESTOR)
2511 {
2512 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2513 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2514 dst->value.ancestor.agg_preserved &=
2515 src->value.ancestor.agg_preserved;
b258210c
MJ
2516 }
2517 else
04be694e 2518 ipa_set_jf_unknown (dst);
b258210c
MJ
2519 }
2520 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2521 {
b258210c
MJ
2522 struct ipa_jump_func *src;
2523 /* We must check range due to calls with variable number of arguments
2524 and we cannot combine jump functions with operations. */
2525 if (dst->value.pass_through.operation == NOP_EXPR
2526 && (dst->value.pass_through.formal_id
2527 < ipa_get_cs_argument_count (top)))
2528 {
8b7773a4
MJ
2529 int dst_fid = dst->value.pass_through.formal_id;
2530 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2531 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
5ce97055
JH
2532 struct ipa_polymorphic_call_context *src_ctx
2533 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2534
5ce97055
JH
2535 if (src_ctx && !src_ctx->useless_p ())
2536 {
2537 struct ipa_polymorphic_call_context ctx = *src_ctx;
2538
2539 /* TODO: Make type preserved safe WRT contexts. */
44210a96 2540 if (!ipa_get_jf_pass_through_type_preserved (dst))
f9bb202b 2541 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2542 if (!ctx.useless_p ())
2543 {
2544 if (!dst_ctx)
2545 {
2546 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2547 count);
2548 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2549 }
2550 dst_ctx->combine_with (ctx);
2551 }
2552 }
b8f6e610
MJ
2553 switch (src->type)
2554 {
2555 case IPA_JF_UNKNOWN:
04be694e 2556 ipa_set_jf_unknown (dst);
b8f6e610 2557 break;
b8f6e610
MJ
2558 case IPA_JF_CONST:
2559 ipa_set_jf_cst_copy (dst, src);
2560 break;
2561
2562 case IPA_JF_PASS_THROUGH:
2563 {
2564 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2565 enum tree_code operation;
2566 operation = ipa_get_jf_pass_through_operation (src);
2567
2568 if (operation == NOP_EXPR)
2569 {
3b97a5c7 2570 bool agg_p;
b8f6e610
MJ
2571 agg_p = dst_agg_p
2572 && ipa_get_jf_pass_through_agg_preserved (src);
3b97a5c7 2573 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
b8f6e610
MJ
2574 }
2575 else
2576 {
2577 tree operand = ipa_get_jf_pass_through_operand (src);
2578 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2579 operation);
2580 }
2581 break;
2582 }
2583 case IPA_JF_ANCESTOR:
2584 {
3b97a5c7 2585 bool agg_p;
b8f6e610
MJ
2586 agg_p = dst_agg_p
2587 && ipa_get_jf_ancestor_agg_preserved (src);
b8f6e610
MJ
2588 ipa_set_ancestor_jf (dst,
2589 ipa_get_jf_ancestor_offset (src),
b8f6e610 2590 ipa_get_jf_ancestor_formal_id (src),
3b97a5c7 2591 agg_p);
b8f6e610
MJ
2592 break;
2593 }
2594 default:
2595 gcc_unreachable ();
2596 }
8b7773a4
MJ
2597
2598 if (src->agg.items
b8f6e610 2599 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2600 {
2601 /* Currently we do not produce clobber aggregate jump
2602 functions, replace with merging when we do. */
2603 gcc_assert (!dst->agg.items);
2604
2605 dst->agg.by_ref = src->agg.by_ref;
9771b263 2606 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2607 }
b258210c
MJ
2608 }
2609 else
04be694e 2610 ipa_set_jf_unknown (dst);
3e293154 2611 }
b258210c
MJ
2612 }
2613}
2614
5ce97055
JH
2615/* If TARGET is an addr_expr of a function declaration, make it the
2616 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2617 Otherwise, return NULL. */
b258210c 2618
3949c4a7 2619struct cgraph_edge *
5ce97055
JH
2620ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2621 bool speculative)
b258210c
MJ
2622{
2623 struct cgraph_node *callee;
0f378cb5 2624 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2625 bool unreachable = false;
b258210c 2626
ceeffab0
MJ
2627 if (TREE_CODE (target) == ADDR_EXPR)
2628 target = TREE_OPERAND (target, 0);
b258210c 2629 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2630 {
2631 target = canonicalize_constructor_val (target, NULL);
2632 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2633 {
db66bf68
JH
2634 /* Member pointer call that goes through a VMT lookup. */
2635 if (ie->indirect_info->member_ptr
2636 /* Or if target is not an invariant expression and we do not
2637 know if it will evaulate to function at runtime.
2638 This can happen when folding through &VAR, where &VAR
2639 is IP invariant, but VAR itself is not.
2640
2641 TODO: Revisit this when GCC 5 is branched. It seems that
2642 member_ptr check is not needed and that we may try to fold
2643 the expression and see if VAR is readonly. */
2644 || !is_gimple_ip_invariant (target))
2645 {
2646 if (dump_enabled_p ())
2647 {
2648 location_t loc = gimple_location_safe (ie->call_stmt);
2649 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2650 "discovered direct call non-invariant "
2651 "%s/%i\n",
2652 ie->caller->name (), ie->caller->order);
2653 }
2654 return NULL;
2655 }
2656
c13bc3d9 2657
2b5f0895
XDL
2658 if (dump_enabled_p ())
2659 {
807b7d62
ML
2660 location_t loc = gimple_location_safe (ie->call_stmt);
2661 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2662 "discovered direct call to non-function in %s/%i, "
2663 "making it __builtin_unreachable\n",
2664 ie->caller->name (), ie->caller->order);
2b5f0895 2665 }
3c9e6fca 2666
48b1474e 2667 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 2668 callee = cgraph_node::get_create (target);
48b1474e 2669 unreachable = true;
a0a7b611 2670 }
48b1474e 2671 else
d52f5295 2672 callee = cgraph_node::get (target);
a0a7b611 2673 }
48b1474e 2674 else
d52f5295 2675 callee = cgraph_node::get (target);
a0a7b611
JH
2676
2677 /* Because may-edges are not explicitely represented and vtable may be external,
2678 we may create the first reference to the object in the unit. */
2679 if (!callee || callee->global.inlined_to)
2680 {
a0a7b611
JH
2681
2682 /* We are better to ensure we can refer to it.
2683 In the case of static functions we are out of luck, since we already
2684 removed its body. In the case of public functions we may or may
2685 not introduce the reference. */
2686 if (!canonicalize_constructor_val (target, NULL)
2687 || !TREE_PUBLIC (target))
2688 {
2689 if (dump_file)
2690 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2691 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2a72a953 2692 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2693 ie->caller->order,
2a72a953 2694 xstrdup_for_dump (ie->callee->name ()),
67348ccc 2695 ie->callee->order);
a0a7b611
JH
2696 return NULL;
2697 }
d52f5295 2698 callee = cgraph_node::get_create (target);
a0a7b611 2699 }
2b5f0895 2700
0127c169
JH
2701 /* If the edge is already speculated. */
2702 if (speculative && ie->speculative)
2703 {
2704 struct cgraph_edge *e2;
2705 struct ipa_ref *ref;
2706 ie->speculative_call_info (e2, ie, ref);
2707 if (e2->callee->ultimate_alias_target ()
2708 != callee->ultimate_alias_target ())
2709 {
2710 if (dump_file)
2711 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2712 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2a72a953 2713 xstrdup_for_dump (ie->caller->name ()),
0127c169 2714 ie->caller->order,
2a72a953 2715 xstrdup_for_dump (callee->name ()),
0127c169 2716 callee->order,
2a72a953 2717 xstrdup_for_dump (e2->callee->name ()),
0127c169
JH
2718 e2->callee->order);
2719 }
2720 else
2721 {
2722 if (dump_file)
2723 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2724 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2a72a953 2725 xstrdup_for_dump (ie->caller->name ()),
0127c169 2726 ie->caller->order,
2a72a953 2727 xstrdup_for_dump (callee->name ()),
0127c169
JH
2728 callee->order);
2729 }
2730 return NULL;
2731 }
2732
2b5f0895
XDL
2733 if (!dbg_cnt (devirt))
2734 return NULL;
2735
1dbee8c9 2736 ipa_check_create_node_params ();
ceeffab0 2737
81fa35bd
MJ
2738 /* We can not make edges to inline clones. It is bug that someone removed
2739 the cgraph node too early. */
17afc0fe
JH
2740 gcc_assert (!callee->global.inlined_to);
2741
48b1474e 2742 if (dump_file && !unreachable)
b258210c 2743 {
5ce97055 2744 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
ceeffab0 2745 "(%s/%i -> %s/%i), for stmt ",
b258210c 2746 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
5ce97055 2747 speculative ? "speculative" : "known",
2a72a953 2748 xstrdup_for_dump (ie->caller->name ()),
67348ccc 2749 ie->caller->order,
2a72a953 2750 xstrdup_for_dump (callee->name ()),
67348ccc 2751 callee->order);
b258210c
MJ
2752 if (ie->call_stmt)
2753 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2754 else
2755 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2 2756 }
2b5f0895
XDL
2757 if (dump_enabled_p ())
2758 {
807b7d62 2759 location_t loc = gimple_location_safe (ie->call_stmt);
3c9e6fca 2760
807b7d62
ML
2761 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2762 "converting indirect call in %s to direct call to %s\n",
2763 ie->caller->name (), callee->name ());
2b5f0895 2764 }
5ce97055 2765 if (!speculative)
d8d5aef1
JH
2766 {
2767 struct cgraph_edge *orig = ie;
2768 ie = ie->make_direct (callee);
2769 /* If we resolved speculative edge the cost is already up to date
2770 for direct call (adjusted by inline_edge_duplication_hook). */
2771 if (ie == orig)
2772 {
2773 es = inline_edge_summary (ie);
2774 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2775 - eni_size_weights.call_cost);
2776 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2777 - eni_time_weights.call_cost);
2778 }
2779 }
5ce97055
JH
2780 else
2781 {
2782 if (!callee->can_be_discarded_p ())
2783 {
2784 cgraph_node *alias;
2785 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2786 if (alias)
2787 callee = alias;
2788 }
d8d5aef1 2789 /* make_speculative will update ie's cost to direct call cost. */
5ce97055
JH
2790 ie = ie->make_speculative
2791 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2792 }
749aa96d 2793
b258210c 2794 return ie;
3e293154
MJ
2795}
2796
91bb9f80
MJ
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET (a bit
   offset) in CONSTRUCTOR and return it.  Return NULL if the search fails for
   some reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  /* Only arrays and records can be searched by a positional offset.  */
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  /* Rebase an index from a domain with a non-zero lower
		     bound to a zero-based element number.  */
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	    }
	  else
	    /* Elements without an explicit index are consecutive, so the
	       running element count IX gives the position.  */
	    off = wi::to_offset (unit_size) * ix;

	  /* Convert the byte offset into bits so it is comparable with
	     REQ_OFFSET.  */
	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  /* Bit-field members are skipped rather than looked up.  */
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      /* NOTE(review): giving up here assumes constructor elements come in
	 increasing offset order -- TODO confirm.  */
      if (elt_offset > req_offset)
	return NULL;

      /* Descend into nested aggregates with a rebased offset.  */
      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      /* Only scalar interprocedural invariants are usable results.  */
      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
2864
2865/* Check whether SCALAR could be used to look up an aggregate interprocedural
2866 invariant from a static constructor and if so, return it. Otherwise return
2867 NULL. */
2868
2869static tree
2870ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
2871{
2872 if (by_ref)
2873 {
2874 if (TREE_CODE (scalar) != ADDR_EXPR)
2875 return NULL;
2876 scalar = TREE_OPERAND (scalar, 0);
2877 }
2878
8813a647 2879 if (!VAR_P (scalar)
91bb9f80
MJ
2880 || !is_global_var (scalar)
2881 || !TREE_READONLY (scalar)
2882 || !DECL_INITIAL (scalar)
2883 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
2884 return NULL;
2885
2886 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
2887}
2888
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  /* A constant found in a static initializer takes precedence over the
     aggregate jump function items.  */
  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  /* The pass-convention of the jump function must match the request.  */
  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
2930
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  /* A cleared originating edge means the description no longer tracks a
     live reference.  */
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
2956
2957/* If JFUNC has a reference description with refcount different from
2958 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2959 NULL. JFUNC must be a constant jump function. */
2960
2961static struct ipa_cst_ref_desc *
2962jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2963{
2964 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2965 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2966 return rdesc;
2967 else
2968 return NULL;
2969}
2970
568cda29
MJ
2971/* If the value of constant jump function JFUNC is an address of a function
2972 declaration, return the associated call graph node. Otherwise return
2973 NULL. */
2974
2975static cgraph_node *
2976cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2977{
2978 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2979 tree cst = ipa_get_jf_constant (jfunc);
2980 if (TREE_CODE (cst) != ADDR_EXPR
2981 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2982 return NULL;
2983
d52f5295 2984 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
2985}
2986
2987
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  /* Note the deliberate short-circuit: the refcount is only decremented
     when the jump function is a constant with a usable description.  */
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
3009
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
  if (agg_contents)
    {
      bool from_global_constant;
      /* The callee address is loaded from an aggregate; look it up in the
	 aggregate jump function or a static initializer of SCALAR.  */
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      /* A value from a mutable aggregate is only trusted when the spot it
	 was loaded from is known not to have been modified.  */
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The constant jump function no longer describes a use; account for
	 that in its reference description.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3057
bec81025
MJ
3058/* Return the target to be used in cases of impossible devirtualization. IE
3059 and target (the latter can be NULL) are dumped when dumping is enabled. */
3060
72972c22
MJ
3061tree
3062ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3063{
3064 if (dump_file)
3065 {
3066 if (target)
3067 fprintf (dump_file,
72972c22 3068 "Type inconsistent devirtualization: %s/%i->%s\n",
bec81025
MJ
3069 ie->caller->name (), ie->caller->order,
3070 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3071 else
3072 fprintf (dump_file,
3073 "No devirtualization target in %s/%i\n",
3074 ie->caller->name (), ie->caller->order);
3075 }
3076 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3077 cgraph_node::get_create (new_target);
bec81025
MJ
3078 return new_target;
3079}
3080
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
			(ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  /* A vtable lookup with a possibly changed vptr can only
		     be a speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  /* Second chance: consult the polymorphic context of the call.  */
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      /* A complete list of targets overrides any earlier speculation.  */
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  /* Never speculate about a target that cannot possibly be
	     called.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3188
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      /* IE may be removed or turned direct below; grab the successor
	 first.  */
      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Edge manipulation may have reallocated the summary vector;
	     refresh TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      /* The call remains indirect; translate its parameter description
	 through the jump function of the inlined edge.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3318
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  /* Edges with !inline_failed lead to further inlined nodes of this
     subtree; all others leave it and get their jump functions updated.  */
  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
3347
4502fe8d
MJ
3348/* Combine two controlled uses counts as done during inlining. */
3349
3350static int
3351combine_controlled_uses_counters (int c, int d)
3352{
3353 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3354 return IPA_UNDESCRIBED_USE;
3355 else
3356 return c + d - 1;
3357}
3358
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone; if the known constant is an
		 address of a function, the cloning-created reference to it
		 can be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline clones and drop the
		     corresponding cloning-created references as well.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the callee's formal parameters cannot be tracked;
     mark anything they describe as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3476
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
3499
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  /* The vector must be freed before the structure is cleared, otherwise
     the pointer to it would be lost.  */
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
3509
771578a0 3510/* Free all ipa_edge structures. */
be95e2b9 3511
518dc859 3512void
771578a0 3513ipa_free_all_edge_args (void)
518dc859 3514{
771578a0
MJ
3515 int i;
3516 struct ipa_edge_args *args;
518dc859 3517
9771b263
DN
3518 if (!ipa_edge_args_vector)
3519 return;
3520
3521 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3522 ipa_free_edge_args_substructures (args);
3523
9771b263 3524 vec_free (ipa_edge_args_vector);
518dc859
RL
3525}
3526
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  /* Reset remaining fields to a well-defined empty state.  */
  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
3547
771578a0 3548/* Free all ipa_node_params structures. */
be95e2b9 3549
518dc859 3550void
771578a0 3551ipa_free_all_node_params (void)
518dc859 3552{
dd912cb8
ML
3553 delete ipa_node_params_sum;
3554 ipa_node_params_sum = NULL;
771578a0
MJ
3555}
3556
/* Grow ipcp_transformations if necessary so it can be indexed by any
   call graph node uid currently in use.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}
3566
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  /* Make sure the transformation vector can be indexed by NODE's uid.  */
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
3576
771578a0 3577/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3578
771578a0 3579static void
5c0466b5 3580ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3581{
568cda29
MJ
3582 struct ipa_edge_args *args;
3583
3584 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3585 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3586 return;
568cda29
MJ
3587
3588 args = IPA_EDGE_REF (cs);
3589 if (args->jump_functions)
3590 {
3591 struct ipa_jump_func *jf;
3592 int i;
3593 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3594 {
3595 struct ipa_cst_ref_desc *rdesc;
3596 try_decrement_rdesc_refcount (jf);
3597 if (jf->type == IPA_JF_CONST
3598 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3599 && rdesc->cs == cs)
3600 rdesc->cs = NULL;
3601 }
568cda29
MJ
3602 }
3603
771578a0 3604 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3605}
3606
8b7773a4 3607/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3608
771578a0
MJ
3609static void
3610ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
dd912cb8 3611 void *)
771578a0
MJ
3612{
3613 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3614 unsigned int i;
771578a0
MJ
3615
3616 ipa_check_create_edge_args ();
3617
3618 old_args = IPA_EDGE_REF (src);
3619 new_args = IPA_EDGE_REF (dst);
3620
9771b263 3621 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
5ce97055
JH
3622 if (old_args->polymorphic_call_contexts)
3623 new_args->polymorphic_call_contexts
3624 = vec_safe_copy (old_args->polymorphic_call_contexts);
8b7773a4 3625
9771b263 3626 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3627 {
3628 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3629 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3630
3631 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3632
3633 if (src_jf->type == IPA_JF_CONST)
3634 {
3635 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3636
3637 if (!src_rdesc)
3638 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3639 else if (src->caller == dst->caller)
3640 {
3641 struct ipa_ref *ref;
5e20cdc9 3642 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3643 gcc_checking_assert (n);
d122681a
ML
3644 ref = src->caller->find_reference (n, src->call_stmt,
3645 src->lto_stmt_uid);
568cda29 3646 gcc_checking_assert (ref);
d122681a 3647 dst->caller->clone_reference (ref, ref->stmt);
568cda29 3648
601f3293 3649 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
568cda29
MJ
3650 dst_rdesc->cs = dst;
3651 dst_rdesc->refcount = src_rdesc->refcount;
3652 dst_rdesc->next_duplicate = NULL;
3653 dst_jf->value.constant.rdesc = dst_rdesc;
3654 }
4502fe8d
MJ
3655 else if (src_rdesc->cs == src)
3656 {
601f3293 3657 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4502fe8d 3658 dst_rdesc->cs = dst;
4502fe8d 3659 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3660 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3661 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3662 dst_jf->value.constant.rdesc = dst_rdesc;
3663 }
3664 else
3665 {
3666 struct ipa_cst_ref_desc *dst_rdesc;
3667 /* This can happen during inlining, when a JFUNC can refer to a
3668 reference taken in a function up in the tree of inline clones.
3669 We need to find the duplicate that refers to our tree of
3670 inline clones. */
3671
3672 gcc_assert (dst->caller->global.inlined_to);
3673 for (dst_rdesc = src_rdesc->next_duplicate;
3674 dst_rdesc;
3675 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3676 {
3677 struct cgraph_node *top;
3678 top = dst_rdesc->cs->caller->global.inlined_to
3679 ? dst_rdesc->cs->caller->global.inlined_to
3680 : dst_rdesc->cs->caller;
3681 if (dst->caller->global.inlined_to == top)
3682 break;
3683 }
44a60244 3684 gcc_assert (dst_rdesc);
4502fe8d
MJ
3685 dst_jf->value.constant.rdesc = dst_rdesc;
3686 }
3687 }
6fe45955
MJ
3688 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3689 && src->caller == dst->caller)
3690 {
3691 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3692 ? dst->caller->global.inlined_to : dst->caller;
3693 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3694 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3695
3696 int c = ipa_get_controlled_uses (root_info, idx);
3697 if (c != IPA_UNDESCRIBED_USE)
3698 {
3699 c++;
3700 ipa_set_controlled_uses (root_info, idx, c);
3701 }
3702 }
4502fe8d 3703 }
771578a0
MJ
3704}
3705
dd912cb8 3706/* Analyze newly added function into callgraph. */
be95e2b9 3707
771578a0 3708static void
dd912cb8 3709ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3710{
dd912cb8
ML
3711 if (node->has_gimple_body_p ())
3712 ipa_analyze_node (node);
3713}
771578a0 3714
dd912cb8
ML
3715/* Hook that is called by summary when a node is duplicated. */
3716
3717void
3718ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3719 ipa_node_params *old_info,
3720 ipa_node_params *new_info)
3721{
3722 ipa_agg_replacement_value *old_av, *new_av;
771578a0 3723
9771b263 3724 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3725 new_info->lattices = NULL;
771578a0 3726 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3727
8aab5218 3728 new_info->analysis_done = old_info->analysis_done;
3949c4a7 3729 new_info->node_enqueued = old_info->node_enqueued;
7e729474 3730 new_info->versionable = old_info->versionable;
2c9561b5
MJ
3731
3732 old_av = ipa_get_agg_replacements_for_node (src);
04be694e 3733 if (old_av)
2c9561b5 3734 {
04be694e
MJ
3735 new_av = NULL;
3736 while (old_av)
3737 {
3738 struct ipa_agg_replacement_value *v;
2c9561b5 3739
04be694e
MJ
3740 v = ggc_alloc<ipa_agg_replacement_value> ();
3741 memcpy (v, old_av, sizeof (*v));
3742 v->next = new_av;
3743 new_av = v;
3744 old_av = old_av->next;
3745 }
3746 ipa_set_node_agg_value_chain (dst, new_av);
3747 }
3748
3749 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3750
8bc5448f 3751 if (src_trans)
04be694e
MJ
3752 {
3753 ipcp_grow_transformations_if_necessary ();
3754 src_trans = ipcp_get_transformation_summary (src);
8bc5448f 3755 const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
8bc5448f
KV
3756 vec<ipa_vr, va_gc> *&dst_vr
3757 = ipcp_get_transformation_summary (dst)->m_vr;
8bc5448f
KV
3758 if (vec_safe_length (src_trans->m_vr) > 0)
3759 {
3760 vec_safe_reserve_exact (dst_vr, src_vr->length ());
3761 for (unsigned i = 0; i < src_vr->length (); ++i)
3762 dst_vr->quick_push ((*src_vr)[i]);
3763 }
2c9561b5 3764 }
209ca542
PK
3765
3766 if (src_trans && vec_safe_length (src_trans->bits) > 0)
3767 {
3768 ipcp_grow_transformations_if_necessary ();
3769 src_trans = ipcp_get_transformation_summary (src);
3770 const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
3771 vec<ipa_bits, va_gc> *&dst_bits
3772 = ipcp_get_transformation_summary (dst)->bits;
3773 vec_safe_reserve_exact (dst_bits, src_bits->length ());
3774 for (unsigned i = 0; i < src_bits->length (); ++i)
3775 dst_bits->quick_push ((*src_bits)[i]);
3776 }
771578a0
MJ
3777}
3778
3779/* Register our cgraph hooks if they are not already there. */
be95e2b9 3780
518dc859 3781void
771578a0 3782ipa_register_cgraph_hooks (void)
518dc859 3783{
dd912cb8
ML
3784 ipa_check_create_node_params ();
3785
771578a0
MJ
3786 if (!edge_removal_hook_holder)
3787 edge_removal_hook_holder =
3dafb85c 3788 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
771578a0
MJ
3789 if (!edge_duplication_hook_holder)
3790 edge_duplication_hook_holder =
3dafb85c 3791 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
dd912cb8 3792 function_insertion_hook_holder =
3dafb85c 3793 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3794}
518dc859 3795
771578a0 3796/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 3797
771578a0
MJ
3798static void
3799ipa_unregister_cgraph_hooks (void)
3800{
3dafb85c 3801 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
771578a0 3802 edge_removal_hook_holder = NULL;
3dafb85c 3803 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
771578a0 3804 edge_duplication_hook_holder = NULL;
3dafb85c 3805 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
40982661 3806 function_insertion_hook_holder = NULL;
771578a0
MJ
3807}
3808
3809/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3810 longer needed after ipa-cp. */
be95e2b9 3811
771578a0 3812void
e33c6cd6 3813ipa_free_all_structures_after_ipa_cp (void)
3e293154 3814{
2bf86c84 3815 if (!optimize && !in_lto_p)
3e293154
MJ
3816 {
3817 ipa_free_all_edge_args ();
3818 ipa_free_all_node_params ();
2651e637
ML
3819 ipcp_sources_pool.release ();
3820 ipcp_cst_values_pool.release ();
3821 ipcp_poly_ctx_values_pool.release ();
3822 ipcp_agg_lattice_pool.release ();
3e293154 3823 ipa_unregister_cgraph_hooks ();
601f3293 3824 ipa_refdesc_pool.release ();
3e293154
MJ
3825 }
3826}
3827
3828/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3829 longer needed after indirect inlining. */
be95e2b9 3830
3e293154 3831void
e33c6cd6 3832ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3833{
3834 ipa_free_all_edge_args ();
3835 ipa_free_all_node_params ();
3836 ipa_unregister_cgraph_hooks ();
2651e637
ML
3837 ipcp_sources_pool.release ();
3838 ipcp_cst_values_pool.release ();
3839 ipcp_poly_ctx_values_pool.release ();
3840 ipcp_agg_lattice_pool.release ();
601f3293 3841 ipa_refdesc_pool.release ();
518dc859
RL
3842}
3843
dcd416e3 3844/* Print ipa_tree_map data structures of all functions in the
518dc859 3845 callgraph to F. */
be95e2b9 3846
518dc859 3847void
2c9561b5 3848ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3849{
3850 int i, count;
3e293154 3851 struct ipa_node_params *info;
518dc859 3852
67348ccc 3853 if (!node->definition)
3e293154
MJ
3854 return;
3855 info = IPA_NODE_REF (node);
9de04252 3856 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3857 node->name (), node->order);
3e293154
MJ
3858 count = ipa_get_param_count (info);
3859 for (i = 0; i < count; i++)
518dc859 3860 {
4502fe8d
MJ
3861 int c;
3862
a4e33812 3863 fprintf (f, " ");
e067bd43 3864 ipa_dump_param (f, info, i);
339f49ec
JH
3865 if (ipa_is_param_used (info, i))
3866 fprintf (f, " used");
4502fe8d
MJ
3867 c = ipa_get_controlled_uses (info, i);
3868 if (c == IPA_UNDESCRIBED_USE)
3869 fprintf (f, " undescribed_use");
3870 else
3871 fprintf (f, " controlled_uses=%i", c);
3e293154 3872 fprintf (f, "\n");
518dc859
RL
3873 }
3874}
dcd416e3 3875
ca30a539 3876/* Print ipa_tree_map data structures of all functions in the
3e293154 3877 callgraph to F. */
be95e2b9 3878
3e293154 3879void
ca30a539 3880ipa_print_all_params (FILE * f)
3e293154
MJ
3881{
3882 struct cgraph_node *node;
3883
ca30a539 3884 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3885 FOR_EACH_FUNCTION (node)
ca30a539 3886 ipa_print_node_params (f, node);
3e293154 3887}
3f84bf08
MJ
3888
3889/* Return a heap allocated vector containing formal parameters of FNDECL. */
3890
9771b263 3891vec<tree>
3f84bf08
MJ
3892ipa_get_vector_of_formal_parms (tree fndecl)
3893{
9771b263 3894 vec<tree> args;
3f84bf08
MJ
3895 int count;
3896 tree parm;
3897
0e8853ee 3898 gcc_assert (!flag_wpa);
310bc633 3899 count = count_formal_params (fndecl);
9771b263 3900 args.create (count);
910ad8de 3901 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3902 args.quick_push (parm);
3f84bf08
MJ
3903
3904 return args;
3905}
3906
3907/* Return a heap allocated vector containing types of formal parameters of
3908 function type FNTYPE. */
3909
31519c38
AH
3910vec<tree>
3911ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3912{
9771b263 3913 vec<tree> types;
3f84bf08
MJ
3914 int count = 0;
3915 tree t;
3916
3917 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3918 count++;
3919
9771b263 3920 types.create (count);
3f84bf08 3921 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3922 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3923
3924 return types;
3925}
3926
3927/* Modify the function declaration FNDECL and its type according to the plan in
3928 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3929 to reflect the actual parameters being modified which are determined by the
3930 base_index field. */
3931
3932void
31519c38 3933ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3f84bf08 3934{
31519c38
AH
3935 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3936 tree orig_type = TREE_TYPE (fndecl);
3937 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3f84bf08
MJ
3938
3939 /* The following test is an ugly hack, some functions simply don't have any
3940 arguments in their type. This is probably a bug but well... */
31519c38
AH
3941 bool care_for_types = (old_arg_types != NULL_TREE);
3942 bool last_parm_void;
3943 vec<tree> otypes;
3f84bf08
MJ
3944 if (care_for_types)
3945 {
3946 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3947 == void_type_node);
31519c38 3948 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3f84bf08 3949 if (last_parm_void)
9771b263 3950 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3951 else
9771b263 3952 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3953 }
3954 else
3955 {
3956 last_parm_void = false;
9771b263 3957 otypes.create (0);
3f84bf08
MJ
3958 }
3959
31519c38
AH
3960 int len = adjustments.length ();
3961 tree *link = &DECL_ARGUMENTS (fndecl);
3962 tree new_arg_types = NULL;
3963 for (int i = 0; i < len; i++)
3f84bf08
MJ
3964 {
3965 struct ipa_parm_adjustment *adj;
3966 gcc_assert (link);
3967
9771b263 3968 adj = &adjustments[i];
31519c38
AH
3969 tree parm;
3970 if (adj->op == IPA_PARM_OP_NEW)
3971 parm = NULL;
3972 else
3973 parm = oparms[adj->base_index];
3f84bf08
MJ
3974 adj->base = parm;
3975
31519c38 3976 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
3977 {
3978 if (care_for_types)
9771b263 3979 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3980 new_arg_types);
3981 *link = parm;
910ad8de 3982 link = &DECL_CHAIN (parm);
3f84bf08 3983 }
31519c38 3984 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08
MJ
3985 {
3986 tree new_parm;
3987 tree ptype;
3988
3989 if (adj->by_ref)
3990 ptype = build_pointer_type (adj->type);
3991 else
e69dbe37
MJ
3992 {
3993 ptype = adj->type;
3994 if (is_gimple_reg_type (ptype))
3995 {
3996 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
fe7afdf5 3997 if (TYPE_ALIGN (ptype) != malign)
e69dbe37
MJ
3998 ptype = build_aligned_type (ptype, malign);
3999 }
4000 }
3f84bf08
MJ
4001
4002 if (care_for_types)
4003 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4004
4005 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4006 ptype);
31519c38
AH
4007 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4008 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3f84bf08
MJ
4009 DECL_ARTIFICIAL (new_parm) = 1;
4010 DECL_ARG_TYPE (new_parm) = ptype;
4011 DECL_CONTEXT (new_parm) = fndecl;
4012 TREE_USED (new_parm) = 1;
4013 DECL_IGNORED_P (new_parm) = 1;
4014 layout_decl (new_parm, 0);
4015
31519c38
AH
4016 if (adj->op == IPA_PARM_OP_NEW)
4017 adj->base = NULL;
4018 else
4019 adj->base = parm;
4020 adj->new_decl = new_parm;
3f84bf08
MJ
4021
4022 *link = new_parm;
910ad8de 4023 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
4024 }
4025 }
4026
4027 *link = NULL_TREE;
4028
31519c38 4029 tree new_reversed = NULL;
3f84bf08
MJ
4030 if (care_for_types)
4031 {
4032 new_reversed = nreverse (new_arg_types);
4033 if (last_parm_void)
4034 {
4035 if (new_reversed)
4036 TREE_CHAIN (new_arg_types) = void_list_node;
4037 else
4038 new_reversed = void_list_node;
4039 }
4040 }
4041
4042 /* Use copy_node to preserve as much as possible from original type
4043 (debug info, attribute lists etc.)
4044 Exception is METHOD_TYPEs must have THIS argument.
4045 When we are asked to remove it, we need to build new FUNCTION_TYPE
4046 instead. */
31519c38 4047 tree new_type = NULL;
3f84bf08 4048 if (TREE_CODE (orig_type) != METHOD_TYPE
31519c38 4049 || (adjustments[0].op == IPA_PARM_OP_COPY
9771b263 4050 && adjustments[0].base_index == 0))
3f84bf08 4051 {
4eb3f32c 4052 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
4053 TYPE_ARG_TYPES (new_type) = new_reversed;
4054 }
4055 else
4056 {
4057 new_type
4058 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4059 new_reversed));
4060 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4061 DECL_VINDEX (fndecl) = NULL_TREE;
4062 }
4063
d402c33d
JH
4064 /* When signature changes, we need to clear builtin info. */
4065 if (DECL_BUILT_IN (fndecl))
4066 {
4067 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4068 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4069 }
4070
3f84bf08 4071 TREE_TYPE (fndecl) = new_type;
9b389a5e 4072 DECL_VIRTUAL_P (fndecl) = 0;
70d6d5c1 4073 DECL_LANG_SPECIFIC (fndecl) = NULL;
9771b263
DN
4074 otypes.release ();
4075 oparms.release ();
3f84bf08
MJ
4076}
4077
4078/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4079 If this is a directly recursive call, CS must be NULL. Otherwise it must
4080 contain the corresponding call graph edge. */
4081
4082void
538dd0b7 4083ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3f84bf08
MJ
4084 ipa_parm_adjustment_vec adjustments)
4085{
d52f5295 4086 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
9771b263
DN
4087 vec<tree> vargs;
4088 vec<tree, va_gc> **debug_args = NULL;
538dd0b7 4089 gcall *new_stmt;
82338059 4090 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
4091 tree callee_decl;
4092 int i, len;
4093
9771b263
DN
4094 len = adjustments.length ();
4095 vargs.create (len);
67348ccc 4096 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
d122681a 4097 current_node->remove_stmt_references (stmt);
3f84bf08
MJ
4098
4099 gsi = gsi_for_stmt (stmt);
82338059
MJ
4100 prev_gsi = gsi;
4101 gsi_prev (&prev_gsi);
3f84bf08
MJ
4102 for (i = 0; i < len; i++)
4103 {
4104 struct ipa_parm_adjustment *adj;
4105
9771b263 4106 adj = &adjustments[i];
3f84bf08 4107
31519c38 4108 if (adj->op == IPA_PARM_OP_COPY)
3f84bf08
MJ
4109 {
4110 tree arg = gimple_call_arg (stmt, adj->base_index);
4111
9771b263 4112 vargs.quick_push (arg);
3f84bf08 4113 }
31519c38 4114 else if (adj->op != IPA_PARM_OP_REMOVE)
3f84bf08 4115 {
fffe1e40
MJ
4116 tree expr, base, off;
4117 location_t loc;
f43245d1 4118 unsigned int deref_align = 0;
c1ed6a01 4119 bool deref_base = false;
fffe1e40
MJ
4120
4121 /* We create a new parameter out of the value of the old one, we can
4122 do the following kind of transformations:
4123
4124 - A scalar passed by reference is converted to a scalar passed by
4125 value. (adj->by_ref is false and the type of the original
4126 actual argument is a pointer to a scalar).
4127
4128 - A part of an aggregate is passed instead of the whole aggregate.
4129 The part can be passed either by value or by reference, this is
4130 determined by value of adj->by_ref. Moreover, the code below
4131 handles both situations when the original aggregate is passed by
4132 value (its type is not a pointer) and when it is passed by
4133 reference (it is a pointer to an aggregate).
4134
4135 When the new argument is passed by reference (adj->by_ref is true)
4136 it must be a part of an aggregate and therefore we form it by
4137 simply taking the address of a reference inside the original
4138 aggregate. */
4139
4140 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4141 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
4142 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4143 : EXPR_LOCATION (base);
fffe1e40 4144
82d49829
MJ
4145 if (TREE_CODE (base) != ADDR_EXPR
4146 && POINTER_TYPE_P (TREE_TYPE (base)))
4147 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 4148 adj->offset / BITS_PER_UNIT);
3f84bf08 4149 else
3f84bf08 4150 {
fffe1e40
MJ
4151 HOST_WIDE_INT base_offset;
4152 tree prev_base;
c1ed6a01 4153 bool addrof;
fffe1e40
MJ
4154
4155 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
4156 {
4157 base = TREE_OPERAND (base, 0);
4158 addrof = true;
4159 }
4160 else
4161 addrof = false;
fffe1e40
MJ
4162 prev_base = base;
4163 base = get_addr_base_and_unit_offset (base, &base_offset);
4164 /* Aggregate arguments can have non-invariant addresses. */
4165 if (!base)
4166 {
4167 base = build_fold_addr_expr (prev_base);
82d49829 4168 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4169 adj->offset / BITS_PER_UNIT);
4170 }
4171 else if (TREE_CODE (base) == MEM_REF)
4172 {
c1ed6a01
MJ
4173 if (!addrof)
4174 {
4175 deref_base = true;
4176 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4177 }
82d49829 4178 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4179 base_offset
4180 + adj->offset / BITS_PER_UNIT);
4181 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 4182 off);
fffe1e40
MJ
4183 base = TREE_OPERAND (base, 0);
4184 }
4185 else
4186 {
82d49829 4187 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
4188 base_offset
4189 + adj->offset / BITS_PER_UNIT);
4190 base = build_fold_addr_expr (base);
4191 }
3f84bf08 4192 }
fffe1e40 4193
3a5a825a
RG
4194 if (!adj->by_ref)
4195 {
4196 tree type = adj->type;
4197 unsigned int align;
4198 unsigned HOST_WIDE_INT misalign;
644ffefd 4199
c1ed6a01
MJ
4200 if (deref_base)
4201 {
4202 align = deref_align;
4203 misalign = 0;
4204 }
4205 else
4206 {
4207 get_pointer_alignment_1 (base, &align, &misalign);
4208 if (TYPE_ALIGN (type) > align)
4209 align = TYPE_ALIGN (type);
4210 }
807e902e 4211 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3a5a825a
RG
4212 * BITS_PER_UNIT);
4213 misalign = misalign & (align - 1);
4214 if (misalign != 0)
146ec50f 4215 align = least_bit_hwi (misalign);
3a5a825a
RG
4216 if (align < TYPE_ALIGN (type))
4217 type = build_aligned_type (type, align);
4df65a85
RB
4218 base = force_gimple_operand_gsi (&gsi, base,
4219 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4220 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
ee45a32d 4221 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4df65a85
RB
4222 /* If expr is not a valid gimple call argument emit
4223 a load into a temporary. */
4224 if (is_gimple_reg_type (TREE_TYPE (expr)))
4225 {
355fe088 4226 gimple *tem = gimple_build_assign (NULL_TREE, expr);
4df65a85
RB
4227 if (gimple_in_ssa_p (cfun))
4228 {
4229 gimple_set_vuse (tem, gimple_vuse (stmt));
4230 expr = make_ssa_name (TREE_TYPE (expr), tem);
4231 }
4232 else
b731b390 4233 expr = create_tmp_reg (TREE_TYPE (expr));
4df65a85
RB
4234 gimple_assign_set_lhs (tem, expr);
4235 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4236 }
3a5a825a
RG
4237 }
4238 else
4239 {
4240 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
ee45a32d 4241 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3a5a825a 4242 expr = build_fold_addr_expr (expr);
4df65a85
RB
4243 expr = force_gimple_operand_gsi (&gsi, expr,
4244 true, NULL, true, GSI_SAME_STMT);
3a5a825a 4245 }
9771b263 4246 vargs.quick_push (expr);
3f84bf08 4247 }
31519c38 4248 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
ddb555ed
JJ
4249 {
4250 unsigned int ix;
4251 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
355fe088 4252 gimple *def_temp;
ddb555ed
JJ
4253
4254 arg = gimple_call_arg (stmt, adj->base_index);
4255 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4256 {
4257 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4258 continue;
4259 arg = fold_convert_loc (gimple_location (stmt),
4260 TREE_TYPE (origin), arg);
4261 }
4262 if (debug_args == NULL)
4263 debug_args = decl_debug_args_insert (callee_decl);
9771b263 4264 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
4265 if (ddecl == origin)
4266 {
9771b263 4267 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
4268 break;
4269 }
4270 if (ddecl == NULL)
4271 {
4272 ddecl = make_node (DEBUG_EXPR_DECL);
4273 DECL_ARTIFICIAL (ddecl) = 1;
4274 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4275 DECL_MODE (ddecl) = DECL_MODE (origin);
4276
9771b263
DN
4277 vec_safe_push (*debug_args, origin);
4278 vec_safe_push (*debug_args, ddecl);
ddb555ed 4279 }
9771b263 4280 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
4281 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4282 }
3f84bf08
MJ
4283 }
4284
4285 if (dump_file && (dump_flags & TDF_DETAILS))
4286 {
4287 fprintf (dump_file, "replacing stmt:");
4288 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4289 }
4290
3f84bf08 4291 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 4292 vargs.release ();
3f84bf08
MJ
4293 if (gimple_call_lhs (stmt))
4294 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4295
4296 gimple_set_block (new_stmt, gimple_block (stmt));
4297 if (gimple_has_location (stmt))
4298 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 4299 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 4300 gimple_call_copy_flags (new_stmt, stmt);
4df65a85
RB
4301 if (gimple_in_ssa_p (cfun))
4302 {
4303 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4304 if (gimple_vdef (stmt))
4305 {
4306 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4307 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4308 }
4309 }
3f84bf08
MJ
4310
4311 if (dump_file && (dump_flags & TDF_DETAILS))
4312 {
4313 fprintf (dump_file, "with stmt:");
4314 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4315 fprintf (dump_file, "\n");
4316 }
4317 gsi_replace (&gsi, new_stmt, true);
4318 if (cs)
3dafb85c 4319 cs->set_call_stmt (new_stmt);
82338059
MJ
4320 do
4321 {
d52f5295 4322 current_node->record_stmt_references (gsi_stmt (gsi));
82338059
MJ
4323 gsi_prev (&gsi);
4324 }
3d354792 4325 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
3f84bf08
MJ
4326}
4327
31519c38
AH
4328/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4329 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4330 specifies whether the function should care about type incompatibility the
4331 current and new expressions. If it is false, the function will leave
4332 incompatibility issues to the caller. Return true iff the expression
4333 was modified. */
4334
4335bool
4336ipa_modify_expr (tree *expr, bool convert,
4337 ipa_parm_adjustment_vec adjustments)
4338{
4339 struct ipa_parm_adjustment *cand
4340 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4341 if (!cand)
4342 return false;
4343
4344 tree src;
4345 if (cand->by_ref)
ee45a32d
EB
4346 {
4347 src = build_simple_mem_ref (cand->new_decl);
4348 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4349 }
31519c38
AH
4350 else
4351 src = cand->new_decl;
4352
4353 if (dump_file && (dump_flags & TDF_DETAILS))
4354 {
4355 fprintf (dump_file, "About to replace expr ");
4356 print_generic_expr (dump_file, *expr, 0);
4357 fprintf (dump_file, " with ");
4358 print_generic_expr (dump_file, src, 0);
4359 fprintf (dump_file, "\n");
4360 }
4361
4362 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4363 {
4364 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4365 *expr = vce;
4366 }
4367 else
4368 *expr = src;
4369 return true;
4370}
4371
4372/* If T is an SSA_NAME, return NULL if it is not a default def or
4373 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4374 the base variable is always returned, regardless if it is a default
4375 def. Return T if it is not an SSA_NAME. */
4376
4377static tree
4378get_ssa_base_param (tree t, bool ignore_default_def)
4379{
4380 if (TREE_CODE (t) == SSA_NAME)
4381 {
4382 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4383 return SSA_NAME_VAR (t);
4384 else
4385 return NULL_TREE;
4386 }
4387 return t;
4388}
4389
4390/* Given an expression, return an adjustment entry specifying the
4391 transformation to be done on EXPR. If no suitable adjustment entry
4392 was found, returns NULL.
4393
4394 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4395 default def, otherwise bail on them.
4396
4397 If CONVERT is non-NULL, this function will set *CONVERT if the
4398 expression provided is a component reference. ADJUSTMENTS is the
4399 adjustments vector. */
4400
4401ipa_parm_adjustment *
4402ipa_get_adjustment_candidate (tree **expr, bool *convert,
4403 ipa_parm_adjustment_vec adjustments,
4404 bool ignore_default_def)
4405{
4406 if (TREE_CODE (**expr) == BIT_FIELD_REF
4407 || TREE_CODE (**expr) == IMAGPART_EXPR
4408 || TREE_CODE (**expr) == REALPART_EXPR)
4409 {
4410 *expr = &TREE_OPERAND (**expr, 0);
4411 if (convert)
4412 *convert = true;
4413 }
4414
4415 HOST_WIDE_INT offset, size, max_size;
ee45a32d
EB
4416 bool reverse;
4417 tree base
4418 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
31519c38
AH
4419 if (!base || size == -1 || max_size == -1)
4420 return NULL;
4421
4422 if (TREE_CODE (base) == MEM_REF)
4423 {
807e902e 4424 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
31519c38
AH
4425 base = TREE_OPERAND (base, 0);
4426 }
4427
4428 base = get_ssa_base_param (base, ignore_default_def);
4429 if (!base || TREE_CODE (base) != PARM_DECL)
4430 return NULL;
4431
4432 struct ipa_parm_adjustment *cand = NULL;
4433 unsigned int len = adjustments.length ();
4434 for (unsigned i = 0; i < len; i++)
4435 {
4436 struct ipa_parm_adjustment *adj = &adjustments[i];
4437
4438 if (adj->base == base
4439 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4440 {
4441 cand = adj;
4442 break;
4443 }
4444 }
4445
4446 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4447 return NULL;
4448 return cand;
4449}
4450
3f84bf08
MJ
4451/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4452
4453static bool
4454index_in_adjustments_multiple_times_p (int base_index,
4455 ipa_parm_adjustment_vec adjustments)
4456{
9771b263 4457 int i, len = adjustments.length ();
3f84bf08
MJ
4458 bool one = false;
4459
4460 for (i = 0; i < len; i++)
4461 {
4462 struct ipa_parm_adjustment *adj;
9771b263 4463 adj = &adjustments[i];
3f84bf08
MJ
4464
4465 if (adj->base_index == base_index)
4466 {
4467 if (one)
4468 return true;
4469 else
4470 one = true;
4471 }
4472 }
4473 return false;
4474}
4475
4476
4477/* Return adjustments that should have the same effect on function parameters
4478 and call arguments as if they were first changed according to adjustments in
4479 INNER and then by adjustments in OUTER. */
4480
4481ipa_parm_adjustment_vec
4482ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4483 ipa_parm_adjustment_vec outer)
4484{
9771b263
DN
4485 int i, outlen = outer.length ();
4486 int inlen = inner.length ();
3f84bf08
MJ
4487 int removals = 0;
4488 ipa_parm_adjustment_vec adjustments, tmp;
4489
9771b263 4490 tmp.create (inlen);
3f84bf08
MJ
4491 for (i = 0; i < inlen; i++)
4492 {
4493 struct ipa_parm_adjustment *n;
9771b263 4494 n = &inner[i];
3f84bf08 4495
31519c38 4496 if (n->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4497 removals++;
4498 else
31519c38
AH
4499 {
4500 /* FIXME: Handling of new arguments are not implemented yet. */
4501 gcc_assert (n->op != IPA_PARM_OP_NEW);
4502 tmp.quick_push (*n);
4503 }
3f84bf08
MJ
4504 }
4505
9771b263 4506 adjustments.create (outlen + removals);
3f84bf08
MJ
4507 for (i = 0; i < outlen; i++)
4508 {
f32682ca 4509 struct ipa_parm_adjustment r;
9771b263
DN
4510 struct ipa_parm_adjustment *out = &outer[i];
4511 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 4512
f32682ca 4513 memset (&r, 0, sizeof (r));
31519c38
AH
4514 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4515 if (out->op == IPA_PARM_OP_REMOVE)
3f84bf08
MJ
4516 {
4517 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4518 {
31519c38 4519 r.op = IPA_PARM_OP_REMOVE;
9771b263 4520 adjustments.quick_push (r);
3f84bf08
MJ
4521 }
4522 continue;
4523 }
31519c38
AH
4524 else
4525 {
4526 /* FIXME: Handling of new arguments are not implemented yet. */
4527 gcc_assert (out->op != IPA_PARM_OP_NEW);
4528 }
3f84bf08 4529
f32682ca
DN
4530 r.base_index = in->base_index;
4531 r.type = out->type;
3f84bf08
MJ
4532
4533 /* FIXME: Create nonlocal value too. */
4534
31519c38
AH
4535 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4536 r.op = IPA_PARM_OP_COPY;
4537 else if (in->op == IPA_PARM_OP_COPY)
f32682ca 4538 r.offset = out->offset;
31519c38 4539 else if (out->op == IPA_PARM_OP_COPY)
f32682ca 4540 r.offset = in->offset;
3f84bf08 4541 else
f32682ca 4542 r.offset = in->offset + out->offset;
9771b263 4543 adjustments.quick_push (r);
3f84bf08
MJ
4544 }
4545
4546 for (i = 0; i < inlen; i++)
4547 {
9771b263 4548 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08 4549
31519c38 4550 if (n->op == IPA_PARM_OP_REMOVE)
9771b263 4551 adjustments.quick_push (*n);
3f84bf08
MJ
4552 }
4553
9771b263 4554 tmp.release ();
3f84bf08
MJ
4555 return adjustments;
4556}
4557
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  /* Formal parameters of FNDECL; indexed below by adj->base_index.  */
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      /* Separate every entry after the first from the header text.  */
      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* Describe the operation: plain copy, removal, or (otherwise)
	 a replacement at the given offset within the original.  */
      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
4611
2c9561b5
MJ
4612/* Dump the AV linked list. */
4613
4614void
4615ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4616{
4617 bool comma = false;
4618 fprintf (f, " Aggregate replacements:");
4619 for (; av; av = av->next)
4620 {
4621 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4622 av->index, av->offset);
4623 print_generic_expr (f, av->value, 0);
4624 comma = true;
4625 }
4626 fprintf (f, "\n");
4627}
4628
fb3f88cc
JH
/* Stream out jump function JUMP_FUNC to OB.  The layout written here must be
   mirrored exactly by ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      /* Nothing beyond the type tag itself.  */
      break;
    case IPA_JF_CONST:
      /* Constants with locations would drag location streaming in; the
	 analysis phase is expected to have stripped them.  */
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved flag.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: extra operand, then formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate jump function part: item count, by_ref flag (only when
     there are items), then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  /* Known-bits information; value/mask only follow when known.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->bits.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits.known)
    {
      streamer_write_widest_int (ob, jump_func->bits.value);
      streamer_write_widest_int (ob, jump_func->bits.mask);
    }
  /* Value-range information.  Note BP is reused after having been
     flushed above; streamer_write_bitpack leaves it empty, so this
     starts a fresh word, matching the fresh bitpack the reader
     creates.  */
  bp_pack_value (&bp, jump_func->vr_known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->vr_known)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr.type);
      stream_write_tree (ob, jump_func->m_vr.min, true);
      stream_write_tree (ob, jump_func->m_vr.max, true);
    }
}
4706
/* Read in jump function JUMP_FUNC from IB.  CS is the edge the function
   belongs to (needed by ipa_set_jf_constant); DATA_IN provides tree and
   string streaming context.  Must mirror ipa_write_jump_function.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus agg_preserved bit.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree, then formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  /* Aggregate part: count, optional by_ref flag, then the items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  /* Known-bits information; value/mask only present when known.  */
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      jump_func->bits.known = true;
      jump_func->bits.value = streamer_read_widest_int (ib);
      jump_func->bits.mask = streamer_read_widest_int (ib);
    }
  else
    jump_func->bits.known = false;

  /* Value-range information, streamed in its own bitpack word.  */
  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      jump_func->vr_known = true;
      jump_func->m_vr.type = streamer_read_enum (ib,
						 value_range_type,
						 VR_LAST);
      jump_func->m_vr.min = stream_read_tree (ib, data_in);
      jump_func->m_vr.max = stream_read_tree (ib, data_in);
    }
  else
    jump_func->vr_known = false;
}
4796
e33c6cd6
MJ
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  The layout must be mirrored by
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  /* Pack all the boolean flags into a single bitpack word.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful (and streamed) for aggregate or
     polymorphic calls; otherwise it must be zero.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  /* Extra data describing the virtual call for polymorphic edges.  */
  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4828
e33c6cd6
MJ
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Must mirror
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  /* Unpack the flags in the same order the writer packed them.  */
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* Offset was streamed only for aggregate or polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4859
fb3f88cc
JH
/* Stream out NODE info to OB.  Writes the node reference, per-parameter
   data, and the jump functions (plus polymorphic contexts and indirect
   edge info) for all outgoing edges.  Mirrored by ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Analysis must have run (unless there are no parameters at all) and
     the node must not be in any transient IPA-CP state.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* The count is doubled and its lowest bit encodes whether
	 polymorphic call contexts follow each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Same encoding as for direct callees, plus the indirect call
	 description at the end.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
4919
/* Stream in NODE info from IB.  Must mirror ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted analysis_done for nodes with parameters, so
     restore that state here.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* The low bit of COUNT says whether each jump function is
	 followed by a polymorphic call context.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Indirect edge info is present even when there are no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
4988
/* Write jump functions for nodes in SET.  Emits the whole
   LTO_section_jump_functions section: a node count followed by one
   ipa_write_node_info record per analyzed function in the partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream when IPA analysis has not been performed.  */
  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count nodes that will be streamed; the same predicate
     must be used in the emission pass below.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5030
/* Read section in file FILE_DATA of length LEN with data DATA.  Decodes
   one LTO_section_jump_functions section and fills in the IPA summaries
   of every node it describes.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* The section is laid out as header, CFG part, main stream, strings.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5071
5072/* Read ipcp jump functions. */
5073
5074void
5075ipa_prop_read_jump_functions (void)
5076{
5077 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5078 struct lto_file_decl_data *file_data;
5079 unsigned int j = 0;
5080
5081 ipa_check_create_node_params ();
5082 ipa_check_create_edge_args ();
5083 ipa_register_cgraph_hooks ();
5084
5085 while ((file_data = file_data_vec[j++]))
5086 {
5087 size_t len;
5088 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5089
5090 if (data)
5091 ipa_prop_read_section (file_data, data, len);
5092 }
5093}
5094
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* Re-establish the node and edge summaries after symtab merging;
     presumably these calls create the summaries only when missing —
     TODO(review): confirm against their definitions.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
2c9561b5
MJ
5105
/* Stream out the IPA-CP transformation summary for NODE to OB: the chain
   of aggregate replacement values, then parameter value ranges, then
   parameter known-bits.  Mirrored by read_ipcp_transformation_info.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Count the aggregate replacement entries before streaming them.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  /* Parameter value ranges; a plain zero count when there are none.  */
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  /* Range bounds are only streamed for known ranges.  */
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Parameter known-bits; likewise guarded by a count.  */
  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits& bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, bits_jfunc.known, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc.known)
	    {
	      streamer_write_widest_int (ob, bits_jfunc.value);
	      streamer_write_widest_int (ob, bits_jfunc.mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
5181
/* Stream in the aggregate value replacement chain for NODE from IB.
   Also reads the parameter value ranges and known-bits that
   write_ipcp_transformation_info streamed after the chain.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i <count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      /* Prepend, so the resulting chain is in reverse stream order.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  /* Parameter value ranges; zero count means none were streamed.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_type,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  /* Parameter known-bits.  */
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  ipa_bits& bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bits_jfunc.known = bp_unpack_value (&bp, 1);
	  if (bits_jfunc.known)
	    {
	      bits_jfunc.value = streamer_read_widest_int (ib);
	      bits_jfunc.mask = streamer_read_widest_int (ib);
	    }
	}
    }
}
5252
/* Write all aggregate replacement for nodes in set.  Emits the whole
   LTO_section_ipcp_transform section: a node count followed by one
   write_ipcp_transformation_info record per function with a body.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count the nodes that will be streamed.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second pass: emit the records, using the same predicate.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5288
5289/* Read replacements section in file FILE_DATA of length LEN with data
5290 DATA. */
5291
5292static void
5293read_replacements_section (struct lto_file_decl_data *file_data,
5294 const char *data,
5295 size_t len)
5296{
5297 const struct lto_function_header *header =
5298 (const struct lto_function_header *) data;
5299 const int cfg_offset = sizeof (struct lto_function_header);
5300 const int main_offset = cfg_offset + header->cfg_size;
5301 const int string_offset = main_offset + header->main_size;
5302 struct data_in *data_in;
2c9561b5
MJ
5303 unsigned int i;
5304 unsigned int count;
5305
207c68cd 5306 lto_input_block ib_main ((const char *) data + main_offset,
db847fa8 5307 header->main_size, file_data->mode_table);
2c9561b5
MJ
5308
5309 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5310 header->string_size, vNULL);
2c9561b5
MJ
5311 count = streamer_read_uhwi (&ib_main);
5312
5313 for (i = 0; i < count; i++)
5314 {
5315 unsigned int index;
5316 struct cgraph_node *node;
5317 lto_symtab_encoder_t encoder;
5318
5319 index = streamer_read_uhwi (&ib_main);
5320 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5321 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5322 index));
67348ccc 5323 gcc_assert (node->definition);
04be694e 5324 read_ipcp_transformation_info (&ib_main, node, data_in);
2c9561b5
MJ
5325 }
5326 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5327 len);
5328 lto_data_in_delete (data_in);
5329}
5330
5331/* Read IPA-CP aggregate replacements. */
5332
5333void
04be694e 5334ipcp_read_transformation_summaries (void)
2c9561b5
MJ
5335{
5336 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5337 struct lto_file_decl_data *file_data;
5338 unsigned int j = 0;
5339
5340 while ((file_data = file_data_vec[j++]))
5341 {
5342 size_t len;
5343 const char *data = lto_get_section_data (file_data,
5344 LTO_section_ipcp_transform,
5345 NULL, &len);
5346 if (data)
5347 read_replacements_section (file_data, data, len);
5348 }
5349}
5350
5351/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5352 NODE. */
5353
5354static void
5355adjust_agg_replacement_values (struct cgraph_node *node,
5356 struct ipa_agg_replacement_value *aggval)
5357{
5358 struct ipa_agg_replacement_value *v;
5359 int i, c = 0, d = 0, *adj;
5360
5361 if (!node->clone.combined_args_to_skip)
5362 return;
5363
5364 for (v = aggval; v; v = v->next)
5365 {
5366 gcc_assert (v->index >= 0);
5367 if (c < v->index)
5368 c = v->index;
5369 }
5370 c++;
5371
5372 adj = XALLOCAVEC (int, c);
5373 for (i = 0; i < c; i++)
5374 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5375 {
5376 adj[i] = -1;
5377 d++;
5378 }
5379 else
5380 adj[i] = i - d;
5381
5382 for (v = aggval; v; v = v->next)
5383 v->index = adj[v->index];
5384}
5385
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  /* FBI is the function body analysis context, DESCS the parameter
     descriptors of the function being modified and AV the chain of
     aggregate replacement values to substitute.  *SC is set to true when
     any statement is modified, *CC when dead EH edges were purged (so
     the CFG changed).  */
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags owned by the caller; see constructor comment.  */
  bool *m_something_changed, *m_cfg_changed;
};
5406
/* Replace loads from aggregates passed to this function with the constant
   values recorded in m_aggval, in every statement of BB.  Sets
   *m_something_changed on any modification and *m_cfg_changed when
   cleaning an EH statement removed dead EH edges.  */

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      /* Only scalar loads are candidates for replacement.  */
      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  /* NOTE(review): this condition tests RHS, which is invariant
	     across the loop, rather than T — it therefore only catches an
	     outermost VIEW_CONVERT_EXPR, not one buried in the component
	     chain.  Looks suspicious; confirm intent upstream before
	     changing.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      /* Identify which parameter aggregate (and offset within it) the
	 load reads from.  */
      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      /* Look for a replacement value recorded for that slot.  */
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  /* Convert the value to the type of the load: by a plain
	     conversion when possible, by a VIEW_CONVERT_EXPR when only
	     the sizes agree, otherwise give up.  */
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
5500
209ca542
PK
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  For integral parameters this sets the
   nonzero-bits of the default SSA definition; for pointers it sets
   alignment information derived from the known bits.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  /* Nothing to do when no known-bits info was recorded for NODE.  */
  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      /* Entries for parameters removed in this clone have no
	 corresponding PARM_DECL; do not advance the chain for them.  */
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      /* Only known bits on integral or pointer register parameters are
	 usable.  */
      if (!bits[i].known
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm)) || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i].mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  /* Bits set in the mask are "unknown": they may be nonzero, so
	     they go into the nonzero-bits set alongside the known ones.  */
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
				  | wide_int::from (bits[i].value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* For pointers: the lowest unknown bit bounds the provable
	     alignment; the known value bits below it give misalignment.  */
	  unsigned tem = bits[i].mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      /* Never replace a stronger alignment guarantee with a
		 weaker one.  */
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
5591
8bc5448f
KV
5592/* Update value range of formal parameters as described in
5593 ipcp_transformation_summary. */
5594
5595static void
5596ipcp_update_vr (struct cgraph_node *node)
5597{
5598 tree fndecl = node->decl;
5599 tree parm = DECL_ARGUMENTS (fndecl);
5600 tree next_parm = parm;
5601 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5602 if (!ts || vec_safe_length (ts->m_vr) == 0)
5603 return;
5604 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5605 unsigned count = vr.length ();
5606
5607 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5608 {
5609 if (node->clone.combined_args_to_skip
5610 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5611 continue;
5612 gcc_checking_assert (parm);
5613 next_parm = DECL_CHAIN (parm);
5614 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5615
5616 if (!ddef || !is_gimple_reg (parm))
5617 continue;
5618
5619 if (vr[i].known
8bc5448f
KV
5620 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5621 {
5622 tree type = TREE_TYPE (ddef);
5623 unsigned prec = TYPE_PRECISION (type);
718625ad
KV
5624 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5625 {
5626 if (dump_file)
5627 {
5628 fprintf (dump_file, "Setting value range of param %u ", i);
5629 fprintf (dump_file, "%s[",
5630 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5631 print_decs (vr[i].min, dump_file);
5632 fprintf (dump_file, ", ");
5633 print_decs (vr[i].max, dump_file);
5634 fprintf (dump_file, "]\n");
5635 }
5636 set_range_info (ddef, vr[i].type,
5637 wide_int_storage::from (vr[i].min, prec,
5638 TYPE_SIGN (type)),
5639 wide_int_storage::from (vr[i].max, prec,
5640 TYPE_SIGN (type)));
5641 }
5642 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5643 && vr[i].type == VR_ANTI_RANGE
5644 && wi::eq_p (vr[i].min, 0)
5645 && wi::eq_p (vr[i].max, 0))
8bc5448f 5646 {
718625ad
KV
5647 if (dump_file)
5648 fprintf (dump_file, "Setting nonnull for %u\n", i);
5649 set_ptr_nonnull (ddef);
8bc5448f 5650 }
8bc5448f
KV
5651 }
5652 }
5653}
5654
8aab5218 5655/* IPCP transformation phase doing propagation of aggregate values. */
2c9561b5
MJ
5656
5657unsigned int
5658ipcp_transform_function (struct cgraph_node *node)
5659{
84562394 5660 vec<ipa_param_descriptor> descriptors = vNULL;
56b40062 5661 struct ipa_func_body_info fbi;
2c9561b5 5662 struct ipa_agg_replacement_value *aggval;
2c9561b5
MJ
5663 int param_count;
5664 bool cfg_changed = false, something_changed = false;
5665
5666 gcc_checking_assert (cfun);
5667 gcc_checking_assert (current_function_decl);
5668
5669 if (dump_file)
5670 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 5671 node->name (), node->order);
2c9561b5 5672
209ca542 5673 ipcp_update_bits (node);
8bc5448f 5674 ipcp_update_vr (node);
2c9561b5
MJ
5675 aggval = ipa_get_agg_replacements_for_node (node);
5676 if (!aggval)
5677 return 0;
67348ccc 5678 param_count = count_formal_params (node->decl);
2c9561b5
MJ
5679 if (param_count == 0)
5680 return 0;
5681 adjust_agg_replacement_values (node, aggval);
5682 if (dump_file)
5683 ipa_dump_agg_replacement_values (dump_file, aggval);
2c9561b5 5684
8aab5218
MJ
5685 fbi.node = node;
5686 fbi.info = NULL;
5687 fbi.bb_infos = vNULL;
5688 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5689 fbi.param_count = param_count;
5690 fbi.aa_walked = 0;
2c9561b5 5691
8aab5218
MJ
5692 descriptors.safe_grow_cleared (param_count);
5693 ipa_populate_param_decls (node, descriptors);
5694 calculate_dominance_info (CDI_DOMINATORS);
5695 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5696 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2c9561b5 5697
8aab5218
MJ
5698 int i;
5699 struct ipa_bb_info *bi;
5700 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5701 free_ipa_bb_info (bi);
5702 fbi.bb_infos.release ();
5703 free_dominance_info (CDI_DOMINATORS);
04be694e 5704 (*ipcp_transformations)[node->uid].agg_values = NULL;
676b4899
PK
5705 (*ipcp_transformations)[node->uid].bits = NULL;
5706 (*ipcp_transformations)[node->uid].m_vr = NULL;
5707
9771b263 5708 descriptors.release ();
2c9561b5
MJ
5709
5710 if (!something_changed)
5711 return 0;
5712 else if (cfg_changed)
5713 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5714 else
5715 return TODO_update_ssa_only_virtuals;
5716}