/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
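
/* As an illustration of the inlining transform described above (a
   sketch only; the exact GIMPLE and the generated names vary), a
   callee

       int inc (int x) { return x + 1; }

   called as "y = inc (3);" is duplicated roughly as

       x.1 = 3;             (PARM_DECL remapped to a VAR_DECL)
       retval.2 = x.1 + 1;  (the return became a MODIFY_EXPR)
       y = retval.2;        (store from the returned-value variable)

   where "x.1" and "retval.2" are hypothetical names for the remapped
   parameter and the dedicated return variable.  */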

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
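
/* A usage sketch (hypothetical names): after
     insert_decl_map (id, old_var, new_var);
   both pointer_map_contains (id->decl_map, old_var) and
   pointer_map_contains (id->decl_map, new_var) yield new_var, so
   walking a tree that already contains remapped nodes maps them to
   themselves instead of duplicating them a second time.  */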

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function with an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE if we end up extending the lifetime of
             an SSA name across an abnormal edge, and it also increases
             register pressure.

             We simply initialize all uninitialized vars to 0, except
             when we are inlining into the very first BB.  We could
             avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to
             test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
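
/* A sketch of the zero-initialization above (hypothetical names): if
   the callee reads an uninitialized local "t", its default-definition
   SSA name t_1 is remapped to a fresh t.N_2 in the caller and, unless
   we are inlining into the entry block, an explicit

     t.N_2 = 0;

   is emitted, so the new name never lives uninitialized across an
   abnormal edge.  */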

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If this is a FIELD_DECL, do likewise for the offset and
         qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if ((TREE_CODE (t) == VAR_DECL
           || TREE_CODE (t) == RESULT_DECL
           || TREE_CODE (t) == PARM_DECL)
          && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
          && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
          /* We don't want to mark as referenced VAR_DECLs that were
             not marked as such in the src function.  */
          && (TREE_CODE (decl) != VAR_DECL
              || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                        DECL_UID (decl))))
        add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy; build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
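
/* An example of why remap_type_1 walks TYPE_SIZE (a sketch with a
   hypothetical callee): for a variably modified array type such as

     void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of "a" is copied, and the walk of TYPE_SIZE and
   TYPE_DOMAIN rewrites the size expression involving "n" in terms of
   the caller's copy of "n", so the size is evaluated in the inlined
   context.  */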

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
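
/* For instance, a callee-local "static int counter;" must not be
   duplicated per inline copy; can_be_nonlocal returns true for it, so
   the shared decl is merely recorded in BLOCK_NONLOCALIZED_VARS for
   debug info rather than remapped.  (An illustration, not an
   exhaustive list of the cases above.)  */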

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier by
     not swapping the order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          tree tem;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
                                                     ptr,
                                                     TREE_OPERAND (*tp, 1),
                                                     type))
              && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
            {
              tree *tem_basep = &tem;
              while (handled_component_p (*tem_basep))
                tem_basep = &TREE_OPERAND (*tem_basep, 0);
              if (TREE_CODE (*tem_basep) == MEM_REF)
                *tem_basep
                  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                            TREE_OPERAND (*tem_basep, 0),
                            fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
                                          TREE_OPERAND (*tem_basep, 1)));
              else
                *tem_basep
                  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                            build_fold_addr_expr (*tem_basep),
                            build_int_cst
                              (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
              *tp = tem;
            }
          else
            {
              *tp = fold_build2 (MEM_REF, type,
                                 ptr, TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
            }
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
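
/* A sketch of the MEM_REF re-canonicalization above (hypothetical
   names): if the caller passes "&s.f" for parameter "p", the callee's
   dereference

     MEM_REF[p, 8]

   is remapped to MEM_REF[&s.f, 8] and re-folded, so later passes see
   a direct component reference instead of an indirect access through
   the remapped pointer.  */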


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *) 1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
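
/* The INDIRECT_REF case above implements the classic "*&" cleanup (a
   sketch with hypothetical names): when the argument "&x" was
   substituted for parameter "p", the callee's "*p" first remaps to
   "*&x" and is then folded to plain "x", avoiding a spurious
   indirection in the inlined body.  */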

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}


/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
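
  /* For example (an illustration only; actual scales come from the
     caller): with count_scale == REG_BR_PROB_BASE / 2, i.e. the call
     site is executed half as often as the callee's profile assumed,
     a bb->count of 1000 is copied as 500.  */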
4ae20857 1527
75a70cf9 1528 /* We are going to rebuild frequencies from scratch. These values
1529 have just small importance to drive canonicalize_loop_headers. */
e2d3f422 1530 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
75a70cf9 1531
e2d3f422 1532 /* We recompute frequencies after inlining, so this is quite safe. */
1533 if (freq > BB_FREQ_MAX)
1534 freq = BB_FREQ_MAX;
1535 copy_basic_block->frequency = freq;
e27482aa 1536
75a70cf9 1537 copy_gsi = gsi_start_bb (copy_basic_block);
1538
1539 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e27482aa 1540 {
75a70cf9 1541 gimple stmt = gsi_stmt (gsi);
1542 gimple orig_stmt = stmt;
e27482aa 1543
3d053ea5 1544 id->regimplify = false;
75a70cf9 1545 stmt = remap_gimple_stmt (stmt, id);
1546 if (gimple_nop_p (stmt))
1547 continue;
1548
1549 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
b236ac0e 1550 seq_gsi = copy_gsi;
75a70cf9 1551
1552 /* With return slot optimization we can end up with
1553 non-gimple (foo *)&this->m, fix that here. */
da143e71 1554 if (is_gimple_assign (stmt)
1555 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1556 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
e27482aa 1557 {
75a70cf9 1558 tree new_rhs;
b236ac0e 1559 new_rhs = force_gimple_operand_gsi (&seq_gsi,
567cd518 1560 gimple_assign_rhs1 (stmt),
09cfc275 1561 true, NULL, false,
1562 GSI_CONTINUE_LINKING);
75a70cf9 1563 gimple_assign_set_rhs1 (stmt, new_rhs);
b236ac0e 1564 id->regimplify = false;
75a70cf9 1565 }
5bfb60b4 1566
b236ac0e 1567 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1568
1569 if (id->regimplify)
1570 gimple_regimplify_operands (stmt, &seq_gsi);
1571
1572 /* If copy_basic_block has been empty at the start of this iteration,
1573 call gsi_start_bb again to get at the newly added statements. */
1574 if (gsi_end_p (copy_gsi))
1575 copy_gsi = gsi_start_bb (copy_basic_block);
1576 else
1577 gsi_next (&copy_gsi);
deff5ffd 1578
75a70cf9 1579 /* Process the new statement. The call to gimple_regimplify_operands
1580 possibly turned the statement into multiple statements, we
1581 need to process all of them. */
b236ac0e 1582 do
75a70cf9 1583 {
ccf4ab6b 1584 tree fn;
1585
b236ac0e 1586 stmt = gsi_stmt (copy_gsi);
75a70cf9 1587 if (is_gimple_call (stmt)
1588 && gimple_call_va_arg_pack_p (stmt)
1589 && id->gimple_call)
1590 {
1591 /* __builtin_va_arg_pack () should be replaced by
1592 all arguments corresponding to ... in the caller. */
1593 tree p;
1594 gimple new_call;
1595 VEC(tree, heap) *argarray;
1596 size_t nargs = gimple_call_num_args (id->gimple_call);
1597 size_t n;
1598
1767a056 1599 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
75a70cf9 1600 nargs--;
1601
1602 /* Create the new array of arguments. */
1603 n = nargs + gimple_call_num_args (stmt);
1604 argarray = VEC_alloc (tree, heap, n);
1605 VEC_safe_grow (tree, heap, argarray, n);
1606
1607 /* Copy all the arguments before '...' */
1608 memcpy (VEC_address (tree, argarray),
1609 gimple_call_arg_ptr (stmt, 0),
1610 gimple_call_num_args (stmt) * sizeof (tree));
1611
1612 /* Append the arguments passed in '...' */
1613 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1614 gimple_call_arg_ptr (id->gimple_call, 0)
1615 + (gimple_call_num_args (id->gimple_call) - nargs),
1616 nargs * sizeof (tree));
1617
1618 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1619 argarray);
1620
1621 VEC_free (tree, heap, argarray);
1622
1623 /* Copy all GIMPLE_CALL flags, location and block, except
1624 GF_CALL_VA_ARG_PACK. */
1625 gimple_call_copy_flags (new_call, stmt);
1626 gimple_call_set_va_arg_pack (new_call, false);
1627 gimple_set_location (new_call, gimple_location (stmt));
1628 gimple_set_block (new_call, gimple_block (stmt));
1629 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1630
1631 gsi_replace (&copy_gsi, new_call, false);
1632 stmt = new_call;
1633 }
1634 else if (is_gimple_call (stmt)
1635 && id->gimple_call
1636 && (decl = gimple_call_fndecl (stmt))
1637 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1638 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
9bfec7c2 1639 {
75a70cf9 1640 /* __builtin_va_arg_pack_len () should be replaced by
1641 the number of anonymous arguments. */
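	      /* Continuing the hypothetical example used above for
		 __builtin_va_arg_pack: when inlining f (1, 2, 3),
		 __builtin_va_arg_pack_len () is replaced by the
		 constant 2, the number of arguments matching the
		 ellipsis.  */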
1642 size_t nargs = gimple_call_num_args (id->gimple_call);
1643 tree count, p;
1644 gimple new_stmt;
1645
1767a056 1646 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
75a70cf9 1647 nargs--;
1648
1649 count = build_int_cst (integer_type_node, nargs);
1650 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1651 gsi_replace (&copy_gsi, new_stmt, false);
1652 stmt = new_stmt;
1653 }
3e9045dd 1654
75a70cf9 1655 /* Statements produced by inlining can be unfolded, especially
1656 when we have constant propagated some operands. We can't fold
1657 them right now for two reasons:
1658 1) folding requires SSA_NAME_DEF_STMTs to be correct
1659 2) we can't change function calls to builtins.
1660 So we just mark the statement for later folding. We mark
1661 all new statements, instead of just statements that changed
1662 by some nontrivial substitution, so even statements made
1663 foldable indirectly are updated. If this turns out to be
1664 expensive, copy_body can be told to watch for nontrivial
1665 changes. */
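	  /* For instance, substituting a constant for a parameter may
	     leave a copied statement reading x_1 = 4 * 8; queuing it
	     here lets fold_marked_statements collapse it to x_1 = 32
	     once the SSA web of the copy is consistent.  */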
1666 if (id->statements_to_fold)
1667 pointer_set_insert (id->statements_to_fold, stmt);
1668
1669 /* We're duplicating a CALL_EXPR. Find any corresponding
1670 callgraph edges and update or duplicate them. */
1671 if (is_gimple_call (stmt))
1672 {
da50fe8f 1673 struct cgraph_edge *edge;
28efe441 1674 int flags;
48dc2227 1675
75a70cf9 1676 switch (id->transform_call_graph_edges)
9bfec7c2 1677 {
da50fe8f 1678 case CB_CGE_DUPLICATE:
1679 edge = cgraph_edge (id->src_node, orig_stmt);
1680 if (edge)
e2d3f422 1681 {
1682 int edge_freq = edge->frequency;
1683 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1684 gimple_uid (stmt),
1685 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
0835ad03 1686 true);
e2d3f422 1687 /* We could also just rescale the frequency, but
1688 doing so would introduce roundoff errors and make
1689 the verifier unhappy. */
48e1416a 1690 edge->frequency
e2d3f422 1691 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1692 copy_basic_block);
1693 if (dump_file
1694 && profile_status_for_function (cfun) != PROFILE_ABSENT
1695 && (edge_freq > edge->frequency + 10
1696 || edge_freq < edge->frequency - 10))
1697 {
1698 fprintf (dump_file, "Edge frequency estimated by "
1699 "cgraph %i diverge from inliner's estimate %i\n",
1700 edge_freq,
1701 edge->frequency);
1702 fprintf (dump_file,
1703 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1704 bb->index,
1705 bb->frequency,
1706 copy_basic_block->frequency);
1707 }
c596d830 1708 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
e2d3f422 1709 }
da50fe8f 1710 break;
1711
1712 case CB_CGE_MOVE_CLONES:
1713 cgraph_set_call_stmt_including_clones (id->dst_node,
1714 orig_stmt, stmt);
1715 edge = cgraph_edge (id->dst_node, stmt);
1716 break;
1717
1718 case CB_CGE_MOVE:
1719 edge = cgraph_edge (id->dst_node, orig_stmt);
1720 if (edge)
1721 cgraph_set_call_stmt (edge, stmt);
1722 break;
1723
1724 default:
1725 gcc_unreachable ();
deff5ffd 1726 }
28efe441 1727
da50fe8f 1728 /* Constant propagation on arguments done during inlining
1729 may create new direct calls. Produce an edge for such a call. */
48e1416a 1730 if ((!edge
799c8711 1731 || (edge->indirect_inlining_edge
da50fe8f 1732 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
a5c37976 1733 && id->dst_node->analyzed
da50fe8f 1734 && (fn = gimple_call_fndecl (stmt)) != NULL)
1735 {
fd6a3c41 1736 struct cgraph_node *dest = cgraph_get_node (fn);
da50fe8f 1737
1738 /* We have a missing edge in the callgraph. This can happen
1739 when previous inlining turned an indirect call into a
6d1cc52c 1740 direct call by constant propagating arguments, or when we are
292233cd 1741 producing a dead clone (for further cloning). In all
da50fe8f 1742 other cases we hit a bug (incorrect node sharing is the
1743 most common reason for missing edges). */
6d1cc52c 1744 gcc_assert (dest->needed || !dest->analyzed
cdedc740 1745 || dest->address_taken
083e227a 1746 || !id->src_node->analyzed
1747 || !id->dst_node->analyzed);
da50fe8f 1748 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1749 cgraph_create_edge_including_clones
ee3f5fc0 1750 (id->dst_node, dest, orig_stmt, stmt, bb->count,
48e1416a 1751 compute_call_stmt_bb_frequency (id->dst_node->decl,
e2d3f422 1752 copy_basic_block),
0835ad03 1753 CIF_ORIGINALLY_INDIRECT_CALL);
da50fe8f 1754 else
1755 cgraph_create_edge (id->dst_node, dest, stmt,
ee3f5fc0 1756 bb->count,
1757 compute_call_stmt_bb_frequency
0835ad03 1758 (id->dst_node->decl, copy_basic_block))->inline_failed
da50fe8f 1759 = CIF_ORIGINALLY_INDIRECT_CALL;
1760 if (dump_file)
1761 {
b06ab5fa 1762 fprintf (dump_file, "Created new direct edge to %s\n",
da50fe8f 1763 cgraph_node_name (dest));
1764 }
1765 }
ccf4ab6b 1766
28efe441 1767 flags = gimple_call_flags (stmt);
28efe441 1768 if (flags & ECF_MAY_BE_ALLOCA)
1769 cfun->calls_alloca = true;
1770 if (flags & ECF_RETURNS_TWICE)
1771 cfun->calls_setjmp = true;
75a70cf9 1772 }
e27482aa 1773
e38def9c 1774 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1775 id->eh_map, id->eh_lp_nr);
75a70cf9 1776
9845d120 1777 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
75a70cf9 1778 {
1779 ssa_op_iter i;
1780 tree def;
1781
1782 find_new_referenced_vars (gsi_stmt (copy_gsi));
1783 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1784 if (TREE_CODE (def) == SSA_NAME)
1785 SSA_NAME_DEF_STMT (def) = stmt;
1786 }
1787
1788 gsi_next (&copy_gsi);
e27482aa 1789 }
b236ac0e 1790 while (!gsi_end_p (copy_gsi));
75a70cf9 1791
1792 copy_gsi = gsi_last_bb (copy_basic_block);
e27482aa 1793 }
75a70cf9 1794
e27482aa 1795 return copy_basic_block;
1796}
1797
deff5ffd 1798 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1799 form is quite easy, since the dominator relationship for the old basic
1800 blocks does not change.
1801
1802 There is, however, an exception: inlining might change the dominator
1803 relation across EH edges from basic blocks within the inlined function
060a1cf3 1804 leading to landing pads in the function we inline into.
deff5ffd 1805
fd71b542 1806 The function fills in PHI_RESULTs of such PHI nodes if they refer
1807 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1808 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1809 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1810 set, and this means that there will be no overlapping live ranges
deff5ffd 1811 for the underlying symbol.
1812
1813 This might change in the future if we allow redirecting of EH edges;
1814 we might then want to change the way we build the CFG pre-inlining
1815 to include all the possible edges. */
1816static void
fd71b542 1817update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1818 bool can_throw, bool nonlocal_goto)
deff5ffd 1819{
1820 edge e;
1821 edge_iterator ei;
1822
1823 FOR_EACH_EDGE (e, ei, bb->succs)
1824 if (!e->dest->aux
1825 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1826 {
75a70cf9 1827 gimple phi;
1828 gimple_stmt_iterator si;
deff5ffd 1829
fd71b542 1830 if (!nonlocal_goto)
1831 gcc_assert (e->flags & EDGE_EH);
75a70cf9 1832
fd71b542 1833 if (!can_throw)
1834 gcc_assert (!(e->flags & EDGE_EH));
75a70cf9 1835
1836 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
deff5ffd 1837 {
fd71b542 1838 edge re;
1839
75a70cf9 1840 phi = gsi_stmt (si);
1841
fd71b542 1842 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1843 gcc_assert (!e->dest->aux);
1844
3d1eacdb 1845 gcc_assert ((e->flags & EDGE_EH)
1846 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
fd71b542 1847
1848 if (!is_gimple_reg (PHI_RESULT (phi)))
1849 {
75a70cf9 1850 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
fd71b542 1851 continue;
1852 }
1853
1854 re = find_edge (ret_bb, e->dest);
e782a310 1855 gcc_assert (re);
fd71b542 1856 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1857 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1858
1859 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1860 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
deff5ffd 1861 }
1862 }
1863}
1864
75a70cf9 1865
c78cbec8 1866/* Copy edges from BB into its copy constructed earlier, scale profile
1867 accordingly. Edges will be taken care of later. Assume aux
39a98435 1868 pointers point to the copies of each BB. Return true if any
1869 debug stmts are left after a statement that must end the basic block. */
75a70cf9 1870
39a98435 1871static bool
5b17b7ae 1872copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
e27482aa 1873{
a9c6c0e3 1874 basic_block new_bb = (basic_block) bb->aux;
e27482aa 1875 edge_iterator ei;
1876 edge old_edge;
75a70cf9 1877 gimple_stmt_iterator si;
e27482aa 1878 int flags;
39a98435 1879 bool need_debug_cleanup = false;
e27482aa 1880
1881 /* Use the indices from the original blocks to create edges for the
1882 new ones. */
1883 FOR_EACH_EDGE (old_edge, ei, bb->succs)
9bfec7c2 1884 if (!(old_edge->flags & EDGE_EH))
1885 {
f4e36c33 1886 edge new_edge;
e27482aa 1887
9bfec7c2 1888 flags = old_edge->flags;
e27482aa 1889
9bfec7c2 1890 /* Return edges do get a FALLTHRU flag when they get inlined. */
1891 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1892 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1893 flags |= EDGE_FALLTHRU;
f4e36c33 1894 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1895 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1896 new_edge->probability = old_edge->probability;
9bfec7c2 1897 }
e27482aa 1898
1899 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
39a98435 1900 return false;
e27482aa 1901
75a70cf9 1902 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e27482aa 1903 {
75a70cf9 1904 gimple copy_stmt;
fd71b542 1905 bool can_throw, nonlocal_goto;
e27482aa 1906
75a70cf9 1907 copy_stmt = gsi_stmt (si);
9845d120 1908 if (!is_gimple_debug (copy_stmt))
1909 {
1910 update_stmt (copy_stmt);
1911 if (gimple_in_ssa_p (cfun))
1912 mark_symbols_for_renaming (copy_stmt);
1913 }
75a70cf9 1914
e27482aa 1915 /* Do this before the possible split_block. */
75a70cf9 1916 gsi_next (&si);
e27482aa 1917
1918 /* If this tree could throw an exception, there are two
1919 cases where we need to add abnormal edge(s): the
1920 tree wasn't in a region and there is a "current
1921 region" in the caller; or the original tree had
1922 EH edges. In both cases split the block after the tree,
1923 and add abnormal edge(s) as needed; we need both
1924 those from the callee and the caller.
1925 We check whether the copy can throw, because the const
1926 propagation can change an INDIRECT_REF which throws
1927 into a COMPONENT_REF which doesn't. If the copy
1928 can throw, the original could also throw. */
75a70cf9 1929 can_throw = stmt_can_throw_internal (copy_stmt);
1930 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
fd71b542 1931
1932 if (can_throw || nonlocal_goto)
e27482aa 1933 {
39a98435 1934 if (!gsi_end_p (si))
1935 {
1936 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1937 gsi_next (&si);
1938 if (gsi_end_p (si))
1939 need_debug_cleanup = true;
1940 }
75a70cf9 1941 if (!gsi_end_p (si))
e27482aa 1942 /* Note that bb's predecessor edges aren't necessarily
1943 right at this point; split_block doesn't care. */
1944 {
1945 edge e = split_block (new_bb, copy_stmt);
deff5ffd 1946
e27482aa 1947 new_bb = e->dest;
deff5ffd 1948 new_bb->aux = e->src->aux;
75a70cf9 1949 si = gsi_start_bb (new_bb);
e27482aa 1950 }
fd71b542 1951 }
e27482aa 1952
e38def9c 1953 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1954 make_eh_dispatch_edges (copy_stmt);
1955 else if (can_throw)
fd71b542 1956 make_eh_edges (copy_stmt);
deff5ffd 1957
fd71b542 1958 if (nonlocal_goto)
75a70cf9 1959 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
fd71b542 1960
1961 if ((can_throw || nonlocal_goto)
1962 && gimple_in_ssa_p (cfun))
75a70cf9 1963 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
fd71b542 1964 can_throw, nonlocal_goto);
deff5ffd 1965 }
39a98435 1966 return need_debug_cleanup;
deff5ffd 1967}
1968
1969 /* Copy the PHIs. All blocks and edges are copied, some blocks
1970 were possibly split and new outgoing EH edges inserted.
1971 BB points to the block of the original function and AUX pointers link
1972 the original and newly copied blocks. */
1973
1974static void
1975copy_phis_for_bb (basic_block bb, copy_body_data *id)
1976{
45ba1503 1977 basic_block const new_bb = (basic_block) bb->aux;
deff5ffd 1978 edge_iterator ei;
75a70cf9 1979 gimple phi;
1980 gimple_stmt_iterator si;
10f8ddfc 1981 edge new_edge;
1982 bool inserted = false;
deff5ffd 1983
75a70cf9 1984 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
deff5ffd 1985 {
75a70cf9 1986 tree res, new_res;
1987 gimple new_phi;
deff5ffd 1988
75a70cf9 1989 phi = gsi_stmt (si);
1990 res = PHI_RESULT (phi);
1991 new_res = res;
deff5ffd 1992 if (is_gimple_reg (res))
1993 {
75a70cf9 1994 walk_tree (&new_res, copy_tree_body_r, id, NULL);
deff5ffd 1995 SSA_NAME_DEF_STMT (new_res)
1996 = new_phi = create_phi_node (new_res, new_bb);
1997 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1998 {
6a69e813 1999 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2000 tree arg;
2001 tree new_arg;
75a70cf9 2002 tree block = id->block;
6a69e813 2003 edge_iterator ei2;
2004
292233cd 2005 /* When doing partial cloning, we allow PHIs on the entry block
6a69e813 2006 as long as all the arguments are the same. Find any input
2007 edge to see which argument to copy. */
2008 if (!old_edge)
2009 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2010 if (!old_edge->src->aux)
2011 break;
2012
2013 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2014 new_arg = arg;
75a70cf9 2015 id->block = NULL_TREE;
2016 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2017 id->block = block;
deff5ffd 2018 gcc_assert (new_arg);
9915e061 2019 /* With return slot optimization we can end up with
2020 non-gimple (foo *)&this->m, fix that here. */
2021 if (TREE_CODE (new_arg) != SSA_NAME
2022 && TREE_CODE (new_arg) != FUNCTION_DECL
2023 && !is_gimple_val (new_arg))
2024 {
75a70cf9 2025 gimple_seq stmts = NULL;
2026 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
10f8ddfc 2027 gsi_insert_seq_on_edge (new_edge, stmts);
2028 inserted = true;
9915e061 2029 }
48e1416a 2030 add_phi_arg (new_phi, new_arg, new_edge,
efbcb6de 2031 gimple_phi_arg_location_from_edge (phi, old_edge));
deff5ffd 2032 }
e27482aa 2033 }
2034 }
10f8ddfc 2035
2036 /* Commit the delayed edge insertions. */
2037 if (inserted)
2038 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2039 gsi_commit_one_edge_insert (new_edge, NULL);
e27482aa 2040}
2041
75a70cf9 2042
e27482aa 2043/* Wrapper for remap_decl so it can be used as a callback. */
75a70cf9 2044
e27482aa 2045static tree
2046remap_decl_1 (tree decl, void *data)
2047{
51a48c27 2048 return remap_decl (decl, (copy_body_data *) data);
e27482aa 2049}
2050
deff5ffd 2051/* Build struct function and associated datastructures for the new clone
2052 NEW_FNDECL to be build. CALLEE_FNDECL is the original */
2053
2054static void
e2d3f422 2055initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
deff5ffd 2056{
deff5ffd 2057 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
e2d3f422 2058 gcov_type count_scale;
deff5ffd 2059
2060 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2061 count_scale = (REG_BR_PROB_BASE * count
2062 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2063 else
e2d3f422 2064 count_scale = REG_BR_PROB_BASE;
deff5ffd 2065
2066 /* Register specific tree functions. */
75a70cf9 2067 gimple_register_cfg_hooks ();
1a1a827a 2068
2069 /* Get clean struct function. */
2070 push_struct_function (new_fndecl);
2071
2072 /* We will rebuild these, so just sanity check that they are empty. */
2073 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2074 gcc_assert (cfun->local_decls == NULL);
2075 gcc_assert (cfun->cfg == NULL);
2076 gcc_assert (cfun->decl == new_fndecl);
2077
292233cd 2078 /* Copy items we preserve during cloning. */
1a1a827a 2079 cfun->static_chain_decl = src_cfun->static_chain_decl;
2080 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2081 cfun->function_end_locus = src_cfun->function_end_locus;
2082 cfun->curr_properties = src_cfun->curr_properties;
2083 cfun->last_verified = src_cfun->last_verified;
1a1a827a 2084 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2085 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1a1a827a 2086 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2087 cfun->stdarg = src_cfun->stdarg;
1a1a827a 2088 cfun->after_inlining = src_cfun->after_inlining;
cbeb677e 2089 cfun->can_throw_non_call_exceptions
2090 = src_cfun->can_throw_non_call_exceptions;
1a1a827a 2091 cfun->returns_struct = src_cfun->returns_struct;
2092 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2093 cfun->after_tree_profile = src_cfun->after_tree_profile;
2094
deff5ffd 2095 init_empty_tree_cfg ();
2096
e2d3f422 2097 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
deff5ffd 2098 ENTRY_BLOCK_PTR->count =
2099 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2100 REG_BR_PROB_BASE);
e2d3f422 2101 ENTRY_BLOCK_PTR->frequency
2102 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
deff5ffd 2103 EXIT_BLOCK_PTR->count =
2104 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2105 REG_BR_PROB_BASE);
2106 EXIT_BLOCK_PTR->frequency =
e2d3f422 2107 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
deff5ffd 2108 if (src_cfun->eh)
2109 init_eh_for_function ();
2110
2111 if (src_cfun->gimple_df)
2112 {
bcaa2770 2113 init_tree_ssa (cfun);
deff5ffd 2114 cfun->gimple_df->in_ssa_p = true;
2115 init_ssa_operands ();
2116 }
2117 pop_cfun ();
2118}
2119
39a98435 2120/* Helper function for copy_cfg_body. Move debug stmts from the end
2121 of NEW_BB to the beginning of successor basic blocks when needed. If the
2122 successor has multiple predecessors, reset the values of the debug
2123 stmts; otherwise keep their values. */
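/* For instance (sketch): when a copied block ends with

     foo ();		<-- may throw internally
     # DEBUG x => 1

   the bind after the throwing call belongs to the block's successors;
   on a successor with several predecessors its value is reset, since
   the incoming paths could disagree about it.  */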
2124
2125static void
2126maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2127{
2128 edge e;
2129 edge_iterator ei;
2130 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2131
2132 if (gsi_end_p (si)
2133 || gsi_one_before_end_p (si)
2134 || !(stmt_can_throw_internal (gsi_stmt (si))
2135 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2136 return;
2137
2138 FOR_EACH_EDGE (e, ei, new_bb->succs)
2139 {
2140 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2141 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2142 while (is_gimple_debug (gsi_stmt (ssi)))
2143 {
2144 gimple stmt = gsi_stmt (ssi), new_stmt;
2145 tree var;
2146 tree value;
2147
2148 /* For the last edge move the debug stmts instead of copying
2149 them. */
2150 if (ei_one_before_end_p (ei))
2151 {
2152 si = ssi;
2153 gsi_prev (&ssi);
2154 if (!single_pred_p (e->dest))
2155 gimple_debug_bind_reset_value (stmt);
2156 gsi_remove (&si, false);
2157 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2158 continue;
2159 }
2160
2161 var = gimple_debug_bind_get_var (stmt);
2162 if (single_pred_p (e->dest))
2163 {
2164 value = gimple_debug_bind_get_value (stmt);
2165 value = unshare_expr (value);
2166 }
2167 else
2168 value = NULL_TREE;
2169 new_stmt = gimple_build_debug_bind (var, value, stmt);
2170 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2171 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2172 gsi_prev (&ssi);
2173 }
2174 }
2175}
2176
e27482aa 2177/* Make a copy of the body of FN so that it can be inserted inline in
2178 another function. Walks FN via CFG, returns new fndecl. */
2179
2180static tree
e2d3f422 2181copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
b06ab5fa 2182 basic_block entry_block_map, basic_block exit_block_map,
2183 bitmap blocks_to_copy, basic_block new_entry)
e27482aa 2184{
51a48c27 2185 tree callee_fndecl = id->src_fn;
e27482aa 2186 /* Original cfun for the callee, doesn't change. */
51a48c27 2187 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
deff5ffd 2188 struct function *cfun_to_copy;
e27482aa 2189 basic_block bb;
2190 tree new_fndecl = NULL;
39a98435 2191 bool need_debug_cleanup = false;
e2d3f422 2192 gcov_type count_scale;
deff5ffd 2193 int last;
292233cd 2194 int incoming_frequency = 0;
2195 gcov_type incoming_count = 0;
e27482aa 2196
51a48c27 2197 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
e27482aa 2198 count_scale = (REG_BR_PROB_BASE * count
51a48c27 2199 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
e27482aa 2200 else
e2d3f422 2201 count_scale = REG_BR_PROB_BASE;
e27482aa 2202
2203 /* Register specific tree functions. */
75a70cf9 2204 gimple_register_cfg_hooks ();
e27482aa 2205
555e8b05 2206 /* If we are inlining just a region of the function, make sure to connect the
2207 new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we
2208 must compute the frequency and probability of ENTRY_BLOCK_PTR based on the
292233cd 2209 frequencies and probabilities of edges incoming from the nonduplicated region. */
555e8b05 2210 if (new_entry)
2211 {
2212 edge e;
2213 edge_iterator ei;
2214
2215 FOR_EACH_EDGE (e, ei, new_entry->preds)
2216 if (!e->src->aux)
2217 {
292233cd 2218 incoming_frequency += EDGE_FREQUENCY (e);
2219 incoming_count += e->count;
555e8b05 2220 }
292233cd 2221 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2222 incoming_frequency
2223 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2224 ENTRY_BLOCK_PTR->count = incoming_count;
2225 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
555e8b05 2226 }
2227
e27482aa 2228 /* Must have a CFG here at this point. */
2229 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2230 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2231
deff5ffd 2232 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2233
e27482aa 2234 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2235 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
deff5ffd 2236 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2237 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
e27482aa 2238
e27482aa 2239 /* Duplicate any exception-handling regions. */
2240 if (cfun->eh)
e38def9c 2241 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2242 remap_decl_1, id);
75a70cf9 2243
e27482aa 2244 /* Use aux pointers to map the original blocks to copy. */
2245 FOR_EACH_BB_FN (bb, cfun_to_copy)
b06ab5fa 2246 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2247 {
2248 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2249 bb->aux = new_bb;
2250 new_bb->aux = bb;
2251 }
deff5ffd 2252
944cca50 2253 last = last_basic_block;
75a70cf9 2254
e27482aa 2255 /* Now that we've duplicated the blocks, duplicate their edges. */
2256 FOR_ALL_BB_FN (bb, cfun_to_copy)
b06ab5fa 2257 if (!blocks_to_copy
2258 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2259 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
75a70cf9 2260
b06ab5fa 2261 if (new_entry)
deff5ffd 2262 {
555e8b05 2263 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
b06ab5fa 2264 e->probability = REG_BR_PROB_BASE;
292233cd 2265 e->count = incoming_count;
deff5ffd 2266 }
75a70cf9 2267
6a69e813 2268 if (gimple_in_ssa_p (cfun))
2269 FOR_ALL_BB_FN (bb, cfun_to_copy)
2270 if (!blocks_to_copy
2271 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2272 copy_phis_for_bb (bb, id);
2273
b06ab5fa 2274 FOR_ALL_BB_FN (bb, cfun_to_copy)
2275 if (bb->aux)
2276 {
2277 if (need_debug_cleanup
2278 && bb->index != ENTRY_BLOCK
2279 && bb->index != EXIT_BLOCK)
2280 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2281 ((basic_block)bb->aux)->aux = NULL;
2282 bb->aux = NULL;
2283 }
2284
deff5ffd 2285 /* Zero out AUX fields of blocks newly created during EH edge
2286 insertion. */
944cca50 2287 for (; last < last_basic_block; last++)
39a98435 2288 {
2289 if (need_debug_cleanup)
2290 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2291 BASIC_BLOCK (last)->aux = NULL;
2292 }
deff5ffd 2293 entry_block_map->aux = NULL;
2294 exit_block_map->aux = NULL;
e27482aa 2295
e38def9c 2296 if (id->eh_map)
2297 {
2298 pointer_map_destroy (id->eh_map);
2299 id->eh_map = NULL;
2300 }
2301
e27482aa 2302 return new_fndecl;
2303}
2304
9845d120 2305/* Copy the debug STMT using ID. We deal with these statements in a
2306 special way: if any variable in their VALUE expression wasn't
2307 remapped yet, we won't remap it, because that would get decl uids
2308 out of sync, causing codegen differences between -g and -g0. If
2309 this arises, we drop the VALUE expression altogether. */
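/* E.g. (sketch): a bind such as  # DEBUG y => a + 1  where A occurs only
   in debug uses may have no remapping by the time the copy is processed;
   remapping A here would create a fresh decl (and uid) only when -g is
   given, so the bind is instead reset to  # DEBUG y => NULL.  */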
2310
2311static void
2312copy_debug_stmt (gimple stmt, copy_body_data *id)
2313{
2314 tree t, *n;
2315 struct walk_stmt_info wi;
2316
2317 t = id->block;
2318 if (gimple_block (stmt))
2319 {
2320 tree *n;
2321 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2322 if (n)
2323 t = *n;
2324 }
2325 gimple_set_block (stmt, t);
2326
2327 /* Remap all the operands in COPY. */
2328 memset (&wi, 0, sizeof (wi));
2329 wi.info = id;
2330
2331 processing_debug_stmt = 1;
2332
2333 t = gimple_debug_bind_get_var (stmt);
2334
2335 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2336 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2337 {
2338 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2339 t = *n;
2340 }
e0667160 2341 else if (TREE_CODE (t) == VAR_DECL
2342 && !TREE_STATIC (t)
2343 && gimple_in_ssa_p (cfun)
2344 && !pointer_map_contains (id->decl_map, t)
2345 && !var_ann (t))
2346 /* T is a non-localized variable. */;
9845d120 2347 else
2348 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2349
2350 gimple_debug_bind_set_var (stmt, t);
2351
2352 if (gimple_debug_bind_has_value_p (stmt))
2353 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2354 remap_gimple_op_r, &wi, NULL);
2355
2356 /* Punt if any decl couldn't be remapped. */
2357 if (processing_debug_stmt < 0)
2358 gimple_debug_bind_reset_value (stmt);
2359
2360 processing_debug_stmt = 0;
2361
2362 update_stmt (stmt);
2363 if (gimple_in_ssa_p (cfun))
2364 mark_symbols_for_renaming (stmt);
2365}
2366
2367/* Process deferred debug stmts. In order to give values better odds
2368 of being successfully remapped, we delay the processing of debug
2369 stmts until all other stmts that might require remapping are
2370 processed. */
2371
2372static void
2373copy_debug_stmts (copy_body_data *id)
2374{
2375 size_t i;
2376 gimple stmt;
2377
2378 if (!id->debug_stmts)
2379 return;
2380
48148244 2381 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
9845d120 2382 copy_debug_stmt (stmt, id);
2383
2384 VEC_free (gimple, heap, id->debug_stmts);
2385}
2386
4189e677 2387/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2388 another function. */
2389
2390static tree
2391copy_tree_body (copy_body_data *id)
2392{
2393 tree fndecl = id->src_fn;
2394 tree body = DECL_SAVED_TREE (fndecl);
2395
2396 walk_tree (&body, copy_tree_body_r, id, NULL);
2397
2398 return body;
2399}
2400
9845d120 2401/* Make a copy of the body of FN so that it can be inserted inline in
2402 another function. */
2403
e27482aa 2404static tree
e2d3f422 2405copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
b06ab5fa 2406 basic_block entry_block_map, basic_block exit_block_map,
2407 bitmap blocks_to_copy, basic_block new_entry)
e27482aa 2408{
51a48c27 2409 tree fndecl = id->src_fn;
e27482aa 2410 tree body;
2411
2412 /* If this body has a CFG, walk CFG and copy. */
2413 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
b06ab5fa 2414 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2415 blocks_to_copy, new_entry);
9845d120 2416 copy_debug_stmts (id);
e27482aa 2417
2418 return body;
2419}
2420
78a289a9 2421/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2422 defined in function FN, or of a data member thereof. */
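/* E.g. (sketch): for a recursive  int f (int *p)  whose body calls
   f (&local) with LOCAL an automatic variable of F, substituting
   &local for P while inlining F into itself would bypass the remapping
   of LOCAL; setup_one_parameter therefore refuses to treat such a
   value as a propagatable invariant.  */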
2423
2424static bool
2425self_inlining_addr_expr (tree value, tree fn)
2426{
2427 tree var;
2428
2429 if (TREE_CODE (value) != ADDR_EXPR)
2430 return false;
2431
2432 var = get_base_address (TREE_OPERAND (value, 0));
e27482aa 2433
2ef41604 2434 return var && auto_var_in_fn_p (var, fn);
78a289a9 2435}
2436
9845d120 2437/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2438 lexical block and line number information from base_stmt, if given,
2439 or from the last stmt of the block otherwise. */
2440
2441static gimple
2442insert_init_debug_bind (copy_body_data *id,
2443 basic_block bb, tree var, tree value,
2444 gimple base_stmt)
2445{
2446 gimple note;
2447 gimple_stmt_iterator gsi;
2448 tree tracked_var;
2449
2450 if (!gimple_in_ssa_p (id->src_cfun))
2451 return NULL;
2452
2453 if (!MAY_HAVE_DEBUG_STMTS)
2454 return NULL;
2455
2456 tracked_var = target_for_debug_bind (var);
2457 if (!tracked_var)
2458 return NULL;
2459
2460 if (bb)
2461 {
2462 gsi = gsi_last_bb (bb);
2463 if (!base_stmt && !gsi_end_p (gsi))
2464 base_stmt = gsi_stmt (gsi);
2465 }
2466
2467 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2468
2469 if (bb)
2470 {
2471 if (!gsi_end_p (gsi))
2472 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2473 else
2474 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2475 }
2476
2477 return note;
2478}
2479
4ee9c684 2480static void
9845d120 2481insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
13e50f08 2482{
13e50f08 2483 /* If VAR represents a zero-sized variable, it's possible that the
2484 assignment statement may result in no gimple statements. */
2485 if (init_stmt)
b236ac0e 2486 {
2487 gimple_stmt_iterator si = gsi_last_bb (bb);
13e50f08 2488
8d41b0dd 2489 /* We can end up with init statements that store to a non-register
2490 from a rhs with a conversion. Handle that here by forcing the
2491 rhs into a temporary. gimple_regimplify_operands is not
2492 prepared to do this for us. */
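      /* E.g. (sketch): an init statement such as

	   var = (float) i_1;

	 where VAR is not a register is rewritten here as

	   tmp_2 = (float) i_1;
	   var = tmp_2;  */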
9845d120 2493 if (!is_gimple_debug (init_stmt)
2494 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
8d41b0dd 2495 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2496 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2497 {
2498 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2499 gimple_expr_type (init_stmt),
2500 gimple_assign_rhs1 (init_stmt));
2501 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2502 GSI_NEW_STMT);
2503 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2504 gimple_assign_set_rhs1 (init_stmt, rhs);
2505 }
b236ac0e 2506 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2507 gimple_regimplify_operands (init_stmt, &si);
2508 mark_symbols_for_renaming (init_stmt);
9845d120 2509
2510 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2511 {
2512 tree var, def = gimple_assign_lhs (init_stmt);
2513
2514 if (TREE_CODE (def) == SSA_NAME)
2515 var = SSA_NAME_VAR (def);
2516 else
2517 var = def;
2518
2519 insert_init_debug_bind (id, bb, var, def, init_stmt);
2520 }
b236ac0e 2521 }
13e50f08 2522}
2523
2524 /* Initialize parameter P with VALUE. If needed, produce an init statement
2525 at the end of BB. When BB is NULL, we return the init statement to be
2526 output later. */
2527static gimple
51a48c27 2528setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e27482aa 2529 basic_block bb, tree *vars)
4ee9c684 2530{
13e50f08 2531 gimple init_stmt = NULL;
4ee9c684 2532 tree var;
c8ca3ee7 2533 tree rhs = value;
deff5ffd 2534 tree def = (gimple_in_ssa_p (cfun)
2535 ? gimple_default_def (id->src_cfun, p) : NULL);
4ee9c684 2536
c8ca3ee7 2537 if (value
2538 && value != error_mark_node
2539 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
62437c86 2540 {
2541 if (fold_convertible_p (TREE_TYPE (p), value))
2542 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2543 else
2544 /* ??? For valid (GIMPLE) programs we should not end up here.
2545 Still if something has gone wrong and we end up with truly
2546 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2547 to not leak invalid GIMPLE to the following passes. */
2548 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2549 }
c8ca3ee7 2550
9845d120 2551 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2552 here since the type of this decl must be visible to the calling
2553 function. */
2554 var = copy_decl_to_var (p, id);
2555
2556 /* We're actually using the newly-created var. */
2557 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
649597af 2558 add_referenced_var (var);
9845d120 2559
2560 /* Declare this new variable. */
1767a056 2561 DECL_CHAIN (var) = *vars;
9845d120 2562 *vars = var;
2563
2564 /* Make gimplifier happy about this variable. */
2565 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2566
deff5ffd 2567 /* If the parameter is never assigned to and has no SSA_NAMEs created,
9845d120 2568 we would not need to create a new variable here at all, if it
2569 weren't for debug info. Still, we can just use the argument
2570 value. */
4ee9c684 2571 if (TREE_READONLY (p)
2572 && !TREE_ADDRESSABLE (p)
deff5ffd 2573 && value && !TREE_SIDE_EFFECTS (value)
2574 && !def)
4ee9c684 2575 {
11fe6c8b 2576 /* We may produce non-gimple trees by adding NOPs or introduce
2577 invalid sharing when the operand is not really constant.
2578 It is not a big deal to prohibit constant propagation here, as
2579 we will constant propagate in the DOM1 pass anyway. */
2580 if (is_gimple_min_invariant (value)
c8ca3ee7 2581 && useless_type_conversion_p (TREE_TYPE (p),
2582 TREE_TYPE (value))
78a289a9 2583 /* We have to be very careful about ADDR_EXPR. Make sure
2584 the base variable isn't a local variable of the inlined
2585 function, e.g., when doing recursive inlining, direct or
2586 mutually-recursive or whatever, which is why we don't
2587 just test whether fn == current_function_decl. */
2588 && ! self_inlining_addr_expr (value, fn))
4ee9c684 2589 {
4ee9c684 2590 insert_decl_map (id, p, value);
9845d120 2591 insert_debug_decl_map (id, p, var);
2592 return insert_init_debug_bind (id, bb, var, value, NULL);
4ee9c684 2593 }
2594 }
2595
4ee9c684 2596 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2597 that way, when the PARM_DECL is encountered, it will be
2598 automatically replaced by the VAR_DECL. */
9609c446 2599 insert_decl_map (id, p, var);
4ee9c684 2600
4ee9c684 2601 /* Even if P was TREE_READONLY, the new VAR should not be.
2602 In the original code, we would have constructed a
2603 temporary, and then the function body would have never
2604 changed the value of P. However, now, we will be
2605 constructing VAR directly. The constructor body may
2606 change its value multiple times as it is being
2607 constructed. Therefore, it must not be TREE_READONLY;
2608 the back-end assumes that a TREE_READONLY variable is
2609 assigned to only once. */
2610 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2611 TREE_READONLY (var) = 0;
2612
deff5ffd 2613 /* If there is no setup required and we are in SSA, take the easy route
2614 of replacing all SSA names representing the function parameter by the
2615 SSA name passed to the function.
2616
2617 We need to construct a map for the variable anyway, as it might be used
2618 in different SSA names when the parameter is set in the function.
2619
928059d5 2620 Do the replacement at -O0 for const arguments replaced by a constant.
2621 This is important for builtin_constant_p and other constructs requiring
9845d120 2622 a constant argument to be visible in the inlined function body. */
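  /* E.g. (sketch): for

       static inline int f (const int n) { return __builtin_constant_p (n); }

     a call f (42) maps the default definition of N directly to 42, so
     the builtin folds to 1 even at -O0.  */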
deff5ffd 2623 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
928059d5 2624 && (optimize
2625 || (TREE_READONLY (p)
2626 && is_gimple_min_invariant (rhs)))
deff5ffd 2627 && (TREE_CODE (rhs) == SSA_NAME
35b8d56b 2628 || is_gimple_min_invariant (rhs))
2629 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
deff5ffd 2630 {
2631 insert_decl_map (id, def, rhs);
9845d120 2632 return insert_init_debug_bind (id, bb, var, rhs, NULL);
deff5ffd 2633 }
2634
5dc57610 2635 /* If the value of the argument is never used, don't bother initializing
2636 it. */
12e12890 2637 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
5dc57610 2638 {
2639 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
9845d120 2640 return insert_init_debug_bind (id, bb, var, rhs, NULL);
5dc57610 2641 }
2642
4ee9c684 2643 /* Initialize this VAR_DECL from the equivalent argument. Convert
2644 the argument to the proper type in case it was promoted. */
2645 if (value)
2646 {
4ee9c684 2647 if (rhs == error_mark_node)
deff5ffd 2648 {
9609c446 2649 insert_decl_map (id, p, var);
9845d120 2650 return insert_init_debug_bind (id, bb, var, rhs, NULL);
deff5ffd 2651 }
51ab7e35 2652
aee8cc15 2653 STRIP_USELESS_TYPE_CONVERSION (rhs);
4ee9c684 2654
75a70cf9 2655 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
4ee9c684 2656 keep our trees in gimple form. */
deff5ffd 2657 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2658 {
2659 def = remap_ssa_name (def, id);
75a70cf9 2660 init_stmt = gimple_build_assign (def, rhs);
deff5ffd 2661 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2662 set_default_def (var, NULL);
2663 }
2664 else
75a70cf9 2665 init_stmt = gimple_build_assign (var, rhs);
4ee9c684 2666
13e50f08 2667 if (bb && init_stmt)
9845d120 2668 insert_init_stmt (id, bb, init_stmt);
4ee9c684 2669 }
13e50f08 2670 return init_stmt;
4ee9c684 2671}
2672
e343483a 2673/* Generate code to initialize the parameters of the function at the
75a70cf9 2674 top of the stack in ID from the GIMPLE_CALL STMT. */
e343483a 2675
e27482aa 2676static void
75a70cf9 2677initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e27482aa 2678 tree fn, basic_block bb)
e343483a 2679{
e343483a 2680 tree parms;
75a70cf9 2681 size_t i;
e343483a 2682 tree p;
d57cd35f 2683 tree vars = NULL_TREE;
75a70cf9 2684 tree static_chain = gimple_call_chain (stmt);
e343483a 2685
2686 /* Figure out what the parameters are. */
b0cdf642 2687 parms = DECL_ARGUMENTS (fn);
e343483a 2688
e343483a 2689 /* Loop through the parameter declarations, replacing each with an
2690 equivalent VAR_DECL, appropriately initialized. */
1767a056 2691 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
75a70cf9 2692 {
2693 tree val;
2694 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2695 setup_one_parameter (id, p, val, fn, bb, &vars);
2696 }
2f25e4ca 2697 /* After remapping the parameters, remap their types. This has to be done
2698 in a second loop over all parameters to appropriately remap
2699 variable sized arrays when the size is specified in a
2700 parameter following the array. */
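  /* E.g. (illustration): when the type of parameter A refers to another
     parameter N, as with a variable-length array parameter, the type of
     A's replacement can only be remapped once N's replacement is in the
     map, hence this second loop.  */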
1767a056 2701 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2f25e4ca 2702 {
2703 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2704 if (varp
2705 && TREE_CODE (*varp) == VAR_DECL)
2706 {
bc6af3fe 2707 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2f25e4ca 2708 ? gimple_default_def (id->src_cfun, p) : NULL);
bc6af3fe 2709 tree var = *varp;
2710 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2f25e4ca 2711 /* Also remap the default definition if it was remapped
2712 to the default definition of the parameter replacement
2713 by the parameter setup. */
bc6af3fe 2714 if (def)
2f25e4ca 2715 {
2716 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2717 if (defp
2718 && TREE_CODE (*defp) == SSA_NAME
bc6af3fe 2719 && SSA_NAME_VAR (*defp) == var)
2720 TREE_TYPE (*defp) = TREE_TYPE (var);
2f25e4ca 2721 }
2722 }
2723 }
e619d7b1 2724
4ee9c684 2725 /* Initialize the static chain. */
2726 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
469679ab 2727 gcc_assert (fn != current_function_decl);
4ee9c684 2728 if (p)
2729 {
2730 /* No static chain? Seems like a bug in tree-nested.c. */
8c0963c4 2731 gcc_assert (static_chain);
e619d7b1 2732
e27482aa 2733 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
e619d7b1 2734 }
2735
e27482aa 2736 declare_inline_vars (id->block, vars);
e343483a 2737}
2738
75a70cf9 2739
e27482aa 2740/* Declare a return variable to replace the RESULT_DECL for the
2741 function we are calling. An appropriate DECL_STMT is returned.
2742 The USE_STMT is filled to contain a use of the declaration to
2743 indicate the return value of the function.
2744
deff5ffd 2745 RETURN_SLOT, if non-null, is the place where to store the result. It
2746 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
75a70cf9 2747 was the LHS of the MODIFY_EXPR of which this call is the RHS.
8e7912a5 2748
f018d957 2749 The return value is a (possibly null) value that holds the result
2750 as seen by the caller. */
e343483a 2751
d57cd35f 2752static tree
524a0531 2753declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2754 basic_block entry_bb)
e343483a 2755{
51a48c27 2756 tree callee = id->src_fn;
8e7912a5 2757 tree result = DECL_RESULT (callee);
2758 tree callee_type = TREE_TYPE (result);
c21cf15c 2759 tree caller_type;
8e7912a5 2760 tree var, use;
e343483a 2761
c21cf15c 2762 /* Handle type-mismatches in the function declaration return type
2763 vs. the call expression. */
2764 if (modify_dest)
2765 caller_type = TREE_TYPE (modify_dest);
2766 else
2767 caller_type = TREE_TYPE (TREE_TYPE (callee));
2768
e343483a 2769 /* We don't need to do anything for functions that don't return
2770 anything. */
8e7912a5 2771 if (!result || VOID_TYPE_P (callee_type))
f018d957 2772 return NULL_TREE;
e343483a 2773
806e4c12 2774 /* If there was a return slot, then the return value is the
8e7912a5 2775 dereferenced address of that object. */
deff5ffd 2776 if (return_slot)
8e7912a5 2777 {
deff5ffd 2778 /* The front end shouldn't have used both return_slot and
8e7912a5 2779 a modify expression. */
8c0963c4 2780 gcc_assert (!modify_dest);
806e4c12 2781 if (DECL_BY_REFERENCE (result))
deff5ffd 2782 {
2783 tree return_slot_addr = build_fold_addr_expr (return_slot);
2784 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2785
2786 /* We are going to construct *&return_slot and we can't do that
48e1416a 2787 for variables believed to be not addressable.
deff5ffd 2788
2789 FIXME: This check can possibly match, because values returned
2790 via return slot optimization are not believed to have their
2791 address taken by alias analysis. */
2792 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
deff5ffd 2793 var = return_slot_addr;
2794 }
806e4c12 2795 else
deff5ffd 2796 {
2797 var = return_slot;
2798 gcc_assert (TREE_CODE (var) != SSA_NAME);
dfbb5324 2799 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
deff5ffd 2800 }
8ea8de24 2801 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2802 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2803 && !DECL_GIMPLE_REG_P (result)
cd276752 2804 && DECL_P (var))
8ea8de24 2805 DECL_GIMPLE_REG_P (var) = 0;
8e7912a5 2806 use = NULL;
2807 goto done;
2808 }
2809
2810 /* All types requiring non-trivial constructors should have been handled. */
8c0963c4 2811 gcc_assert (!TREE_ADDRESSABLE (callee_type));
8e7912a5 2812
2813 /* Attempt to avoid creating a new temporary variable. */
deff5ffd 2814 if (modify_dest
2815 && TREE_CODE (modify_dest) != SSA_NAME)
8e7912a5 2816 {
2817 bool use_it = false;
2818
2819 /* We can't use MODIFY_DEST if there's type promotion involved. */
c8ca3ee7 2820 if (!useless_type_conversion_p (callee_type, caller_type))
8e7912a5 2821 use_it = false;
2822
2823 /* ??? If we're assigning to a variable sized type, then we must
2824 reuse the destination variable, because we've no good way to
2825 create variable sized temporaries at this point. */
2826 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2827 use_it = true;
2828
2829 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2830 reuse it as the result of the call directly. Don't do this if
2831 it would promote MODIFY_DEST to addressable. */
560613ae 2832 else if (TREE_ADDRESSABLE (result))
2833 use_it = false;
2834 else
2835 {
2836 tree base_m = get_base_address (modify_dest);
2837
2838 /* If the base isn't a decl, then it's a pointer, and we don't
2839 know where that's going to go. */
2840 if (!DECL_P (base_m))
2841 use_it = false;
2842 else if (is_global_var (base_m))
2843 use_it = false;
8ea8de24 2844 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2845 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2846 && !DECL_GIMPLE_REG_P (result)
2847 && DECL_GIMPLE_REG_P (base_m))
3808a9ff 2848 use_it = false;
560613ae 2849 else if (!TREE_ADDRESSABLE (base_m))
2850 use_it = true;
2851 }
8e7912a5 2852
2853 if (use_it)
2854 {
2855 var = modify_dest;
2856 use = NULL;
2857 goto done;
2858 }
2859 }
2860
8c0963c4 2861 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
8e7912a5 2862
25b3017b 2863 var = copy_result_decl_to_var (result, id);
deff5ffd 2864 if (gimple_in_ssa_p (cfun))
649597af 2865 add_referenced_var (var);
e27482aa 2866
8e7912a5 2867 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
8e7912a5 2868
4ee9c684 2869 /* Do not have the rest of GCC warn about this variable as it should
dac49aa5 2870 not be visible to the user. */
4ee9c684 2871 TREE_NO_WARNING (var) = 1;
e343483a 2872
25b3017b 2873 declare_inline_vars (id->block, var);
2874
8e7912a5 2875 /* Build the use expr. If the return type of the function was
2876 promoted, convert it back to the expected type. */
2877 use = var;
c8ca3ee7 2878 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
8e7912a5 2879 use = fold_convert (caller_type, var);
48e1416a 2880
aee8cc15 2881 STRIP_USELESS_TYPE_CONVERSION (use);
8e7912a5 2882
25b3017b 2883 if (DECL_BY_REFERENCE (result))
8ad54c54 2884 {
2885 TREE_ADDRESSABLE (var) = 1;
2886 var = build_fold_addr_expr (var);
2887 }
25b3017b 2888
8e7912a5 2889 done:
e343483a 2890 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2891 way, when the RESULT_DECL is encountered, it will be
524a0531 2892 automatically replaced by the VAR_DECL.
2893
2894 When returning by reference, ensure that RESULT_DECL remaps to
2895 gimple_val. */
2896 if (DECL_BY_REFERENCE (result)
2897 && !is_gimple_val (var))
2898 {
2899 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
1aeb48b8 2900 if (gimple_in_ssa_p (id->src_cfun))
649597af 2901 add_referenced_var (temp);
524a0531 2902 insert_decl_map (id, result, temp);
883fe359 2903 /* When RESULT_DECL is in SSA form, we need to use its default_def
2904 SSA_NAME. */
2905 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2906 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
524a0531 2907 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2908 }
2909 else
2910 insert_decl_map (id, result, var);
e343483a 2911
4ee9c684 2912 /* Remember this so we can ignore it in remap_decls. */
2913 id->retvar = var;
2914
f018d957 2915 return use;
e343483a 2916}
2917
d747fdfb 2918/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2919 to a local label. */
e619d7b1 2920
d747fdfb 2921static tree
2922has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
e619d7b1 2923{
d747fdfb 2924 tree node = *nodep;
2925 tree fn = (tree) fnp;
75a70cf9 2926
d747fdfb 2927 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2928 return node;
2929
2930 if (TYPE_P (node))
2931 *walk_subtrees = 0;
2932
2933 return NULL_TREE;
2934}
75a70cf9 2935
d747fdfb 2936/* Determine if the function can be copied. If so return NULL. If
2937 not return a string describng the reason for failure. */
2938
2939static const char *
2940copy_forbidden (struct function *fun, tree fndecl)
2941{
2942 const char *reason = fun->cannot_be_copied_reason;
2ab2ce89 2943 tree decl;
2944 unsigned ix;
d747fdfb 2945
2946 /* Only examine the function once. */
2947 if (fun->cannot_be_copied_set)
2948 return reason;
2949
2950 /* We cannot copy a function that receives a non-local goto
2951 because we cannot remap the destination label used in the
2952 function that is performing the non-local goto. */
2953 /* ??? Actually, this should be possible, if we work at it.
2954 No doubt there's just a handful of places that simply
2955 assume it doesn't happen and don't substitute properly. */
2956 if (fun->has_nonlocal_label)
2957 {
2958 reason = G_("function %q+F can never be copied "
2959 "because it receives a non-local goto");
2960 goto fail;
2961 }
2962
2ab2ce89 2963 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2964 if (TREE_CODE (decl) == VAR_DECL
2965 && TREE_STATIC (decl)
2966 && !DECL_EXTERNAL (decl)
2967 && DECL_INITIAL (decl)
2968 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2969 has_label_address_in_static_1,
2970 fndecl))
2971 {
2972 reason = G_("function %q+F can never be copied because it saves "
2973 "address of local label in a static variable");
2974 goto fail;
2975 }
d747fdfb 2976
2977 fail:
2978 fun->cannot_be_copied_reason = reason;
2979 fun->cannot_be_copied_set = true;
2980 return reason;
2981}
2982
2983
2984static const char *inline_forbidden_reason;
2985
2986/* A callback for walk_gimple_seq to handle statements. Returns non-null
2987 iff a function cannot be inlined. Also sets the reason why. */
9a7ecb49 2988
9a7ecb49 2989static tree
75a70cf9 2990inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2991 struct walk_stmt_info *wip)
9a7ecb49 2992{
75a70cf9 2993 tree fn = (tree) wip->info;
bc597501 2994 tree t;
75a70cf9 2995 gimple stmt = gsi_stmt (*gsi);
9a7ecb49 2996
75a70cf9 2997 switch (gimple_code (stmt))
bc597501 2998 {
75a70cf9 2999 case GIMPLE_CALL:
1f2706b2 3000 /* Refuse to inline an alloca call unless the user explicitly forces it,
3001 as this may change the program's memory overhead drastically when the
3002 function using alloca is called in a loop. In the GCC included in
3003 SPEC2000, inlining into schedule_block caused it to require 2GB of
a882d754 3004 RAM instead of 256MB. Don't do so for alloca calls emitted for
3005 VLA objects, as those can't cause unbounded growth (they're always
3006 wrapped inside stack_save/stack_restore regions). */
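      /* E.g. (sketch): inlining

	   static inline void *tmpbuf (size_t n) { return alloca (n); }

	 into a loop body keeps each iteration's allocation live until
	 the caller returns, so stack usage grows with the trip count.  */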
75a70cf9 3007 if (gimple_alloca_call_p (stmt)
a882d754 3008 && !gimple_call_alloca_for_var_p (stmt)
bc597501 3009 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3010 {
9bc3739f 3011 inline_forbidden_reason
3cf8b391 3012 = G_("function %q+F can never be inlined because it uses "
9bc3739f 3013 "alloca (override using the always_inline attribute)");
75a70cf9 3014 *handled_ops_p = true;
3015 return fn;
bc597501 3016 }
75a70cf9 3017
3018 t = gimple_call_fndecl (stmt);
3019 if (t == NULL_TREE)
bc597501 3020 break;
cb9d4058 3021
bc597501 3022 /* We cannot inline functions that call setjmp. */
3023 if (setjmp_call_p (t))
3024 {
9bc3739f 3025 inline_forbidden_reason
3cf8b391 3026 = G_("function %q+F can never be inlined because it uses setjmp");
75a70cf9 3027 *handled_ops_p = true;
3028 return t;
bc597501 3029 }
3030
4ee9c684 3031 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1f2706b2 3032 switch (DECL_FUNCTION_CODE (t))
bc597501 3033 {
1f2706b2 3034 /* We cannot inline functions that take a variable number of
3035 arguments. */
3036 case BUILT_IN_VA_START:
1f2706b2 3037 case BUILT_IN_NEXT_ARG:
3038 case BUILT_IN_VA_END:
4ee9c684 3039 inline_forbidden_reason
3cf8b391 3040 = G_("function %q+F can never be inlined because it "
4ee9c684 3041 "uses variable argument lists");
75a70cf9 3042 *handled_ops_p = true;
3043 return t;
4ee9c684 3044
1f2706b2 3045 case BUILT_IN_LONGJMP:
4ee9c684 3046 /* We can't inline functions that call __builtin_longjmp at
3047 all. The non-local goto machinery really requires the
3048 destination be in a different function. If we allow the
3049 function calling __builtin_longjmp to be inlined into the
3050 function calling __builtin_setjmp, Things will Go Awry. */
3051 inline_forbidden_reason
3cf8b391 3052 = G_("function %q+F can never be inlined because "
4ee9c684 3053 "it uses setjmp-longjmp exception handling");
75a70cf9 3054 *handled_ops_p = true;
3055 return t;
4ee9c684 3056
3057 case BUILT_IN_NONLOCAL_GOTO:
3058 /* Similarly. */
3059 inline_forbidden_reason
3cf8b391 3060 = G_("function %q+F can never be inlined because "
4ee9c684 3061 "it uses non-local goto");
75a70cf9 3062 *handled_ops_p = true;
3063 return t;
bc597501 3064
65061660 3065 case BUILT_IN_RETURN:
3066 case BUILT_IN_APPLY_ARGS:
3067 /* If a __builtin_apply_args caller would be inlined,
3068 it would be saving arguments of the function it has
3069 been inlined into. Similarly __builtin_return would
3070 return from the function the inline has been inlined into. */
3071 inline_forbidden_reason
3cf8b391 3072 = G_("function %q+F can never be inlined because "
65061660 3073 "it uses __builtin_return or __builtin_apply_args");
75a70cf9 3074 *handled_ops_p = true;
3075 return t;
65061660 3076
1f2706b2 3077 default:
3078 break;
3079 }
bc597501 3080 break;
3081
75a70cf9 3082 case GIMPLE_GOTO:
3083 t = gimple_goto_dest (stmt);
bc597501 3084
3085 /* We will not inline a function which uses computed goto. The
3086 addresses of its local labels, which may be tucked into
3087 global storage, are of course not constant across
3088 instantiations, which causes unexpected behavior. */
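      /* E.g. (sketch): a function dispatching via  goto *table[i];
	 where TABLE holds &&label addresses cannot safely be duplicated,
	 as the stored addresses would still point into the original
	 body.  */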
3089 if (TREE_CODE (t) != LABEL_DECL)
3090 {
9bc3739f 3091 inline_forbidden_reason
3cf8b391 3092 = G_("function %q+F can never be inlined "
9bc3739f 3093 "because it contains a computed goto");
75a70cf9 3094 *handled_ops_p = true;
3095 return t;
bc597501 3096 }
4ee9c684 3097 break;
bc597501 3098
bc597501 3099 default:
3100 break;
3101 }
3102
75a70cf9 3103 *handled_ops_p = false;
bc597501 3104 return NULL_TREE;
cb9d4058 3105}
3106
75a70cf9 3107/* Return true if FNDECL is a function that cannot be inlined into
3108 another one. */
3109
3110static bool
bc597501 3111inline_forbidden_p (tree fndecl)
cb9d4058 3112{
361c9aad 3113 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
75a70cf9 3114 struct walk_stmt_info wi;
3115 struct pointer_set_t *visited_nodes;
3116 basic_block bb;
3117 bool forbidden_p = false;
3118
d747fdfb 3119 /* First check for shared reasons not to copy the code. */
3120 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3121 if (inline_forbidden_reason != NULL)
3122 return true;
3123
3124 /* Next, walk the statements of the function looking for
3125 constructs we can't handle or that are non-optimal for inlining. */
75a70cf9 3126 visited_nodes = pointer_set_create ();
3127 memset (&wi, 0, sizeof (wi));
3128 wi.info = (void *) fndecl;
3129 wi.pset = visited_nodes;
e27482aa 3130
361c9aad 3131 FOR_EACH_BB_FN (bb, fun)
75a70cf9 3132 {
3133 gimple ret;
3134 gimple_seq seq = bb_seq (bb);
d747fdfb 3135 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
75a70cf9 3136 forbidden_p = (ret != NULL);
3137 if (forbidden_p)
d747fdfb 3138 break;
361c9aad 3139 }
3140
75a70cf9 3141 pointer_set_destroy (visited_nodes);
75a70cf9 3142 return forbidden_p;
cb9d4058 3143}
3144
746149b7 3145/* Returns nonzero if FN is a function that does not have any
3146 fundamental inline blocking properties. */
e343483a 3147
d747fdfb 3148bool
3149tree_inlinable_function_p (tree fn)
e343483a 3150{
746149b7 3151 bool inlinable = true;
7cc6d7a8 3152 bool do_warning;
3153 tree always_inline;
e343483a 3154
3155 /* If we've already decided this function shouldn't be inlined,
3156 there's no need to check again. */
3157 if (DECL_UNINLINABLE (fn))
746149b7 3158 return false;
e343483a 3159
7cc6d7a8 3160 /* We only warn for functions declared `inline' by the user. */
3161 do_warning = (warn_inline
7cc6d7a8 3162 && DECL_DECLARED_INLINE_P (fn)
73b5e722 3163 && !DECL_NO_INLINE_WARNING_P (fn)
7cc6d7a8 3164 && !DECL_IN_SYSTEM_HEADER (fn));
3165
3166 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3167
1c2f0012 3168 if (flag_no_inline
7cc6d7a8 3169 && always_inline == NULL)
3170 {
3171 if (do_warning)
3172 warning (OPT_Winline, "function %q+F can never be inlined because it "
3173 "is suppressed using -fno-inline", fn);
3174 inlinable = false;
3175 }
3176
7cc6d7a8 3177 else if (!function_attribute_inlinable_p (fn))
3178 {
3179 if (do_warning)
3180 warning (OPT_Winline, "function %q+F can never be inlined because it "
3181 "uses attributes conflicting with inlining", fn);
3182 inlinable = false;
3183 }
60b8c5b3 3184
bc597501 3185 else if (inline_forbidden_p (fn))
746149b7 3186 {
3187 /* See if we should warn about uninlinable functions. Previously,
3188 some of these warnings would be issued while trying to expand
3189 the function inline, but that would cause multiple warnings
 3190	 about functions that would for example call alloca.  But since
 3191	 this is a property of the function, just one warning is enough.
3192 As a bonus we can now give more details about the reason why a
7cc6d7a8 3193 function is not inlinable. */
3194 if (always_inline)
a522e9eb 3195 error (inline_forbidden_reason, fn);
d731003e 3196 else if (do_warning)
b0ff69b1 3197 warning (OPT_Winline, inline_forbidden_reason, fn);
746149b7 3198
3199 inlinable = false;
3200 }
e343483a 3201
3202 /* Squirrel away the result so that we don't have to check again. */
746149b7 3203 DECL_UNINLINABLE (fn) = !inlinable;
e343483a 3204
746149b7 3205 return inlinable;
3206}
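/* Illustrative example (not GCC source): for a function such as

     inline int f (unsigned n) { return *(int *) alloca (n); }

   the use of alloca can make inline_forbidden_p return true, and with
   -Winline the diagnostic is then emitted here exactly once per
   function instead of once per attempted inline expansion.  */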
3207
bc7a14a6 3208/* Estimate the cost of a memory move.  Use machine-dependent word size
 3209   and take a possible memcpy call into account.  */
3210
3211int
3212estimate_move_cost (tree type)
3213{
3214 HOST_WIDE_INT size;
3215
31359ae8 3216 gcc_assert (!VOID_TYPE_P (type));
3217
a97439f9 3218 if (TREE_CODE (type) == VECTOR_TYPE)
3219 {
3220 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3221 enum machine_mode simd
3222 = targetm.vectorize.preferred_simd_mode (inner);
3223 int simd_mode_size = GET_MODE_SIZE (simd);
3224 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3225 / simd_mode_size);
3226 }
3227
bc7a14a6 3228 size = int_size_in_bytes (type);
3229
f5733e7c 3230 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
bc7a14a6 3231 /* Cost of a memcpy call, 3 arguments and the call. */
3232 return 4;
3233 else
3234 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3235}
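/* A worked example (illustrative; both parameters are target dependent):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 8, a 24-byte struct costs
   (24 + 8 - 1) / 8 == 3 units, while a 128-byte struct exceeds the
   64-byte threshold and is charged the flat memcpy cost of 4.  */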
3236
75a70cf9 3237/* Returns the cost of operation CODE, according to WEIGHTS.  */
bc8bb825 3238
75a70cf9 3239static int
94e6e359 3240estimate_operator_cost (enum tree_code code, eni_weights *weights,
3241 tree op1 ATTRIBUTE_UNUSED, tree op2)
4ee9c684 3242{
75a70cf9 3243 switch (code)
4ee9c684 3244 {
75a70cf9 3245 /* These are "free" conversions, or their presumed cost
3246 is folded into other operations. */
51984d2b 3247 case RANGE_EXPR:
d9659041 3248 CASE_CONVERT:
75a70cf9 3249 case COMPLEX_EXPR:
3250 case PAREN_EXPR:
a790c42a 3251 case VIEW_CONVERT_EXPR:
75a70cf9 3252 return 0;
4ee9c684 3253
bc7a14a6 3254 /* Assign cost of 1 to usual operations.
3255 ??? We may consider mapping RTL costs to this. */
4ee9c684 3256 case COND_EXPR:
dcfca6f8 3257 case VEC_COND_EXPR:
4ee9c684 3258
3259 case PLUS_EXPR:
0de36bdb 3260 case POINTER_PLUS_EXPR:
4ee9c684 3261 case MINUS_EXPR:
3262 case MULT_EXPR:
b9be572e 3263 case FMA_EXPR:
4ee9c684 3264
bd1a81f7 3265 case ADDR_SPACE_CONVERT_EXPR:
06f0b99c 3266 case FIXED_CONVERT_EXPR:
4ee9c684 3267 case FIX_TRUNC_EXPR:
4ee9c684 3268
3269 case NEGATE_EXPR:
3270 case FLOAT_EXPR:
3271 case MIN_EXPR:
3272 case MAX_EXPR:
3273 case ABS_EXPR:
3274
3275 case LSHIFT_EXPR:
3276 case RSHIFT_EXPR:
3277 case LROTATE_EXPR:
3278 case RROTATE_EXPR:
925c62d4 3279 case VEC_LSHIFT_EXPR:
3280 case VEC_RSHIFT_EXPR:
4ee9c684 3281
3282 case BIT_IOR_EXPR:
3283 case BIT_XOR_EXPR:
3284 case BIT_AND_EXPR:
3285 case BIT_NOT_EXPR:
3286
3287 case TRUTH_ANDIF_EXPR:
3288 case TRUTH_ORIF_EXPR:
3289 case TRUTH_AND_EXPR:
3290 case TRUTH_OR_EXPR:
3291 case TRUTH_XOR_EXPR:
3292 case TRUTH_NOT_EXPR:
3293
3294 case LT_EXPR:
3295 case LE_EXPR:
3296 case GT_EXPR:
3297 case GE_EXPR:
3298 case EQ_EXPR:
3299 case NE_EXPR:
3300 case ORDERED_EXPR:
3301 case UNORDERED_EXPR:
3302
3303 case UNLT_EXPR:
3304 case UNLE_EXPR:
3305 case UNGT_EXPR:
3306 case UNGE_EXPR:
3307 case UNEQ_EXPR:
318a728f 3308 case LTGT_EXPR:
4ee9c684 3309
4ee9c684 3310 case CONJ_EXPR:
3311
3312 case PREDECREMENT_EXPR:
3313 case PREINCREMENT_EXPR:
3314 case POSTDECREMENT_EXPR:
3315 case POSTINCREMENT_EXPR:
3316
a0e79db9 3317 case REALIGN_LOAD_EXPR:
3318
ea8f3370 3319 case REDUC_MAX_EXPR:
3320 case REDUC_MIN_EXPR:
3321 case REDUC_PLUS_EXPR:
4a61a337 3322 case WIDEN_SUM_EXPR:
75a70cf9 3323 case WIDEN_MULT_EXPR:
3324 case DOT_PROD_EXPR:
00f4f705 3325 case WIDEN_MULT_PLUS_EXPR:
3326 case WIDEN_MULT_MINUS_EXPR:
75a70cf9 3327
c6c91d61 3328 case VEC_WIDEN_MULT_HI_EXPR:
3329 case VEC_WIDEN_MULT_LO_EXPR:
3330 case VEC_UNPACK_HI_EXPR:
3331 case VEC_UNPACK_LO_EXPR:
8aa4e142 3332 case VEC_UNPACK_FLOAT_HI_EXPR:
3333 case VEC_UNPACK_FLOAT_LO_EXPR:
bb8107e7 3334 case VEC_PACK_TRUNC_EXPR:
c6c91d61 3335 case VEC_PACK_SAT_EXPR:
8aa4e142 3336 case VEC_PACK_FIX_TRUNC_EXPR:
6b8dbb53 3337 case VEC_EXTRACT_EVEN_EXPR:
3338 case VEC_EXTRACT_ODD_EXPR:
3339 case VEC_INTERLEAVE_HIGH_EXPR:
3340 case VEC_INTERLEAVE_LOW_EXPR:
3341
75a70cf9 3342 return 1;
4ee9c684 3343
365db11e 3344    /* A few special cases of expensive operations.  This is useful
4ee9c684 3345       to avoid inlining functions that have too many of these.  */
3346 case TRUNC_DIV_EXPR:
3347 case CEIL_DIV_EXPR:
3348 case FLOOR_DIV_EXPR:
3349 case ROUND_DIV_EXPR:
3350 case EXACT_DIV_EXPR:
3351 case TRUNC_MOD_EXPR:
3352 case CEIL_MOD_EXPR:
3353 case FLOOR_MOD_EXPR:
3354 case ROUND_MOD_EXPR:
3355 case RDIV_EXPR:
94e6e359 3356 if (TREE_CODE (op2) != INTEGER_CST)
3357 return weights->div_mod_cost;
3358 return 1;
75a70cf9 3359
3360 default:
3361 /* We expect a copy assignment with no operator. */
3362 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3363 return 0;
3364 }
3365}
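/* Illustrative examples of the weighting above (not GCC source):

     a = b + c;    PLUS_EXPR                      -> 1
     a = b / 4;    TRUNC_DIV_EXPR, constant op2   -> 1
     a = b / c;    TRUNC_DIV_EXPR, variable op2   -> weights->div_mod_cost
     a = (int) b;  CASE_CONVERT                   -> 0  */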
3366
3367
3368/* Estimate number of instructions that will be created by expanding
3369 the statements in the statement sequence STMTS.
3370 WEIGHTS contains weights attributed to various constructs. */
3371
 3372static int
 3373estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3374{
3375 int cost;
3376 gimple_stmt_iterator gsi;
3377
3378 cost = 0;
3379 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3380 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3381
3382 return cost;
3383}
3384
3385
3386/* Estimate number of instructions that will be created by expanding STMT.
3387 WEIGHTS contains weights attributed to various constructs. */
3388
3389int
3390estimate_num_insns (gimple stmt, eni_weights *weights)
3391{
3392 unsigned cost, i;
3393 enum gimple_code code = gimple_code (stmt);
3394 tree lhs;
94e6e359 3395 tree rhs;
75a70cf9 3396
3397 switch (code)
3398 {
3399 case GIMPLE_ASSIGN:
 3400      /* Try to estimate the cost of assignments.  We have two cases to
 3401	 deal with:
3402 1) Simple assignments to registers;
3403 2) Stores to things that must live in memory. This includes
3404 "normal" stores to scalars, but also assignments of large
3405 structures, or constructors of big arrays;
3406
 3407	 Let us look at both cases, assuming we have "a = b + C":
3408 <GIMPLE_ASSIGN <var_decl "a">
3409 <plus_expr <var_decl "b"> <constant C>>
3410 If "a" is a GIMPLE register, the assignment to it is free on almost
3411 any target, because "a" usually ends up in a real register. Hence
3412 the only cost of this expression comes from the PLUS_EXPR, and we
3413 can ignore the GIMPLE_ASSIGN.
3414 If "a" is not a GIMPLE register, the assignment to "a" will most
3415 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3416 of moving something into "a", which we compute using the function
3417 estimate_move_cost. */
3418 lhs = gimple_assign_lhs (stmt);
94e6e359 3419 rhs = gimple_assign_rhs1 (stmt);
3420
75a70cf9 3421 if (is_gimple_reg (lhs))
3422 cost = 0;
3423 else
3424 cost = estimate_move_cost (TREE_TYPE (lhs));
3425
94e6e359 3426 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3427 cost += estimate_move_cost (TREE_TYPE (rhs));
3428
3429 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3430 gimple_assign_rhs1 (stmt),
3431 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3432 == GIMPLE_BINARY_RHS
3433 ? gimple_assign_rhs2 (stmt) : NULL);
75a70cf9 3434 break;
3435
3436 case GIMPLE_COND:
94e6e359 3437 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3438 gimple_op (stmt, 0),
3439 gimple_op (stmt, 1));
75a70cf9 3440 break;
3441
3442 case GIMPLE_SWITCH:
3443 /* Take into account cost of the switch + guess 2 conditional jumps for
48e1416a 3444 each case label.
75a70cf9 3445
 3446	 TODO: once the switch expansion logic is sufficiently separated, we can
 3447	 do a better job of estimating the cost of the switch.  */
94e6e359 3448 if (weights->time_based)
3449 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3450 else
3451 cost = gimple_switch_num_labels (stmt) * 2;
4ee9c684 3452 break;
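      /* Illustrative: a switch with 16 labels is charged 16 * 2 == 32
	 in the size view, but only floor_log2 (16) * 2 == 8 in the
	 time view, matching a balanced tree of conditional jumps.  */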
75a70cf9 3453
3454 case GIMPLE_CALL:
4ee9c684 3455 {
75a70cf9 3456 tree decl = gimple_call_fndecl (stmt);
924de091 3457 struct cgraph_node *node;
4ee9c684 3458
958b3c8a 3459	/* Do not special-case builtins where we see the body.
 3460	   This just confuses the inliner.  */
924de091 3461 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
5ccf5ee5 3462 ;
958b3c8a 3463	/* For builtins that are likely expanded to nothing or
 3464	   inlined, do not account for operand costs.  */
3465 else if (is_simple_builtin (decl))
a6b74a67 3466 return 0;
3467 else if (is_inexpensive_builtin (decl))
958b3c8a 3468 return weights->target_builtin_call_cost;
5ccf5ee5 3469 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3470 {
3471 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3472 specialize the cheap expansion we do here.
3473 ??? This asks for a more general solution. */
3474 switch (DECL_FUNCTION_CODE (decl))
3475 {
3476 case BUILT_IN_POW:
3477 case BUILT_IN_POWF:
3478 case BUILT_IN_POWL:
3479 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3480 && REAL_VALUES_EQUAL
3481 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3482 return estimate_operator_cost (MULT_EXPR, weights,
3483 gimple_call_arg (stmt, 0),
3484 gimple_call_arg (stmt, 0));
3485 break;
3486
3487 default:
3488 break;
3489 }
3490 }
48e1416a 3491
5ccf5ee5 3492 cost = weights->call_cost;
5ed92cc9 3493 if (gimple_call_lhs (stmt))
3494 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3495 for (i = 0; i < gimple_call_num_args (stmt); i++)
e0645921 3496 {
5ed92cc9 3497 tree arg = gimple_call_arg (stmt, i);
3498 cost += estimate_move_cost (TREE_TYPE (arg));
e0645921 3499 }
4ee9c684 3500 break;
3501 }
ee68bf10 3502
958b3c8a 3503 case GIMPLE_RETURN:
3504 return weights->return_cost;
3505
75a70cf9 3506 case GIMPLE_GOTO:
3507 case GIMPLE_LABEL:
3508 case GIMPLE_NOP:
3509 case GIMPLE_PHI:
75a70cf9 3510 case GIMPLE_PREDICT:
9845d120 3511 case GIMPLE_DEBUG:
75a70cf9 3512 return 0;
3513
3514 case GIMPLE_ASM:
1ca8efe4 3515 return asm_str_count (gimple_asm_string (stmt));
75a70cf9 3516
e38def9c 3517 case GIMPLE_RESX:
3518 /* This is either going to be an external function call with one
3519 argument, or two register copy statements plus a goto. */
3520 return 2;
3521
3522 case GIMPLE_EH_DISPATCH:
3523 /* ??? This is going to turn into a switch statement. Ideally
3524 we'd have a look at the eh region and estimate the number of
3525 edges involved. */
3526 return 10;
3527
75a70cf9 3528 case GIMPLE_BIND:
3529 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3530
3531 case GIMPLE_EH_FILTER:
3532 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3533
3534 case GIMPLE_CATCH:
3535 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3536
3537 case GIMPLE_TRY:
3538 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3539 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3540
3541 /* OpenMP directives are generally very expensive. */
3542
3543 case GIMPLE_OMP_RETURN:
3544 case GIMPLE_OMP_SECTIONS_SWITCH:
3545 case GIMPLE_OMP_ATOMIC_STORE:
3546 case GIMPLE_OMP_CONTINUE:
3547 /* ...except these, which are cheap. */
3548 return 0;
3549
3550 case GIMPLE_OMP_ATOMIC_LOAD:
3551 return weights->omp_cost;
3552
3553 case GIMPLE_OMP_FOR:
3554 return (weights->omp_cost
3555 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3556 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3557
3558 case GIMPLE_OMP_PARALLEL:
3559 case GIMPLE_OMP_TASK:
3560 case GIMPLE_OMP_CRITICAL:
3561 case GIMPLE_OMP_MASTER:
3562 case GIMPLE_OMP_ORDERED:
3563 case GIMPLE_OMP_SECTION:
3564 case GIMPLE_OMP_SECTIONS:
3565 case GIMPLE_OMP_SINGLE:
3566 return (weights->omp_cost
3567 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
ee68bf10 3568
4ee9c684 3569 default:
8c0963c4 3570 gcc_unreachable ();
4ee9c684 3571 }
75a70cf9 3572
3573 return cost;
4ee9c684 3574}
3575
75a70cf9 3576/* Estimate number of instructions that will be created by expanding
3577 function FNDECL. WEIGHTS contains weights attributed to various
3578 constructs. */
5ff0afa2 3579
4ee9c684 3580int
75a70cf9 3581estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4ee9c684 3582{
75a70cf9 3583 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3584 gimple_stmt_iterator bsi;
e27482aa 3585 basic_block bb;
75a70cf9 3586 int n = 0;
e27482aa 3587
75a70cf9 3588 gcc_assert (my_function && my_function->cfg);
3589 FOR_EACH_BB_FN (bb, my_function)
e27482aa 3590 {
75a70cf9 3591 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3592 n += estimate_num_insns (gsi_stmt (bsi), weights);
e27482aa 3593 }
e27482aa 3594
75a70cf9 3595 return n;
bc8bb825 3596}
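/* Example use (illustrative): the two global weight sets initialized
   below give the two common views of the same body:

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);  */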
3597
75a70cf9 3598
bc8bb825 3599/* Initializes weights used by estimate_num_insns. */
3600
3601void
3602init_inline_once (void)
3603{
bc8bb825 3604 eni_size_weights.call_cost = 1;
4a5b1b7c 3605 eni_size_weights.target_builtin_call_cost = 1;
bc8bb825 3606 eni_size_weights.div_mod_cost = 1;
bc8bb825 3607 eni_size_weights.omp_cost = 40;
94e6e359 3608 eni_size_weights.time_based = false;
958b3c8a 3609 eni_size_weights.return_cost = 1;
bc8bb825 3610
 3611  /* Estimating the time for a call is difficult, since we have no idea
 3612     what the called function does.  In the current uses of eni_time_weights,
 3613     underestimating the cost does less harm than overestimating it, so
85694bac 3614     we choose a rather small value here.  */
bc8bb825 3615 eni_time_weights.call_cost = 10;
958b3c8a 3616 eni_time_weights.target_builtin_call_cost = 1;
bc8bb825 3617 eni_time_weights.div_mod_cost = 10;
bc8bb825 3618 eni_time_weights.omp_cost = 40;
94e6e359 3619 eni_time_weights.time_based = true;
958b3c8a 3620 eni_time_weights.return_cost = 2;
4ee9c684 3621}
3622
75a70cf9 3623/* Estimate the number of instructions in a gimple_seq. */
3624
3625int
3626count_insns_seq (gimple_seq seq, eni_weights *weights)
3627{
3628 gimple_stmt_iterator gsi;
3629 int n = 0;
3630 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3631 n += estimate_num_insns (gsi_stmt (gsi), weights);
3632
3633 return n;
3634}
3635
3636
e27482aa 3637/* Install new lexical TREE_BLOCK underneath 'current_block'. */
75a70cf9 3638
e27482aa 3639static void
cb302f29 3640prepend_lexical_block (tree current_block, tree new_block)
e27482aa 3641{
cb302f29 3642 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3643 BLOCK_SUBBLOCKS (current_block) = new_block;
e27482aa 3644 BLOCK_SUPERCONTEXT (new_block) = current_block;
e27482aa 3645}
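/* Illustrative sketch: if CURRENT_BLOCK has subblocks B1 -> B2, then
   after prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) the chain is
   NEW_BLOCK -> B1 -> B2, with BLOCK_SUPERCONTEXT (NEW_BLOCK) set to
   CURRENT_BLOCK.  */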
3646
2ab2ce89 3647/* Add local variables from CALLEE to CALLER. */
3648
3649static inline void
3650add_local_variables (struct function *callee, struct function *caller,
3651 copy_body_data *id, bool check_var_ann)
3652{
3653 tree var;
3654 unsigned ix;
3655
3656 FOR_EACH_LOCAL_DECL (callee, ix, var)
3657 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3658 {
3659 if (!check_var_ann
3660 || (var_ann (var) && add_referenced_var (var)))
3661 add_local_decl (caller, var);
3662 }
3663 else if (!can_be_nonlocal (var, id))
2e4223e3 3664 {
3665 tree new_var = remap_decl (var, id);
3666
3667 /* Remap debug-expressions. */
3668 if (TREE_CODE (new_var) == VAR_DECL
3669 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3670 && new_var != var)
3671 {
3672 tree tem = DECL_DEBUG_EXPR (var);
3673 bool old_regimplify = id->regimplify;
3674 id->remapping_type_depth++;
3675 walk_tree (&tem, copy_tree_body_r, id, NULL);
3676 id->remapping_type_depth--;
3677 id->regimplify = old_regimplify;
3678 SET_DECL_DEBUG_EXPR (new_var, tem);
3679 }
3680 add_local_decl (caller, new_var);
3681 }
2ab2ce89 3682}
3683
75a70cf9 3684/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
e343483a 3685
e27482aa 3686static bool
75a70cf9 3687expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
e343483a 3688{
f018d957 3689 tree use_retvar;
d57cd35f 3690 tree fn;
9845d120 3691 struct pointer_map_t *st, *dst;
deff5ffd 3692 tree return_slot;
8e7912a5 3693 tree modify_dest;
4ee9c684 3694 location_t saved_location;
e27482aa 3695 struct cgraph_edge *cg_edge;
326a9581 3696 cgraph_inline_failed_t reason;
e27482aa 3697 basic_block return_block;
3698 edge e;
75a70cf9 3699 gimple_stmt_iterator gsi, stmt_gsi;
e27482aa 3700 bool successfully_inlined = FALSE;
2c8a1497 3701 bool purge_dead_abnormal_edges;
e343483a 3702
4ee9c684 3703 /* Set input_location here so we get the right instantiation context
3704 if we call instantiate_decl from inlinable_function_p. */
3705 saved_location = input_location;
75a70cf9 3706 if (gimple_has_location (stmt))
3707 input_location = gimple_location (stmt);
4ee9c684 3708
e343483a 3709 /* From here on, we're only interested in CALL_EXPRs. */
75a70cf9 3710 if (gimple_code (stmt) != GIMPLE_CALL)
4ee9c684 3711 goto egress;
e343483a 3712
b819947c 3713 cg_edge = cgraph_edge (id->dst_node, stmt);
3714 gcc_checking_assert (cg_edge);
e343483a 3715 /* First, see if we can figure out what function is being called.
3716 If we cannot, then there is no hope of inlining the function. */
b819947c 3717 if (cg_edge->indirect_unknown_callee)
1caef38b 3718 goto egress;
b819947c 3719 fn = cg_edge->callee->decl;
3720 gcc_checking_assert (fn);
d7c6d889 3721
75a70cf9 3722 /* If FN is a declaration of a function in a nested scope that was
ad850f1c 3723 globally declared inline, we don't set its DECL_INITIAL.
3724 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3725 C++ front-end uses it for cdtors to refer to their internal
3726 declarations, that are not real functions. Fortunately those
3727 don't have trees to be saved, so we can tell by checking their
75a70cf9 3728 gimple_body. */
3729 if (!DECL_INITIAL (fn)
ad850f1c 3730 && DECL_ABSTRACT_ORIGIN (fn)
1a1a827a 3731 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
ad850f1c 3732 fn = DECL_ABSTRACT_ORIGIN (fn);
3733
cbeb677e 3734 /* Don't try to inline functions that are not well-suited to inlining. */
e27482aa 3735 if (!cgraph_inline_p (cg_edge, &reason))
28f45805 3736 {
f8daee9b 3737 /* If this call was originally indirect, we do not want to emit any
3738 inlining related warnings or sorry messages because there are no
3739 guarantees regarding those. */
799c8711 3740 if (cg_edge->indirect_inlining_edge)
f8daee9b 3741 goto egress;
3742
6686ff93 3743 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3744 /* Avoid warnings during early inline pass. */
a522e9eb 3745 && cgraph_global_info_ready
3746 /* PR 20090218-1_0.c. Body can be provided by another module. */
3747 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
d731003e 3748 {
a522e9eb 3749 error ("inlining failed in call to always_inline %q+F: %s", fn,
3750 cgraph_inline_failed_string (reason));
3751 error ("called from here");
d731003e 3752 }
59a52681 3753 else if (warn_inline
3754 && DECL_DECLARED_INLINE_P (fn)
3755 && !DECL_NO_INLINE_WARNING_P (fn)
d731003e 3756 && !DECL_IN_SYSTEM_HEADER (fn)
326a9581 3757 && reason != CIF_UNSPECIFIED
9e0baf4d 3758 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
17c205c9 3759 /* Do not warn about not inlined recursive calls. */
3760 && !cgraph_edge_recursive_p (cg_edge)
9e0baf4d 3761 /* Avoid warnings during early inline pass. */
6329636b 3762 && cgraph_global_info_ready)
28f45805 3763 {
3cf8b391 3764 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
836045d7 3765 fn, _(cgraph_inline_failed_string (reason)));
6bf97f82 3766 warning (OPT_Winline, "called from here");
28f45805 3767 }
4ee9c684 3768 goto egress;
28f45805 3769 }
469679ab 3770 fn = cg_edge->callee->decl;
e343483a 3771
b0cdf642 3772#ifdef ENABLE_CHECKING
51a48c27 3773 if (cg_edge->callee->decl != id->dst_node->decl)
e27482aa 3774 verify_cgraph_node (cg_edge->callee);
b0cdf642 3775#endif
3776
e27482aa 3777 /* We will be inlining this callee. */
e38def9c 3778 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
e27482aa 3779
58d82cd0 3780 /* Update the callers EH personality. */
3781 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3782 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3783 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3784
75a70cf9 3785 /* Split the block holding the GIMPLE_CALL. */
e27482aa 3786 e = split_block (bb, stmt);
3787 bb = e->src;
3788 return_block = e->dest;
3789 remove_edge (e);
3790
2c8a1497 3791 /* split_block splits after the statement; work around this by
3792 moving the call into the second block manually. Not pretty,
3793 but seems easier than doing the CFG manipulation by hand
75a70cf9 3794 when the GIMPLE_CALL is in the last statement of BB. */
3795 stmt_gsi = gsi_last_bb (bb);
3796 gsi_remove (&stmt_gsi, false);
2c8a1497 3797
75a70cf9 3798 /* If the GIMPLE_CALL was in the last statement of BB, it may have
2c8a1497 3799 been the source of abnormal edges. In this case, schedule
3800 the removal of dead abnormal edges. */
75a70cf9 3801 gsi = gsi_start_bb (return_block);
3802 if (gsi_end_p (gsi))
e27482aa 3803 {
75a70cf9 3804 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
2c8a1497 3805 purge_dead_abnormal_edges = true;
e27482aa 3806 }
2c8a1497 3807 else
3808 {
75a70cf9 3809 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
2c8a1497 3810 purge_dead_abnormal_edges = false;
3811 }
3812
75a70cf9 3813 stmt_gsi = gsi_start_bb (return_block);
054e01a7 3814
d57cd35f 3815 /* Build a block containing code to initialize the arguments, the
3816 actual inline expansion of the body, and a label for the return
3817 statements within the function to jump to. The type of the
3818 statement expression is the return type of the function call. */
e27482aa 3819 id->block = make_node (BLOCK);
3820 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
44276901 3821 BLOCK_SOURCE_LOCATION (id->block) = input_location;
cb302f29 3822 prepend_lexical_block (gimple_block (stmt), id->block);
e27482aa 3823
e343483a 3824 /* Local declarations will be replaced by their equivalents in this
3825 map. */
3826 st = id->decl_map;
e3022db7 3827 id->decl_map = pointer_map_create ();
9845d120 3828 dst = id->debug_map;
3829 id->debug_map = NULL;
e343483a 3830
e27482aa 3831 /* Record the function we are about to inline. */
51a48c27 3832 id->src_fn = fn;
3833 id->src_node = cg_edge->callee;
deff5ffd 3834 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
75a70cf9 3835 id->gimple_call = stmt;
51a48c27 3836
7f481d3e 3837 gcc_assert (!id->src_cfun->after_inlining);
3838
186f5fff 3839 id->entry_bb = bb;
1add270f 3840 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3841 {
3842 gimple_stmt_iterator si = gsi_last_bb (bb);
3843 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3844 NOT_TAKEN),
3845 GSI_NEW_STMT);
3846 }
75a70cf9 3847 initialize_inlined_parameters (id, stmt, fn, bb);
e343483a 3848
469679ab 3849 if (DECL_INITIAL (fn))
cb302f29 3850 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
b3d24a23 3851
e343483a 3852 /* Return statements in the function body will be replaced by jumps
3853 to the RET_LABEL. */
8c0963c4 3854 gcc_assert (DECL_INITIAL (fn));
3855 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
9a0c59e9 3856
75a70cf9 3857 /* Find the LHS to which the result of this call is assigned. */
deff5ffd 3858 return_slot = NULL;
75a70cf9 3859 if (gimple_call_lhs (stmt))
b1672e81 3860 {
75a70cf9 3861 modify_dest = gimple_call_lhs (stmt);
b1672e81 3862
3863 /* The function which we are inlining might not return a value,
3864 in which case we should issue a warning that the function
3865 does not return a value. In that case the optimizers will
3866 see that the variable to which the value is assigned was not
3867 initialized. We do not want to issue a warning about that
3868 uninitialized variable. */
3869 if (DECL_P (modify_dest))
3870 TREE_NO_WARNING (modify_dest) = 1;
75a70cf9 3871
3872 if (gimple_call_return_slot_opt_p (stmt))
ea523851 3873 {
deff5ffd 3874 return_slot = modify_dest;
ea523851 3875 modify_dest = NULL;
3876 }
b1672e81 3877 }
8e7912a5 3878 else
3879 modify_dest = NULL;
3880
68d6de5b 3881 /* If we are inlining a call to the C++ operator new, we don't want
3882 to use type based alias analysis on the return value. Otherwise
3883 we may get confused if the compiler sees that the inlined new
3884 function returns a pointer which was just deleted. See bug
3885 33407. */
3886 if (DECL_IS_OPERATOR_NEW (fn))
3887 {
3888 return_slot = NULL;
3889 modify_dest = NULL;
3890 }
3891
e343483a 3892 /* Declare the return variable for the function. */
524a0531 3893 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
68d6de5b 3894
b3d24a23 3895  /* Add the local variables of the inlined callee to the caller.  */
2ab2ce89 3896 add_local_variables (id->src_cfun, cfun, id, true);
b3d24a23 3897
e2d3f422 3898 if (dump_file && (dump_flags & TDF_DETAILS))
3899 {
3900 fprintf (dump_file, "Inlining ");
48e1416a 3901 print_generic_expr (dump_file, id->src_fn, 0);
e2d3f422 3902 fprintf (dump_file, " to ");
48e1416a 3903 print_generic_expr (dump_file, id->dst_fn, 0);
e2d3f422 3904 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3905 }
3906
a8305131 3907 /* This is it. Duplicate the callee body. Assume callee is
3908 pre-gimplified. Note that we must not alter the caller
3909 function in any way before this point, as this CALL_EXPR may be
3910 a self-referential call; if we're calling ourselves, we need to
3911 duplicate our body before altering anything. */
e2d3f422 3912 copy_body (id, bb->count,
3913 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
b06ab5fa 3914 bb, return_block, NULL, NULL);
a8305131 3915
cb245216 3916 /* Reset the escaped solution. */
7f81b5ee 3917 if (cfun->gimple_df)
cb245216 3918 pt_solution_reset (&cfun->gimple_df->escaped);
7f81b5ee 3919
e343483a 3920 /* Clean up. */
9845d120 3921 if (id->debug_map)
3922 {
3923 pointer_map_destroy (id->debug_map);
3924 id->debug_map = dst;
3925 }
e3022db7 3926 pointer_map_destroy (id->decl_map);
e343483a 3927 id->decl_map = st;
3928
dd277d48 3929  /* Unlink the call's virtual operands before replacing it.  */
3930 unlink_stmt_vdef (stmt);
3931
11fe6c8b 3932 /* If the inlined function returns a result that we care about,
75a70cf9 3933 substitute the GIMPLE_CALL with an assignment of the return
3934 variable to the LHS of the call. That is, if STMT was
3935 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3936 if (use_retvar && gimple_call_lhs (stmt))
e27482aa 3937 {
75a70cf9 3938 gimple old_stmt = stmt;
3939 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3940 gsi_replace (&stmt_gsi, stmt, false);
deff5ffd 3941 if (gimple_in_ssa_p (cfun))
dd277d48 3942 mark_symbols_for_renaming (stmt);
75a70cf9 3943 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e27482aa 3944 }
4ee9c684 3945 else
deff5ffd 3946 {
75a70cf9 3947 /* Handle the case of inlining a function with no return
3948 statement, which causes the return value to become undefined. */
3949 if (gimple_call_lhs (stmt)
3950 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
deff5ffd 3951 {
75a70cf9 3952 tree name = gimple_call_lhs (stmt);
3953 tree var = SSA_NAME_VAR (name);
deff5ffd 3954 tree def = gimple_default_def (cfun, var);
3955
deff5ffd 3956 if (def)
3957 {
75a70cf9 3958 /* If the variable is used undefined, make this name
3959 undefined via a move. */
3960 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3961 gsi_replace (&stmt_gsi, stmt, true);
deff5ffd 3962 }
deff5ffd 3963 else
3964 {
75a70cf9 3965 /* Otherwise make this variable undefined. */
3966 gsi_remove (&stmt_gsi, true);
deff5ffd 3967 set_default_def (var, name);
75a70cf9 3968 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
deff5ffd 3969 }
3970 }
3971 else
75a70cf9 3972 gsi_remove (&stmt_gsi, true);
deff5ffd 3973 }
e343483a 3974
2c8a1497 3975 if (purge_dead_abnormal_edges)
10f52eb8 3976 {
3977 gimple_purge_dead_eh_edges (return_block);
3978 gimple_purge_dead_abnormal_call_edges (return_block);
3979 }
11fe6c8b 3980
e27482aa 3981 /* If the value of the new expression is ignored, that's OK. We
3982 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3983 the equivalent inlined version either. */
75a70cf9 3984 if (is_gimple_assign (stmt))
3985 {
3986 gcc_assert (gimple_assign_single_p (stmt)
d9659041 3987 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
75a70cf9 3988 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3989 }
11fe6c8b 3990
9e45f419 3991 /* Output the inlining info for this abstract function, since it has been
3992 inlined. If we don't do this now, we can lose the information about the
3993 variables in the function when the blocks get blown away as soon as we
3994 remove the cgraph node. */
e27482aa 3995 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
11fe6c8b 3996
833eb724 3997 /* Update callgraph if needed. */
e27482aa 3998 cgraph_remove_node (cg_edge->callee);
833eb724 3999
e27482aa 4000 id->block = NULL_TREE;
e27482aa 4001 successfully_inlined = TRUE;
054e01a7 4002
4ee9c684 4003 egress:
4004 input_location = saved_location;
e27482aa 4005 return successfully_inlined;
e343483a 4006}
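/* A rough sketch of the CFG surgery performed above (illustrative):

     before:  BB:  ...  lhs = foo (args);  rest...

     after:   BB:           ...
              [copy of foo's body, wired between BB and RETURN_BLOCK]
              RETURN_BLOCK: lhs = USE_RETVAR;  rest...

   i.e. the call statement is replaced by the callee's duplicated body
   plus, when the result is used, an assignment from the return
   variable.  */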
4ee9c684 4007
e27482aa 4008/* Expand call statements reachable from STMT_P.
 4009   We can only have CALL_EXPRs as the "toplevel" tree code or nested
81943faa 4010   in a MODIFY_EXPR.  See gimple.c:get_call_expr_in().  Unfortunately
e27482aa 4011   we cannot use that function here, because we need a pointer
 4012   to the CALL_EXPR, not the tree itself.  */
4013
4014static bool
51a48c27 4015gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4ee9c684 4016{
75a70cf9 4017 gimple_stmt_iterator gsi;
4ee9c684 4018
75a70cf9 4019 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4020 {
75a70cf9 4021 gimple stmt = gsi_stmt (gsi);
e27482aa 4022
75a70cf9 4023 if (is_gimple_call (stmt)
4024 && expand_call_inline (bb, stmt, id))
4025 return true;
4ee9c684 4026 }
75a70cf9 4027
e27482aa 4028 return false;
4ee9c684 4029}
4030
75a70cf9 4031
3e9045dd 4032/* Walk all basic blocks created after FIRST and try to fold every statement
4033 in the STATEMENTS pointer set. */
75a70cf9 4034
3e9045dd 4035static void
4036fold_marked_statements (int first, struct pointer_set_t *statements)
4037{
75a70cf9 4038 for (; first < n_basic_blocks; first++)
3e9045dd 4039 if (BASIC_BLOCK (first))
4040 {
75a70cf9 4041 gimple_stmt_iterator gsi;
4042
4043 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4044 !gsi_end_p (gsi);
4045 gsi_next (&gsi))
4046 if (pointer_set_contains (statements, gsi_stmt (gsi)))
2fcc7de2 4047 {
75a70cf9 4048 gimple old_stmt = gsi_stmt (gsi);
8d8c4f3e 4049 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
117ef3d7 4050
a65c4d64 4051 if (old_decl && DECL_BUILT_IN (old_decl))
4052 {
4053 /* Folding builtins can create multiple instructions,
4054 we need to look at all of them. */
4055 gimple_stmt_iterator i2 = gsi;
4056 gsi_prev (&i2);
4057 if (fold_stmt (&gsi))
4058 {
4059 gimple new_stmt;
bb4322f9 4060 /* If a builtin at the end of a bb folded into nothing,
4061 the following loop won't work. */
4062 if (gsi_end_p (gsi))
4063 {
4064 cgraph_update_edges_for_call_stmt (old_stmt,
4065 old_decl, NULL);
4066 break;
4067 }
a65c4d64 4068 if (gsi_end_p (i2))
4069 i2 = gsi_start_bb (BASIC_BLOCK (first));
4070 else
4071 gsi_next (&i2);
4072 while (1)
4073 {
4074 new_stmt = gsi_stmt (i2);
4075 update_stmt (new_stmt);
4076 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4077 new_stmt);
4078
4079 if (new_stmt == gsi_stmt (gsi))
4080 {
 4081		      /* It is okay to check only for the very last
 4082			 of these statements.  If it is a throwing
 4083			 statement nothing will change.  If it isn't,
 4084			 this can remove EH edges.  The only case that
 4085			 would be wrong is if some intermediate stmt
 4086			 could throw while the last one cannot; then
 4087			 we would have to split the block, which we
 4088			 cannot do here and would lose anyway.  And as
 4089			 builtins probably never throw, this all
 4090			 is moot anyway.  */
4091 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4092 new_stmt))
4093 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4094 break;
4095 }
4096 gsi_next (&i2);
4097 }
4098 }
4099 }
4100 else if (fold_stmt (&gsi))
2fcc7de2 4101 {
75a70cf9 4102 /* Re-read the statement from GSI as fold_stmt() may
4103 have changed it. */
4104 gimple new_stmt = gsi_stmt (gsi);
4105 update_stmt (new_stmt);
4106
8d8c4f3e 4107 if (is_gimple_call (old_stmt)
4108 || is_gimple_call (new_stmt))
a65c4d64 4109 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4110 new_stmt);
75a70cf9 4111
4112 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4113 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
2fcc7de2 4114 }
4115 }
3e9045dd 4116 }
4117}
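/* Illustrative: folding a builtin call such as a small constant-size
   memcpy can replace the single GIMPLE_CALL with several statements;
   the inner loop above visits each statement the fold produced so that
   callgraph edges and EH edges stay consistent with the folded form.  */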
4118
223e470b 4119/* Return true if BB has at least one abnormal outgoing edge. */
4120
4121static inline bool
4122has_abnormal_outgoing_edge_p (basic_block bb)
4123{
4124 edge e;
4125 edge_iterator ei;
4126
4127 FOR_EACH_EDGE (e, ei, bb->succs)
4128 if (e->flags & EDGE_ABNORMAL)
4129 return true;
4130
4131 return false;
4132}
4133
e343483a 4134/* Expand calls to inline functions in the body of FN. */
4135
09a2e412 4136unsigned int
60b8c5b3 4137optimize_inline_calls (tree fn)
e343483a 4138{
51a48c27 4139 copy_body_data id;
e27482aa 4140 basic_block bb;
3e9045dd 4141 int last = n_basic_blocks;
dac18d1a 4142 struct gimplify_ctx gctx;
d1ead98b 4143 bool inlined_p = false;
dac18d1a 4144
9d5baf96 4145 /* There is no point in performing inlining if errors have already
4146 occurred -- and we might crash if we try to inline invalid
4147 code. */
852f689e 4148 if (seen_error ())
09a2e412 4149 return 0;
9d5baf96 4150
e343483a 4151 /* Clear out ID. */
4152 memset (&id, 0, sizeof (id));
4153
fd6a3c41 4154 id.src_node = id.dst_node = cgraph_get_node (fn);
222bc9b9 4155 gcc_assert (id.dst_node->analyzed);
51a48c27 4156 id.dst_fn = fn;
e343483a 4157 /* Or any functions that aren't finished yet. */
e343483a 4158 if (current_function_decl)
f018d957 4159 id.dst_fn = current_function_decl;
51a48c27 4160
4161 id.copy_decl = copy_decl_maybe_to_var;
4162 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4163 id.transform_new_cfg = false;
4164 id.transform_return_to_modify = true;
32020b10 4165 id.transform_lang_insert_block = NULL;
3e9045dd 4166 id.statements_to_fold = pointer_set_create ();
51a48c27 4167
dac18d1a 4168 push_gimplify_context (&gctx);
e343483a 4169
31a8456e 4170 /* We make no attempts to keep dominance info up-to-date. */
4171 free_dominance_info (CDI_DOMINATORS);
4172 free_dominance_info (CDI_POST_DOMINATORS);
4173
75a70cf9 4174 /* Register specific gimple functions. */
4175 gimple_register_cfg_hooks ();
4176
e27482aa 4177 /* Reach the trees by walking over the CFG, and note the
4178 enclosing basic-blocks in the call edges. */
4179 /* We walk the blocks going forward, because inlined function bodies
4180 will split id->current_basic_block, and the new blocks will
4181 follow it; we'll trudge through them, processing their CALL_EXPRs
4182 along the way. */
4183 FOR_EACH_BB (bb)
d1ead98b 4184 inlined_p |= gimple_expand_calls_inline (bb, &id);
e343483a 4185
e27482aa 4186 pop_gimplify_context (NULL);
4ee9c684 4187
b0cdf642 4188#ifdef ENABLE_CHECKING
4189 {
4190 struct cgraph_edge *e;
4191
51a48c27 4192 verify_cgraph_node (id.dst_node);
b0cdf642 4193
4194 /* Double check that we inlined everything we are supposed to inline. */
51a48c27 4195 for (e = id.dst_node->callees; e; e = e->next_callee)
8c0963c4 4196 gcc_assert (e->inline_failed);
b0cdf642 4197 }
4198#endif
48e1416a 4199
d1ead98b 4200 /* Fold queued statements. */
184e9f8f 4201 fold_marked_statements (last, id.statements_to_fold);
4202 pointer_set_destroy (id.statements_to_fold);
48e1416a 4203
9845d120 4204 gcc_assert (!id.debug_stmts);
4205
d1ead98b 4206 /* If we didn't inline into the function there is nothing to do. */
4207 if (!inlined_p)
4208 return 0;
4209
184e9f8f 4210 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4211 number_blocks (fn);
3e9045dd 4212
31359ae8 4213 delete_unreachable_blocks_update_callgraph (&id);
4214#ifdef ENABLE_CHECKING
4215 verify_cgraph_node (id.dst_node);
4216#endif
75a70cf9 4217
deff5ffd 4218 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4219 not possible yet - the IPA passes might make various functions to not
4220 throw and they don't care to proactively update local EH info. This is
4221 done later in fixup_cfg pass that also execute the verification. */
75a70cf9 4222 return (TODO_update_ssa
4223 | TODO_cleanup_cfg
4ae20857 4224 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
d1ead98b 4225 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4ae20857 4226 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
e343483a 4227}
4228
e343483a 4229/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4230
4231tree
60b8c5b3 4232copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
e343483a 4233{
4234 enum tree_code code = TREE_CODE (*tp);
35cc02b5 4235 enum tree_code_class cl = TREE_CODE_CLASS (code);
e343483a 4236
4237 /* We make copies of most nodes. */
35cc02b5 4238 if (IS_EXPR_CODE_CLASS (cl)
e343483a 4239 || code == TREE_LIST
4240 || code == TREE_VEC
55d97af6 4241 || code == TYPE_DECL
4242 || code == OMP_CLAUSE)
e343483a 4243 {
4244 /* Because the chain gets clobbered when we make a copy, we save it
4245 here. */
f4e36c33 4246 tree chain = NULL_TREE, new_tree;
35cc02b5 4247
9b88d08d 4248 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4249 chain = TREE_CHAIN (*tp);
e343483a 4250
4251 /* Copy the node. */
f4e36c33 4252 new_tree = copy_node (*tp);
4ee9c684 4253
4254 /* Propagate mudflap marked-ness. */
4255 if (flag_mudflap && mf_marked_p (*tp))
f4e36c33 4256 mf_mark (new_tree);
4ee9c684 4257
f4e36c33 4258 *tp = new_tree;
e343483a 4259
4260 /* Now, restore the chain, if appropriate. That will cause
4261 walk_tree to walk into the chain as well. */
773c5ba7 4262 if (code == PARM_DECL
4263 || code == TREE_LIST
55d6e7cd 4264 || code == OMP_CLAUSE)
e343483a 4265 TREE_CHAIN (*tp) = chain;
4266
4267 /* For now, we don't update BLOCKs when we make copies. So, we
4ee9c684 4268 have to nullify all BIND_EXPRs. */
4269 if (TREE_CODE (*tp) == BIND_EXPR)
4270 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
e343483a 4271 }
c75b4594 4272 else if (code == CONSTRUCTOR)
4273 {
4274 /* CONSTRUCTOR nodes need special handling because
4275 we need to duplicate the vector of elements. */
f4e36c33 4276 tree new_tree;
c75b4594 4277
f4e36c33 4278 new_tree = copy_node (*tp);
c75b4594 4279
4280 /* Propagate mudflap marked-ness. */
4281 if (flag_mudflap && mf_marked_p (*tp))
f4e36c33 4282 mf_mark (new_tree);
b27ac6b5 4283
f4e36c33 4284 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
c75b4594 4285 CONSTRUCTOR_ELTS (*tp));
f4e36c33 4286 *tp = new_tree;
c75b4594 4287 }
d55ebb6a 4288 else if (code == STATEMENT_LIST)
17476aac 4289 /* We used to just abort on STATEMENT_LIST, but we can run into them
4290 with statement-expressions (c++/40975). */
4291 copy_statement_list (tp);
ce45a448 4292 else if (TREE_CODE_CLASS (code) == tcc_type)
e343483a 4293 *walk_subtrees = 0;
ce45a448 4294 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4ee9c684 4295 *walk_subtrees = 0;
d5406300 4296 else if (TREE_CODE_CLASS (code) == tcc_constant)
4297 *walk_subtrees = 0;
e343483a 4298 return NULL_TREE;
4299}
4300
 4301/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5ff0afa2 4302   information indicating to what new SAVE_EXPR this one should be mapped,
e27482aa 4303   use that one.  Otherwise, create a new node and enter it in ST.  */
e343483a 4305
8c143e71 4306static void
67c155cb 4307remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
e343483a 4308{
e3022db7 4309 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4310 tree *n;
fcc73461 4311 tree t;
e343483a 4312
4313 /* See if we already encountered this SAVE_EXPR. */
e3022db7 4314 n = (tree *) pointer_map_contains (st, *tp);
40570cc2 4315
e343483a 4316 /* If we didn't already remap this SAVE_EXPR, do so now. */
4317 if (!n)
4318 {
fcc73461 4319 t = copy_node (*tp);
e343483a 4320
e343483a 4321 /* Remember this SAVE_EXPR. */
e3022db7 4322 *pointer_map_insert (st, *tp) = t;
da3bde1a 4323 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
e3022db7 4324 *pointer_map_insert (st, t) = t;
e343483a 4325 }
4326 else
fcc73461 4327 {
4328 /* We've already walked into this SAVE_EXPR; don't do it again. */
4329 *walk_subtrees = 0;
e3022db7 4330 t = *n;
fcc73461 4331 }
e343483a 4332
4333 /* Replace this SAVE_EXPR with the copy. */
fcc73461 4334 *tp = t;
e343483a 4335}
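/* Illustrative: if the copied body contains SAVE_EXPR <n * 2> in two
   places, both occurrences are rewritten to the same new node, so the
   copy keeps the evaluate-once semantics of the original.  */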
d57cd35f 4336
5ff0afa2 4337/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
 4338   copies the label's declaration and enters it in the decl map in DATA
51a48c27 4339   (which is really a `copy_body_data *').  */
4ee9c684 4340
4341static tree
4342mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4343 void *data)
4344{
51a48c27 4345 copy_body_data *id = (copy_body_data *) data;
4ee9c684 4346
4347 /* Don't walk into types. */
7dd37241 4348 if (TYPE_P (*tp))
4349 *walk_subtrees = 0;
4ee9c684 4350
7dd37241 4351 else if (TREE_CODE (*tp) == LABEL_EXPR)
4ee9c684 4352 {
7dd37241 4353 tree decl = TREE_OPERAND (*tp, 0);
4ee9c684 4354
7dd37241 4355 /* Copy the decl and remember the copy. */
51a48c27 4356 insert_decl_map (id, decl, id->copy_decl (decl, id));
4ee9c684 4357 }
4358
4359 return NULL_TREE;
4360}
4361
ac13e8d9 4362/* Perform any modifications to EXPR required when it is unsaved. Does
4363 not recurse into EXPR's subtrees. */
4364
4365static void
4366unsave_expr_1 (tree expr)
4367{
4368 switch (TREE_CODE (expr))
4369 {
4370 case TARGET_EXPR:
4371 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4372 It's OK for this to happen if it was part of a subtree that
4373 isn't immediately expanded, such as operand 2 of another
4374 TARGET_EXPR. */
4375 if (TREE_OPERAND (expr, 1))
4376 break;
4377
4378 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4379 TREE_OPERAND (expr, 3) = NULL_TREE;
4380 break;
4381
4382 default:
4383 break;
4384 }
4385}
4386
4ee9c684 4387/* Called via walk_tree when an expression is unsaved.  Using the
 4388   decl map ST (taken from the `copy_body_data' passed in DATA),
 4389   remaps all local declarations to appropriate replacements.  */
d57cd35f 4390
4391static tree
4ee9c684 4392unsave_r (tree *tp, int *walk_subtrees, void *data)
d57cd35f 4393{
51a48c27 4394 copy_body_data *id = (copy_body_data *) data;
e3022db7 4395 struct pointer_map_t *st = id->decl_map;
4396 tree *n;
4ee9c684 4397
4398 /* Only a local declaration (variable or label). */
4399 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4400 || TREE_CODE (*tp) == LABEL_DECL)
4401 {
4402 /* Lookup the declaration. */
e3022db7 4403 n = (tree *) pointer_map_contains (st, *tp);
b27ac6b5 4404
4ee9c684 4405 /* If it's there, remap it. */
4406 if (n)
e3022db7 4407 *tp = *n;
4ee9c684 4408 }
5ff0afa2 4409
4ee9c684 4410 else if (TREE_CODE (*tp) == STATEMENT_LIST)
75a70cf9 4411 gcc_unreachable ();
4ee9c684 4412 else if (TREE_CODE (*tp) == BIND_EXPR)
4413 copy_bind_expr (tp, walk_subtrees, id);
bfec3452 4414 else if (TREE_CODE (*tp) == SAVE_EXPR
4415 || TREE_CODE (*tp) == TARGET_EXPR)
67c155cb 4416 remap_save_expr (tp, st, walk_subtrees);
d57cd35f 4417 else
4ee9c684 4418 {
4419 copy_tree_r (tp, walk_subtrees, NULL);
4420
4421 /* Do whatever unsaving is required. */
4422 unsave_expr_1 (*tp);
4423 }
4424
4425 /* Keep iterating. */
4426 return NULL_TREE;
d57cd35f 4427}
4428
ac13e8d9 4429/* Copies everything in EXPR and replaces variables, labels
4430 and SAVE_EXPRs local to EXPR. */
4ee9c684 4431
4432tree
ac13e8d9 4433unsave_expr_now (tree expr)
4ee9c684 4434{
51a48c27 4435 copy_body_data id;
4ee9c684 4436
4437 /* There's nothing to do for NULL_TREE. */
4438 if (expr == 0)
4439 return expr;
4440
4441 /* Set up ID. */
4442 memset (&id, 0, sizeof (id));
51a48c27 4443 id.src_fn = current_function_decl;
4444 id.dst_fn = current_function_decl;
e3022db7 4445 id.decl_map = pointer_map_create ();
9845d120 4446 id.debug_map = NULL;
4ee9c684 4447
51a48c27 4448 id.copy_decl = copy_decl_no_change;
4449 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4450 id.transform_new_cfg = false;
4451 id.transform_return_to_modify = false;
32020b10 4452 id.transform_lang_insert_block = NULL;
51a48c27 4453
4ee9c684 4454 /* Walk the tree once to find local labels. */
4455 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4456
4457 /* Walk the tree again, copying, remapping, and unsaving. */
4458 walk_tree (&expr, unsave_r, &id, NULL);
4459
4460 /* Clean up. */
e3022db7 4461 pointer_map_destroy (id.decl_map);
9845d120 4462 if (id.debug_map)
4463 pointer_map_destroy (id.debug_map);
4ee9c684 4464
4465 return expr;
4466}
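/* Example (illustrative): a caller holding a tree EXPR that must be
   emitted twice can do

     tree copy = unsave_expr_now (expr);

   and obtain a deep copy whose local variables, labels and SAVE_EXPRs
   are private to the copy.  */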
4467
75a70cf9 4468/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
 4469   label, copies the label's declaration and enters it in the decl map in
 4470   DATA (which is really a `copy_body_data *').  */
4471
4472static tree
4473mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4474 bool *handled_ops_p ATTRIBUTE_UNUSED,
4475 struct walk_stmt_info *wi)
4476{
4477 copy_body_data *id = (copy_body_data *) wi->info;
4478 gimple stmt = gsi_stmt (*gsip);
4479
4480 if (gimple_code (stmt) == GIMPLE_LABEL)
4481 {
4482 tree decl = gimple_label_label (stmt);
4483
4484 /* Copy the decl and remember the copy. */
4485 insert_decl_map (id, decl, id->copy_decl (decl, id));
4486 }
4487
4488 return NULL_TREE;
4489}
4490
4491
 4492/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
 4493   Using the decl map ST (taken from the `copy_body_data' in the walk
 4494   info), remaps all local declarations to appropriate replacements in
 4495   gimple operands.  */
4496
4497static tree
4498replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4499{
4500 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4501 copy_body_data *id = (copy_body_data *) wi->info;
4502 struct pointer_map_t *st = id->decl_map;
4503 tree *n;
4504 tree expr = *tp;
4505
4506 /* Only a local declaration (variable or label). */
4507 if ((TREE_CODE (expr) == VAR_DECL
4508 && !TREE_STATIC (expr))
4509 || TREE_CODE (expr) == LABEL_DECL)
4510 {
4511 /* Lookup the declaration. */
4512 n = (tree *) pointer_map_contains (st, expr);
4513
4514 /* If it's there, remap it. */
4515 if (n)
4516 *tp = *n;
4517 *walk_subtrees = 0;
4518 }
4519 else if (TREE_CODE (expr) == STATEMENT_LIST
4520 || TREE_CODE (expr) == BIND_EXPR
4521 || TREE_CODE (expr) == SAVE_EXPR)
4522 gcc_unreachable ();
4523 else if (TREE_CODE (expr) == TARGET_EXPR)
4524 {
4525 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4526 It's OK for this to happen if it was part of a subtree that
4527 isn't immediately expanded, such as operand 2 of another
4528 TARGET_EXPR. */
4529 if (!TREE_OPERAND (expr, 1))
4530 {
4531 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4532 TREE_OPERAND (expr, 3) = NULL_TREE;
4533 }
4534 }
4535
4536 /* Keep iterating. */
4537 return NULL_TREE;
4538}
4539
4540
 4541/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
 4542   Using the decl map ST (taken from the `copy_body_data' in the walk
 4543   info), remaps all local declarations to appropriate replacements in
 4544   gimple statements.  */
4545
4546static tree
4547replace_locals_stmt (gimple_stmt_iterator *gsip,
4548 bool *handled_ops_p ATTRIBUTE_UNUSED,
4549 struct walk_stmt_info *wi)
4550{
4551 copy_body_data *id = (copy_body_data *) wi->info;
4552 gimple stmt = gsi_stmt (*gsip);
4553
4554 if (gimple_code (stmt) == GIMPLE_BIND)
4555 {
4556 tree block = gimple_bind_block (stmt);
4557
4558 if (block)
4559 {
4560 remap_block (&block, id);
4561 gimple_bind_set_block (stmt, block);
4562 }
4563
4564 /* This will remap a lot of the same decls again, but this should be
4565 harmless. */
4566 if (gimple_bind_vars (stmt))
4b5d70fd 4567 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
75a70cf9 4568 }
4569
4570 /* Keep iterating. */
4571 return NULL_TREE;
4572}
4573
4574
4575/* Copies everything in SEQ and replaces variables and labels local to
4576 current_function_decl. */
4577
4578gimple_seq
4579copy_gimple_seq_and_replace_locals (gimple_seq seq)
4580{
4581 copy_body_data id;
4582 struct walk_stmt_info wi;
4583 struct pointer_set_t *visited;
4584 gimple_seq copy;
4585
4586 /* There's nothing to do for NULL_TREE. */
4587 if (seq == NULL)
4588 return seq;
4589
4590 /* Set up ID. */
4591 memset (&id, 0, sizeof (id));
4592 id.src_fn = current_function_decl;
4593 id.dst_fn = current_function_decl;
4594 id.decl_map = pointer_map_create ();
9845d120 4595 id.debug_map = NULL;
75a70cf9 4596
4597 id.copy_decl = copy_decl_no_change;
4598 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4599 id.transform_new_cfg = false;
4600 id.transform_return_to_modify = false;
4601 id.transform_lang_insert_block = NULL;
4602
4603 /* Walk the tree once to find local labels. */
4604 memset (&wi, 0, sizeof (wi));
4605 visited = pointer_set_create ();
4606 wi.info = &id;
4607 wi.pset = visited;
4608 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4609 pointer_set_destroy (visited);
4610
4611 copy = gimple_seq_copy (seq);
4612
4613 /* Walk the copy, remapping decls. */
4614 memset (&wi, 0, sizeof (wi));
4615 wi.info = &id;
4616 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4617
4618 /* Clean up. */
4619 pointer_map_destroy (id.decl_map);
9845d120 4620 if (id.debug_map)
4621 pointer_map_destroy (id.debug_map);
75a70cf9 4622
4623 return copy;
4624}
4625
4626
4ee9c684 4627/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5ff0afa2 4628
4ee9c684 4629static tree
4630debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4631{
4632 if (*tp == data)
4633 return (tree) data;
4634 else
4635 return NULL;
4636}
4637
4b987fac 4638DEBUG_FUNCTION bool
4ee9c684 4639debug_find_tree (tree top, tree search)
4640{
4641 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4642}
4643
e27482aa 4644
4ee9c684 4645/* Declare the variables created by the inliner.  Add all the variables
 4646   in VARS to BLOCK.  */
4647
4648static void
e27482aa 4649declare_inline_vars (tree block, tree vars)
4ee9c684 4650{
11fe6c8b 4651 tree t;
1767a056 4652 for (t = vars; t; t = DECL_CHAIN (t))
8e224d28 4653 {
4654 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4655 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
2ab2ce89 4656 add_local_decl (cfun, t);
8e224d28 4657 }
4ee9c684 4658
e27482aa 4659 if (block)
4660 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4661}
4662
c5235c0b 4663/* Finish up COPY, the new copy of the declaration DECL.  DECL originally
51a48c27 4664   lived in ID->src_fn; COPY will live in ID->dst_fn.  Shared bookkeeping
 4665   for the copy_decl_* functions below.  */
c5235c0b 4666
51a48c27 4667static tree
4668copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
c5235c0b 4669{
c5235c0b 4670 /* Don't generate debug information for the copy if we wouldn't have
4671 generated it for the copy either. */
4672 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4673 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4674
4675 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
48e1416a 4676 declaration inspired this copy. */
c5235c0b 4677 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4678
4679 /* The new variable/label has no RTL, yet. */
68331616 4680 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4681 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
8e3cb73b 4682 SET_DECL_RTL (copy, 0);
48e1416a 4683
c5235c0b 4684 /* These args would always appear unused, if not for this. */
4685 TREE_USED (copy) = 1;
4686
4687 /* Set the context for the new declaration. */
4688 if (!DECL_CONTEXT (decl))
4689 /* Globals stay global. */
4690 ;
51a48c27 4691 else if (DECL_CONTEXT (decl) != id->src_fn)
c5235c0b 4692 /* Things that weren't in the scope of the function we're inlining
4693 from aren't in the scope we're inlining to, either. */
4694 ;
4695 else if (TREE_STATIC (decl))
4696 /* Function-scoped static variables should stay in the original
4697 function. */
4698 ;
4699 else
4700 /* Ordinary automatic local variables are now in the scope of the
4701 new function. */
51a48c27 4702 DECL_CONTEXT (copy) = id->dst_fn;
c5235c0b 4703
649597af 4704 if (TREE_CODE (decl) == VAR_DECL
4705 /* C++ clones functions during parsing, before
4706 referenced_vars. */
4707 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4708 && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4709 DECL_UID (decl)))
4710 add_referenced_var (copy);
4711
c5235c0b 4712 return copy;
4713}
4714
51a48c27 4715static tree
4716copy_decl_to_var (tree decl, copy_body_data *id)
4717{
4718 tree copy, type;
4719
4720 gcc_assert (TREE_CODE (decl) == PARM_DECL
4721 || TREE_CODE (decl) == RESULT_DECL);
4722
4723 type = TREE_TYPE (decl);
4724
e60a6f7b 4725 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4726 VAR_DECL, DECL_NAME (decl), type);
1a981e1a 4727 if (DECL_PT_UID_SET_P (decl))
4728 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
51a48c27 4729 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4730 TREE_READONLY (copy) = TREE_READONLY (decl);
4731 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
8ea8de24 4732 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
51a48c27 4733
4734 return copy_decl_for_dup_finish (id, decl, copy);
4735}
4736
25b3017b 4737/* Like copy_decl_to_var, but create a return slot object instead of a
4738 pointer variable for return by invisible reference. */
4739
4740static tree
4741copy_result_decl_to_var (tree decl, copy_body_data *id)
4742{
4743 tree copy, type;
4744
4745 gcc_assert (TREE_CODE (decl) == PARM_DECL
4746 || TREE_CODE (decl) == RESULT_DECL);
4747
4748 type = TREE_TYPE (decl);
4749 if (DECL_BY_REFERENCE (decl))
4750 type = TREE_TYPE (type);
4751
e60a6f7b 4752 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4753 VAR_DECL, DECL_NAME (decl), type);
1a981e1a 4754 if (DECL_PT_UID_SET_P (decl))
4755 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
25b3017b 4756 TREE_READONLY (copy) = TREE_READONLY (decl);
4757 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4758 if (!DECL_BY_REFERENCE (decl))
4759 {
4760 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
8ea8de24 4761 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
25b3017b 4762 }
4763
4764 return copy_decl_for_dup_finish (id, decl, copy);
4765}
4766
32020b10 4767tree
51a48c27 4768copy_decl_no_change (tree decl, copy_body_data *id)
4769{
4770 tree copy;
4771
4772 copy = copy_node (decl);
4773
4774 /* The COPY is not abstract; it will be generated in DST_FN. */
4775 DECL_ABSTRACT (copy) = 0;
4776 lang_hooks.dup_lang_specific_decl (copy);
4777
4778 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4779 been taken; it's for internal bookkeeping in expand_goto_internal. */
4780 if (TREE_CODE (copy) == LABEL_DECL)
4781 {
4782 TREE_ADDRESSABLE (copy) = 0;
4783 LABEL_DECL_UID (copy) = -1;
4784 }
4785
4786 return copy_decl_for_dup_finish (id, decl, copy);
4787}
4788
4789static tree
4790copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4791{
4792 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4793 return copy_decl_to_var (decl, id);
4794 else
4795 return copy_decl_no_change (decl, id);
4796}
4797
c5235c0b 4798/* Return a copy of the function's argument tree. */
4799static tree
5afe38fe 4800copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4801 bitmap args_to_skip, tree *vars)
c5235c0b 4802{
5afe38fe 4803 tree arg, *parg;
4804 tree new_parm = NULL;
4805 int i = 0;
c5235c0b 4806
5afe38fe 4807 parg = &new_parm;
4808
1767a056 4809 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5afe38fe 4810 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4811 {
4812 tree new_tree = remap_decl (arg, id);
4813 lang_hooks.dup_lang_specific_decl (new_tree);
4814 *parg = new_tree;
1767a056 4815 parg = &DECL_CHAIN (new_tree);
5afe38fe 4816 }
a8305131 4817 else if (!pointer_map_contains (id->decl_map, arg))
5afe38fe 4818 {
4819 /* Make an equivalent VAR_DECL. If the argument was used
 4820	 as a temporary variable later in the function, the uses will be
 4821	 replaced by a local variable. */
4822 tree var = copy_decl_to_var (arg, id);
5afe38fe 4823 add_referenced_var (var);
4824 insert_decl_map (id, arg, var);
4825 /* Declare this new variable. */
1767a056 4826 DECL_CHAIN (var) = *vars;
5afe38fe 4827 *vars = var;
4828 }
4829 return new_parm;
c5235c0b 4830}
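
/* A hedged, illustrative sketch (EXAMPLE_ARGS_TO_SKIP is not part of
   the original file): build an ARGS_TO_SKIP bitmap for the function
   above.  Bit I corresponds to the zero-based position of a parameter
   in DECL_ARGUMENTS; parameters whose bit is set are not copied into
   the new argument list and are instead remapped to local VAR_DECLs.  */

static bitmap ATTRIBUTE_UNUSED
example_args_to_skip (unsigned int pos)
{
  /* Drop the single parameter at position POS.  */
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  bitmap_set_bit (args_to_skip, pos);
  return args_to_skip;
}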
4831
4832/* Return a copy of the function's static chain. */
4833static tree
51a48c27 4834copy_static_chain (tree static_chain, copy_body_data * id)
c5235c0b 4835{
4836 tree *chain_copy, *pvar;
4837
4838 chain_copy = &static_chain;
1767a056 4839 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
c5235c0b 4840 {
f4e36c33 4841 tree new_tree = remap_decl (*pvar, id);
4842 lang_hooks.dup_lang_specific_decl (new_tree);
1767a056 4843 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
f4e36c33 4844 *pvar = new_tree;
c5235c0b 4845 }
4846 return static_chain;
4847}
4848
4849/* Return true if the function is allowed to be versioned.
4850 This is a guard for the versioning functionality. */
d747fdfb 4851
c5235c0b 4852bool
4853tree_versionable_function_p (tree fndecl)
4854{
bdb1f0d1 4855 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4856 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
c5235c0b 4857}
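
/* A hedged example of the "noclone" attribute tested above (user code,
   not part of this file): a declaration such as

     int keep_intact (int) __attribute__ ((noclone));

   makes tree_versionable_function_p return false, as does any condition
   reported by copy_forbidden.  */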
4858
ccf4ab6b 4859/* Delete all unreachable basic blocks and update callgraph.
4860 Doing so is somewhat nontrivial because we need to update all clones and
 4861	 remove inline functions that become unreachable. */
38150ede 4862
ccf4ab6b 4863static bool
4864delete_unreachable_blocks_update_callgraph (copy_body_data *id)
38150ede 4865{
ccf4ab6b 4866 bool changed = false;
4867 basic_block b, next_bb;
4868
4869 find_unreachable_blocks ();
4870
4871 /* Delete all unreachable basic blocks. */
4872
4873 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4874 {
4875 next_bb = b->next_bb;
4876
4877 if (!(b->flags & BB_REACHABLE))
4878 {
4879 gimple_stmt_iterator bsi;
4880
4881 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4882 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4883 {
4884 struct cgraph_edge *e;
4885 struct cgraph_node *node;
4886
4887 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4888 {
4889 if (!e->inline_failed)
4890 cgraph_remove_node_and_inline_clones (e->callee);
4891 else
4892 cgraph_remove_edge (e);
4893 }
4894 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4895 && id->dst_node->clones)
4896 for (node = id->dst_node->clones; node != id->dst_node;)
4897 {
4898 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4899 {
4900 if (!e->inline_failed)
4901 cgraph_remove_node_and_inline_clones (e->callee);
4902 else
4903 cgraph_remove_edge (e);
4904 }
48e1416a 4905
ccf4ab6b 4906 if (node->clones)
4907 node = node->clones;
4908 else if (node->next_sibling_clone)
4909 node = node->next_sibling_clone;
4910 else
4911 {
4912 while (node != id->dst_node && !node->next_sibling_clone)
4913 node = node->clone_of;
4914 if (node != id->dst_node)
4915 node = node->next_sibling_clone;
4916 }
4917 }
4918 }
4919 delete_basic_block (b);
4920 changed = true;
4921 }
4922 }
4923
ccf4ab6b 4924 return changed;
38150ede 4925}
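
/* The loop above (and update_clone_info below) uses a non-recursive
   pre-order walk over the clone tree.  A hedged, standalone sketch of
   the same traversal (EXAMPLE_WALK_CLONE_TREE is illustrative and not
   part of the original file; it assumes every clone's clone_of chain
   leads back to ROOT):  */

static void ATTRIBUTE_UNUSED
example_walk_clone_tree (struct cgraph_node *root,
			 void (*visit) (struct cgraph_node *))
{
  struct cgraph_node *node = root->clones;
  while (node && node != root)
    {
      visit (node);
      if (node->clones)
	/* Descend to the first child first.  */
	node = node->clones;
      else if (node->next_sibling_clone)
	/* Then visit siblings.  */
	node = node->next_sibling_clone;
      else
	{
	  /* Climb towards ROOT until a node with an unvisited sibling
	     is found.  */
	  while (node != root && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != root)
	    node = node->next_sibling_clone;
	}
    }
}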
4926
e20422ea 4927/* Update clone info after duplication. */
4928
4929static void
4930update_clone_info (copy_body_data * id)
4931{
4932 struct cgraph_node *node;
4933 if (!id->dst_node->clones)
4934 return;
4935 for (node = id->dst_node->clones; node != id->dst_node;)
4936 {
4937 /* First update replace maps to match the new body. */
4938 if (node->clone.tree_map)
4939 {
4940 unsigned int i;
4941 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4942 {
4943 struct ipa_replace_map *replace_info;
4944 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4945 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4946 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4947 }
4948 }
4949 if (node->clones)
4950 node = node->clones;
4951 else if (node->next_sibling_clone)
4952 node = node->next_sibling_clone;
4953 else
4954 {
4955 while (node != id->dst_node && !node->next_sibling_clone)
4956 node = node->clone_of;
4957 if (node != id->dst_node)
4958 node = node->next_sibling_clone;
4959 }
4960 }
4961}
4962
c5235c0b 4963/* Create a copy of a function's tree.
4964 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4965 of the original function and the new copied function
48e1416a 4966 respectively. In case we want to replace a DECL
4967 tree with another tree while duplicating the function's
4968 body, TREE_MAP represents the mapping between these
469679ab 4969 trees. If UPDATE_CLONES is set, the call_stmt fields
b06ab5fa 4970 of edges of clones of the function will be updated.
4971
 4972	 If non-NULL, ARGS_TO_SKIP determines which function parameters to
 4973	 remove from the new version.
 4974	 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
 4975	 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4976*/
c5235c0b 4977void
d747fdfb 4978tree_function_versioning (tree old_decl, tree new_decl,
4979 VEC(ipa_replace_map_p,gc)* tree_map,
b06ab5fa 4980 bool update_clones, bitmap args_to_skip,
4981 bitmap blocks_to_copy, basic_block new_entry)
c5235c0b 4982{
4983 struct cgraph_node *old_version_node;
4984 struct cgraph_node *new_version_node;
51a48c27 4985 copy_body_data id;
deff5ffd 4986 tree p;
c5235c0b 4987 unsigned i;
4988 struct ipa_replace_map *replace_info;
9845d120 4989 basic_block old_entry_block, bb;
13e50f08 4990 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4991
09a2e412 4992 tree old_current_function_decl = current_function_decl;
13e50f08 4993 tree vars = NULL_TREE;
c5235c0b 4994
4995 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4996 && TREE_CODE (new_decl) == FUNCTION_DECL);
4997 DECL_POSSIBLY_INLINED (old_decl) = 1;
4998
53f79206 4999 old_version_node = cgraph_get_node (old_decl);
5000 gcc_checking_assert (old_version_node);
5001 new_version_node = cgraph_get_node (new_decl);
5002 gcc_checking_assert (new_version_node);
c5235c0b 5003
3d280f42 5004 /* Output the inlining info for this abstract function, since it has been
5005 inlined. If we don't do this now, we can lose the information about the
5006 variables in the function when the blocks get blown away as soon as we
5007 remove the cgraph node. */
5008 (*debug_hooks->outlining_inline_function) (old_decl);
5009
c5235c0b 5010 DECL_ARTIFICIAL (new_decl) = 1;
5011 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
58d82cd0 5012 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
c5235c0b 5013
b2f42a98 5014 /* Prepare the data structures for the tree copy. */
5015 memset (&id, 0, sizeof (id));
5016
c5235c0b 5017	 /* Create the set used to collect statements that need folding. */
ccf4ab6b 5018 id.statements_to_fold = pointer_set_create ();
9845d120 5019
e3022db7 5020 id.decl_map = pointer_map_create ();
9845d120 5021 id.debug_map = NULL;
51a48c27 5022 id.src_fn = old_decl;
5023 id.dst_fn = new_decl;
5024 id.src_node = old_version_node;
5025 id.dst_node = new_version_node;
5026 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6d1cc52c 5027 if (id.src_node->ipa_transforms_to_apply)
5028 {
5029 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5030 unsigned int i;
5031
5032 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5033 id.src_node->ipa_transforms_to_apply);
5034 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5035 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5036 VEC_index (ipa_opt_pass,
5037 old_transforms_to_apply,
5038 i));
5039 }
48e1416a 5040
51a48c27 5041 id.copy_decl = copy_decl_no_change;
5042 id.transform_call_graph_edges
5043 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5044 id.transform_new_cfg = true;
5045 id.transform_return_to_modify = false;
32020b10 5046 id.transform_lang_insert_block = NULL;
51a48c27 5047
c5235c0b 5048 current_function_decl = new_decl;
deff5ffd 5049 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5050 (DECL_STRUCT_FUNCTION (old_decl));
5051 initialize_cfun (new_decl, old_decl,
e2d3f422 5052 old_entry_block->count);
3912327b 5053 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5054 = id.src_cfun->gimple_df->ipa_pta;
deff5ffd 5055 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
48e1416a 5056
c5235c0b 5057 /* Copy the function's static chain. */
5058 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5059 if (p)
5060 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5061 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5062 &id);
48e1416a 5063
c5235c0b 5064 /* If there's a tree_map, prepare for substitution. */
5065 if (tree_map)
ccf4ab6b 5066 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
c5235c0b 5067 {
13e50f08 5068 gimple init;
ccf4ab6b 5069 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
51a48c27 5070 if (replace_info->replace_p)
b9c94ed7 5071 {
9e9bac20 5072 tree op = replace_info->new_tree;
1bf41320 5073 if (!replace_info->old_tree)
5074 {
 5075	      int n = replace_info->parm_num;  /* Don't shadow the outer I.  */
 5076	      tree parm;
1767a056 5077	      for (parm = DECL_ARGUMENTS (old_decl); n; parm = DECL_CHAIN (parm))
1bf41320 5078		n--;
5079 replace_info->old_tree = parm;
5080 }
 5081
5083 STRIP_NOPS (op);
5084
5085 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5086 op = TREE_OPERAND (op, 0);
48e1416a 5087
9e9bac20 5088 if (TREE_CODE (op) == ADDR_EXPR)
b9c94ed7 5089 {
9e9bac20 5090 op = TREE_OPERAND (op, 0);
b9c94ed7 5091 while (handled_component_p (op))
5092 op = TREE_OPERAND (op, 0);
5093 if (TREE_CODE (op) == VAR_DECL)
5094 add_referenced_var (op);
5095 }
13e50f08 5096 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5097 init = setup_one_parameter (&id, replace_info->old_tree,
5098 replace_info->new_tree, id.src_fn,
5099 NULL,
5100 &vars);
5101 if (init)
5102 VEC_safe_push (gimple, heap, init_stmts, init);
b9c94ed7 5103 }
c5235c0b 5104 }
a8305131 5105 /* Copy the function's arguments. */
5106 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5107 DECL_ARGUMENTS (new_decl) =
5108 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5109 args_to_skip, &vars);
48e1416a 5110
a8305131 5111 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
a6519462 5112 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
48e1416a 5113
13e50f08 5114 declare_inline_vars (DECL_INITIAL (new_decl), vars);
ccf4ab6b 5115
2ab2ce89 5116 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
c5235c0b 5117 /* Add local vars. */
2ab2ce89 5118 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
48e1416a 5119
c5235c0b 5120 if (DECL_RESULT (old_decl) != NULL_TREE)
5121 {
c6ba6fb8 5122 tree old_name;
5123 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
c5235c0b 5124 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
c6ba6fb8 5125 if (gimple_in_ssa_p (id.src_cfun)
5126 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5127 && (old_name
5128 = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5129 {
5130 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5131 insert_decl_map (&id, old_name, new_name);
5132 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5133 set_default_def (DECL_RESULT (new_decl), new_name);
5134 }
c5235c0b 5135 }
48e1416a 5136
c6ba6fb8 5137	 /* Copy the function's body. */
5138 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5139 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5140
c5235c0b 5141 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5142 number_blocks (new_decl);
5143
9845d120 5144 /* We want to create the BB unconditionally, so that the addition of
5145 debug stmts doesn't affect BB count, which may in the end cause
5146 codegen differences. */
5147 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5148 while (VEC_length (gimple, init_stmts))
5149 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
e20422ea 5150 update_clone_info (&id);
13e50f08 5151
d747fdfb 5152 /* Remap the nonlocal_goto_save_area, if any. */
5153 if (cfun->nonlocal_goto_save_area)
5154 {
5155 struct walk_stmt_info wi;
5156
5157 memset (&wi, 0, sizeof (wi));
5158 wi.info = &id;
5159 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5160 }
5161
c5235c0b 5162 /* Clean up. */
e3022db7 5163 pointer_map_destroy (id.decl_map);
9845d120 5164 if (id.debug_map)
5165 pointer_map_destroy (id.debug_map);
dd277d48 5166 free_dominance_info (CDI_DOMINATORS);
5167 free_dominance_info (CDI_POST_DOMINATORS);
ccf4ab6b 5168
5169 fold_marked_statements (0, id.statements_to_fold);
5170 pointer_set_destroy (id.statements_to_fold);
5171 fold_cond_expr_cond ();
5172 delete_unreachable_blocks_update_callgraph (&id);
ea7e866e 5173 if (id.dst_node->analyzed)
5174 cgraph_rebuild_references ();
ccf4ab6b 5175 update_ssa (TODO_update_ssa);
555e8b05 5176
5177 /* After partial cloning we need to rescale frequencies, so they are
5178 within proper range in the cloned function. */
5179 if (new_entry)
5180 {
5181 struct cgraph_edge *e;
5182 rebuild_frequencies ();
5183
5184 new_version_node->count = ENTRY_BLOCK_PTR->count;
5185 for (e = new_version_node->callees; e; e = e->next_callee)
5186 {
5187 basic_block bb = gimple_bb (e->call_stmt);
1a036a3b 5188 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5189 bb);
5190 e->count = bb->count;
5191 }
5192 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5193 {
5194 basic_block bb = gimple_bb (e->call_stmt);
5195 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5196 bb);
555e8b05 5197 e->count = bb->count;
5198 }
5199 }
5200
ccf4ab6b 5201 free_dominance_info (CDI_DOMINATORS);
5202 free_dominance_info (CDI_POST_DOMINATORS);
5203
9845d120 5204 gcc_assert (!id.debug_stmts);
13e50f08 5205 VEC_free (gimple, heap, init_stmts);
deff5ffd 5206 pop_cfun ();
09a2e412 5207 current_function_decl = old_current_function_decl;
5208 gcc_assert (!current_function_decl
5209 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
c5235c0b 5210 return;
5211}
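
/* A hedged usage sketch, modeled on clone materialization in the
   callgraph code (EXAMPLE_MATERIALIZE_CLONE is illustrative and not
   part of the original file; the real caller does more bookkeeping):
   copy the original body into a clone's decl, moving clone edges and
   honoring the skipped arguments recorded on the node.  */

static void ATTRIBUTE_UNUSED
example_materialize_clone (struct cgraph_node *node)
{
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map,
			    true /* update_clones */,
			    node->clone.args_to_skip,
			    NULL /* blocks_to_copy */,
			    NULL /* new_entry */);
}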
5212
4189e677 5213/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5214 the callee and return the inlined body on success. */
5215
5216tree
5217maybe_inline_call_in_expr (tree exp)
5218{
5219 tree fn = get_callee_fndecl (exp);
5220
5221 /* We can only try to inline "const" functions. */
5222 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5223 {
5224 struct pointer_map_t *decl_map = pointer_map_create ();
5225 call_expr_arg_iterator iter;
5226 copy_body_data id;
5227 tree param, arg, t;
5228
5229 /* Remap the parameters. */
5230 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5231 param;
1767a056 5232 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
4189e677 5233 *pointer_map_insert (decl_map, param) = arg;
5234
5235 memset (&id, 0, sizeof (id));
5236 id.src_fn = fn;
5237 id.dst_fn = current_function_decl;
5238 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5239 id.decl_map = decl_map;
5240
5241 id.copy_decl = copy_decl_no_change;
5242 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5243 id.transform_new_cfg = false;
5244 id.transform_return_to_modify = true;
839c5aac 5245 id.transform_lang_insert_block = NULL;
4189e677 5246
5247 /* Make sure not to unshare trees behind the front-end's back
5248 since front-end specific mechanisms may rely on sharing. */
5249 id.regimplify = false;
5250 id.do_not_unshare = true;
5251
5252 /* We're not inside any EH region. */
e38def9c 5253 id.eh_lp_nr = 0;
4189e677 5254
5255 t = copy_tree_body (&id);
5256 pointer_map_destroy (decl_map);
5257
5258 /* We can only return something suitable for use in a GENERIC
5259 expression tree. */
5260 if (TREE_CODE (t) == MODIFY_EXPR)
5261 return TREE_OPERAND (t, 1);
5262 }
5263
5264 return NULL_TREE;
5265}
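
/* A hedged usage sketch (EXAMPLE_FOLD_CONST_CALL is illustrative, not
   part of the original file): a front end trying to fold a call at
   GENERIC level, keeping the original CALL_EXPR when inlining is not
   possible.  */

static tree ATTRIBUTE_UNUSED
example_fold_const_call (tree call)
{
  tree folded = maybe_inline_call_in_expr (call);
  return folded ? folded : call;
}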
5266
03908818 5267/* Duplicate a type, fields and all. */
5268
5269tree
5270build_duplicate_type (tree type)
5271{
51a48c27 5272 struct copy_body_data id;
03908818 5273
5274 memset (&id, 0, sizeof (id));
51a48c27 5275 id.src_fn = current_function_decl;
5276 id.dst_fn = current_function_decl;
5277 id.src_cfun = cfun;
e3022db7 5278 id.decl_map = pointer_map_create ();
9845d120 5279 id.debug_map = NULL;
881eb642 5280 id.copy_decl = copy_decl_no_change;
03908818 5281
5282 type = remap_type_1 (type, &id);
5283
e3022db7 5284 pointer_map_destroy (id.decl_map);
9845d120 5285 if (id.debug_map)
5286 pointer_map_destroy (id.debug_map);
03908818 5287
e8e0078c 5288 TYPE_CANONICAL (type) = type;
5289
03908818 5290 return type;
5291}
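
/* A hedged usage sketch (EXAMPLE_PRIVATE_TYPE_COPY is illustrative, not
   part of the original file): duplicate TYPE so a caller can modify the
   copy without affecting the original.  The duplicate is its own
   canonical type, so it is treated as distinct from TYPE.  */

static tree ATTRIBUTE_UNUSED
example_private_type_copy (tree type)
{
  tree dup = build_duplicate_type (type);
  gcc_assert (TYPE_CANONICAL (dup) == dup);
  return dup;
}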