/* Tree inlining.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "predict.h"
#include "vec.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"

#include "rtl.h" /* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements are adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function rather than into blocks of an existing function as with
   inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously lookup any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in case
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This causes
             an ICE if we end up extending the lifetime of an SSA name across
             an abnormal edge, and it also increases register pressure.

             We simply initialize all uninitialized vars to 0, except when we
             are inlining into the very first BB.  We could avoid this for all
             BBs that are not inside strongly connected regions of the CFG,
             but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

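/* Worker for remap_type.  TYPE is known to need a copy: build the new
   type, register the mapping, and remap the types and trees it refers to.  */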
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

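/* Remap TYPE for the destination function.  Reuse an existing mapping when
   there is one; only variably modified types need an actual copy.  */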
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

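/* Remap the chain of declarations DECLS for the destination function.
   Declarations that are not duplicated are recorded in *NONLOCALIZED_LIST
   when one is supplied.  Return the new chain.  */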
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

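/* Replace *TP with a fresh copy of its STATEMENT_LIST, copying nested
   STATEMENT_LISTs as well.  */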
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

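/* Copy the BIND_EXPR pointed to by *TP, remapping its block and its local
   variables using the mapping information in ID.  */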
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0)
    return 0;
  if (!id->dependence_map)
    id->dependence_map
      = new hash_map<unsigned short, unsigned short, dependence_hasher>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, *tp);
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = id->decl_map->get (TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


1037/* Called from copy_body_id via walk_tree. DATA is really a
1b369fae 1038 `copy_body_data *'. */
aa4a53af 1039
1b369fae 1040tree
726a989a 1041copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
d4e4baa9 1042{
1b369fae
RH
1043 copy_body_data *id = (copy_body_data *) data;
1044 tree fn = id->src_fn;
acb8f212 1045 tree new_block;
d4e4baa9 1046
e21aff8a
SB
1047 /* Begin by recognizing trees that we'll completely rewrite for the
1048 inlining context. Our output for these trees is completely
1049 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1050 into an edge). Further down, we'll handle trees that get
1051 duplicated and/or tweaked. */
d4e4baa9 1052
1b369fae 1053 /* When requested, RETURN_EXPRs should be transformed to just the
726a989a 1054 contained MODIFY_EXPR. The branch semantics of the return will
1b369fae
RH
1055 be handled elsewhere by manipulating the CFG rather than a statement. */
1056 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
d4e4baa9 1057 {
e21aff8a 1058 tree assignment = TREE_OPERAND (*tp, 0);
d4e4baa9
AO
1059
1060 /* If we're returning something, just turn that into an
e21aff8a
SB
1061 assignment into the equivalent of the original RESULT_DECL.
1062 If the "assignment" is just the result decl, the result
1063 decl has already been set (e.g. a recent "foo (&result_decl,
1064 ...)"); just toss the entire RETURN_EXPR. */
726a989a 1065 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
e21aff8a
SB
1066 {
1067 /* Replace the RETURN_EXPR with (a copy of) the
726a989a 1068 MODIFY_EXPR hanging underneath. */
e21aff8a
SB
1069 *tp = copy_node (assignment);
1070 }
1071 else /* Else the RETURN_EXPR returns no value. */
1072 {
1073 *tp = NULL;
cceb1885 1074 return (tree) (void *)1;
e21aff8a 1075 }
d4e4baa9 1076 }
110cfe1c
JH
1077 else if (TREE_CODE (*tp) == SSA_NAME)
1078 {
1079 *tp = remap_ssa_name (*tp, id);
1080 *walk_subtrees = 0;
1081 return NULL;
1082 }
e21aff8a 1083
d4e4baa9
AO
1084 /* Local variables and labels need to be replaced by equivalent
1085 variables. We don't want to copy static variables; there's only
1086 one of those, no matter how many times we inline the containing
5377d5ba 1087 function. Similarly for globals from an outer function. */
50886bf1 1088 else if (auto_var_in_fn_p (*tp, fn))
d4e4baa9
AO
1089 {
1090 tree new_decl;
1091
1092 /* Remap the declaration. */
1093 new_decl = remap_decl (*tp, id);
1e128c5f 1094 gcc_assert (new_decl);
d4e4baa9
AO
1095 /* Replace this variable with the copy. */
1096 STRIP_TYPE_NOPS (new_decl);
1097 *tp = new_decl;
e4cf29ae 1098 *walk_subtrees = 0;
d4e4baa9 1099 }
6de9cd9a
DN
1100 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1101 copy_statement_list (tp);
a406865a
RG
1102 else if (TREE_CODE (*tp) == SAVE_EXPR
1103 || TREE_CODE (*tp) == TARGET_EXPR)
82c82743 1104 remap_save_expr (tp, id->decl_map, walk_subtrees);
17acc01a
JH
1105 else if (TREE_CODE (*tp) == LABEL_DECL
1106 && (! DECL_CONTEXT (*tp)
1b369fae 1107 || decl_function_context (*tp) == id->src_fn))
e21aff8a 1108 /* These may need to be remapped for EH handling. */
17acc01a 1109 *tp = remap_decl (*tp, id);
6de9cd9a
DN
1110 else if (TREE_CODE (*tp) == BIND_EXPR)
1111 copy_bind_expr (tp, walk_subtrees, id);
3c2a7a6a
RH
1112 /* Types may need remapping as well. */
1113 else if (TYPE_P (*tp))
1114 *tp = remap_type (*tp, id);
1115
bb04998a
RK
1116 /* If this is a constant, we have to copy the node iff the type will be
1117 remapped. copy_tree_r will not copy a constant. */
3cf11075 1118 else if (CONSTANT_CLASS_P (*tp))
bb04998a
RK
1119 {
1120 tree new_type = remap_type (TREE_TYPE (*tp), id);
1121
1122 if (new_type == TREE_TYPE (*tp))
1123 *walk_subtrees = 0;
1124
1125 else if (TREE_CODE (*tp) == INTEGER_CST)
807e902e 1126 *tp = wide_int_to_tree (new_type, *tp);
bb04998a
RK
1127 else
1128 {
1129 *tp = copy_node (*tp);
1130 TREE_TYPE (*tp) = new_type;
1131 }
1132 }
1133
d4e4baa9
AO
1134 /* Otherwise, just copy the node. Note that copy_tree_r already
1135 knows not to copy VAR_DECLs, etc., so this is safe. */
1136 else
1137 {
e21aff8a
SB
1138 /* Here we handle trees that are not completely rewritten.
1139 First we detect some inlining-induced bogosities for
1140 discarding. */
726a989a
RB
1141 if (TREE_CODE (*tp) == MODIFY_EXPR
1142 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1143 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
d4e4baa9
AO
1144 {
1145 /* Some assignments VAR = VAR; don't generate any rtl code
1146 and thus don't count as variable modification. Avoid
1147 keeping bogosities like 0 = 0. */
726a989a 1148 tree decl = TREE_OPERAND (*tp, 0), value;
6be42dd4 1149 tree *n;
d4e4baa9 1150
b787e7a2 1151 n = id->decl_map->get (decl);
d4e4baa9
AO
1152 if (n)
1153 {
6be42dd4 1154 value = *n;
d4e4baa9 1155 STRIP_TYPE_NOPS (value);
becfd6e5 1156 if (TREE_CONSTANT (value) || TREE_READONLY (value))
68594ce7 1157 {
c2255bc4 1158 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
726a989a 1159 return copy_tree_body_r (tp, walk_subtrees, data);
68594ce7 1160 }
d4e4baa9
AO
1161 }
1162 }
1b369fae 1163 else if (TREE_CODE (*tp) == INDIRECT_REF)
6de9cd9a
DN
1164 {
1165 /* Get rid of *& from inline substitutions that can happen when a
1166 pointer argument is an ADDR_EXPR. */
81cfbbc2 1167 tree decl = TREE_OPERAND (*tp, 0);
b787e7a2 1168 tree *n = id->decl_map->get (decl);
6de9cd9a
DN
1169 if (n)
1170 {
30d2e943
RG
1171 /* If we happen to get an ADDR_EXPR in n->value, strip
1172 it manually here as we'll eventually get ADDR_EXPRs
1173 which lie about their types pointed to. In this case
1174 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
095ecc24
RG
1175 but we absolutely rely on that. As fold_indirect_ref
1176 does other useful transformations, try that first, though. */
78bbd765
EB
1177 tree type = TREE_TYPE (*tp);
1178 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1179 tree old = *tp;
1180 *tp = gimple_fold_indirect_ref (ptr);
095ecc24
RG
1181 if (! *tp)
1182 {
78bbd765 1183 if (TREE_CODE (ptr) == ADDR_EXPR)
de4af523 1184 {
78bbd765
EB
1185 *tp
1186 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
de4af523
JJ
1187 /* ??? We should either assert here or build
1188 a VIEW_CONVERT_EXPR instead of blindly leaking
1189 incompatible types to our IL. */
1190 if (! *tp)
78bbd765 1191 *tp = TREE_OPERAND (ptr, 0);
de4af523 1192 }
095ecc24 1193 else
d84b37b0 1194 {
78bbd765 1195 *tp = build1 (INDIRECT_REF, type, ptr);
d84b37b0 1196 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
955f6531 1197 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
a61c3633 1198 TREE_READONLY (*tp) = TREE_READONLY (old);
78bbd765
EB
1199 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1200 have remapped a parameter as the property might be
1201 valid only for the parameter itself. */
1202 if (TREE_THIS_NOTRAP (old)
1203 && (!is_parm (TREE_OPERAND (old, 0))
1204 || (!id->transform_parameter && is_parm (ptr))))
1205 TREE_THIS_NOTRAP (*tp) = 1;
d84b37b0 1206 }
095ecc24 1207 }
81cfbbc2
JH
1208 *walk_subtrees = 0;
1209 return NULL;
68594ce7
JM
1210 }
1211 }
70f34814
RG
1212 else if (TREE_CODE (*tp) == MEM_REF)
1213 {
54714c68
RB
1214 /* We need to re-canonicalize MEM_REFs from inline substitutions
1215 that can happen when a pointer argument is an ADDR_EXPR.
1216 Recurse here manually to allow that. */
78bbd765
EB
1217 tree ptr = TREE_OPERAND (*tp, 0);
1218 tree type = remap_type (TREE_TYPE (*tp), id);
1219 tree old = *tp;
54714c68 1220 walk_tree (&ptr, copy_tree_body_r, data, NULL);
78bbd765 1221 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
54714c68
RB
1222 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1223 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1224 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
f3dccf50
RB
1225 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1226 {
1227 MR_DEPENDENCE_CLIQUE (*tp)
1228 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1229 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1230 }
78bbd765
EB
1231 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1232 remapped a parameter as the property might be valid only
1233 for the parameter itself. */
1234 if (TREE_THIS_NOTRAP (old)
1235 && (!is_parm (TREE_OPERAND (old, 0))
1236 || (!id->transform_parameter && is_parm (ptr))))
1237 TREE_THIS_NOTRAP (*tp) = 1;
54714c68
RB
1238 *walk_subtrees = 0;
1239 return NULL;
70f34814 1240 }
68594ce7 1241
e21aff8a
SB
1242 /* Here is the "usual case". Copy this tree node, and then
1243 tweak some special cases. */
1b369fae 1244 copy_tree_r (tp, walk_subtrees, NULL);
110cfe1c 1245
acb8f212
JH
1246 /* If EXPR has block defined, map it to newly constructed block.
1247 When inlining we want EXPRs without block appear in the block
ee0192a2 1248 of function call if we are not remapping a type. */
726a989a 1249 if (EXPR_P (*tp))
acb8f212 1250 {
ee0192a2 1251 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
acb8f212
JH
1252 if (TREE_BLOCK (*tp))
1253 {
6be42dd4 1254 tree *n;
b787e7a2 1255 n = id->decl_map->get (TREE_BLOCK (*tp));
60a5d78a
JJ
1256 if (n)
1257 new_block = *n;
acb8f212 1258 }
5368224f 1259 TREE_SET_BLOCK (*tp, new_block);
acb8f212 1260 }
68594ce7 1261
726a989a 1262 if (TREE_CODE (*tp) != OMP_CLAUSE)
07beea0d 1263 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
3c2a7a6a 1264
68594ce7
JM
1265 /* The copied TARGET_EXPR has never been expanded, even if the
1266 original node was expanded already. */
1267 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1268 {
1269 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1270 TREE_OPERAND (*tp, 3) = NULL_TREE;
1271 }
84cce55d
RH
1272
1273 /* Variable substitution need not be simple. In particular, the
1274 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1275 and friends are up-to-date. */
1276 else if (TREE_CODE (*tp) == ADDR_EXPR)
1277 {
ad6003f2 1278 int invariant = is_gimple_min_invariant (*tp);
726a989a
RB
1279 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1280
8e85fd14
RG
1281 /* Handle the case where we substituted an INDIRECT_REF
1282 into the operand of the ADDR_EXPR. */
1283 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1284 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1285 else
1286 recompute_tree_invariant_for_addr_expr (*tp);
726a989a 1287
416c991f
JJ
1288 /* If this used to be invariant, but is not any longer,
1289 then regimplification is probably needed. */
ad6003f2 1290 if (invariant && !is_gimple_min_invariant (*tp))
416c991f 1291 id->regimplify = true;
726a989a 1292
84cce55d
RH
1293 *walk_subtrees = 0;
1294 }
d4e4baa9
AO
1295 }
1296
1297 /* Keep iterating. */
1298 return NULL_TREE;
1299}
1300
1d65f45c
RH
1301/* Helper for remap_gimple_stmt. Given an EH region number for the
1302 source function, map that to the duplicate EH region number in
1303 the destination function. */
1304
1305static int
1306remap_eh_region_nr (int old_nr, copy_body_data *id)
1307{
1308 eh_region old_r, new_r;
1d65f45c
RH
1309
1310 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
b787e7a2 1311 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1d65f45c
RH
1312
1313 return new_r->index;
1314}
1315
1316/* Similar, but operate on INTEGER_CSTs. */
1317
1318static tree
1319remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1320{
1321 int old_nr, new_nr;
1322
9439e9a1 1323 old_nr = tree_to_shwi (old_t_nr);
1d65f45c
RH
1324 new_nr = remap_eh_region_nr (old_nr, id);
1325
9f616812 1326 return build_int_cst (integer_type_node, new_nr);
1d65f45c 1327}
726a989a
RB
1328
1329/* Helper for copy_bb. Remap statement STMT using the inlining
1330 information in ID. Return the new statement copy. */
1331
d5e254e1 1332static gimple_seq
726a989a
RB
1333remap_gimple_stmt (gimple stmt, copy_body_data *id)
1334{
1335 gimple copy = NULL;
1336 struct walk_stmt_info wi;
5a6e26b7 1337 bool skip_first = false;
d5e254e1 1338 gimple_seq stmts = NULL;
726a989a
RB
1339
1340 /* Begin by recognizing trees that we'll completely rewrite for the
1341 inlining context. Our output for these trees is completely
1342 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1343 into an edge). Further down, we'll handle trees that get
1344 duplicated and/or tweaked. */
1345
1346 /* When requested, GIMPLE_RETURNs should be transformed to just the
1347 contained GIMPLE_ASSIGN. The branch semantics of the return will
1348 be handled elsewhere by manipulating the CFG rather than the
1349 statement. */
1350 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1351 {
538dd0b7 1352 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
d5e254e1
IE
1353 tree retbnd = gimple_return_retbnd (stmt);
1354 tree bndslot = id->retbnd;
1355
1356 if (retbnd && bndslot)
1357 {
1358 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1359 memset (&wi, 0, sizeof (wi));
1360 wi.info = id;
1361 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1362 gimple_seq_add_stmt (&stmts, bndcopy);
1363 }
726a989a
RB
1364
1365 /* If we're returning something, just turn that into an
1366 assignment into the equivalent of the original RESULT_DECL.
1367 If RETVAL is just the result decl, the result decl has
1368 already been set (e.g. a recent "foo (&result_decl, ...)");
1369 just toss the entire GIMPLE_RETURN. */
6938f93f
JH
1370 if (retval
1371 && (TREE_CODE (retval) != RESULT_DECL
1372 && (TREE_CODE (retval) != SSA_NAME
70b5e7dc 1373 || ! SSA_NAME_VAR (retval)
6938f93f 1374 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
5a6e26b7 1375 {
f8cb36a9
JJ
1376 copy = gimple_build_assign (id->do_not_unshare
1377 ? id->retvar : unshare_expr (id->retvar),
1378 retval);
5a6e26b7
JH
1379 /* id->retvar is already substituted. Skip it on later remapping. */
1380 skip_first = true;
d5e254e1
IE
1381
1382 /* We need to copy bounds if return structure with pointers into
1383 instrumented function. */
1384 if (chkp_function_instrumented_p (id->dst_fn)
1385 && !bndslot
1386 && !BOUNDED_P (id->retvar)
1387 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1388 id->assign_stmts.safe_push (copy);
1389
5a6e26b7 1390 }
726a989a 1391 else
d5e254e1 1392 return stmts;
726a989a
RB
1393 }
1394 else if (gimple_has_substatements (stmt))
1395 {
1396 gimple_seq s1, s2;
1397
1398 /* When cloning bodies from the C++ front end, we will be handed bodies
1399 in High GIMPLE form. Handle here all the High GIMPLE statements that
1400 have embedded statements. */
1401 switch (gimple_code (stmt))
1402 {
1403 case GIMPLE_BIND:
538dd0b7 1404 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
726a989a
RB
1405 break;
1406
1407 case GIMPLE_CATCH:
538dd0b7
DM
1408 {
1409 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1410 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1411 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1412 }
726a989a
RB
1413 break;
1414
1415 case GIMPLE_EH_FILTER:
1416 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1417 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1418 break;
1419
1420 case GIMPLE_TRY:
1421 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1422 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
b8698a0f 1423 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
726a989a
RB
1424 break;
1425
1426 case GIMPLE_WITH_CLEANUP_EXPR:
1427 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1428 copy = gimple_build_wce (s1);
1429 break;
1430
1431 case GIMPLE_OMP_PARALLEL:
538dd0b7
DM
1432 {
1433 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1434 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1435 copy = gimple_build_omp_parallel
1436 (s1,
1437 gimple_omp_parallel_clauses (omp_par_stmt),
1438 gimple_omp_parallel_child_fn (omp_par_stmt),
1439 gimple_omp_parallel_data_arg (omp_par_stmt));
1440 }
726a989a
RB
1441 break;
1442
1443 case GIMPLE_OMP_TASK:
1444 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1445 copy = gimple_build_omp_task
1446 (s1,
1447 gimple_omp_task_clauses (stmt),
1448 gimple_omp_task_child_fn (stmt),
1449 gimple_omp_task_data_arg (stmt),
1450 gimple_omp_task_copy_fn (stmt),
1451 gimple_omp_task_arg_size (stmt),
1452 gimple_omp_task_arg_align (stmt));
1453 break;
1454
1455 case GIMPLE_OMP_FOR:
1456 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1457 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
74bf76ed
JJ
1458 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1459 gimple_omp_for_clauses (stmt),
726a989a
RB
1460 gimple_omp_for_collapse (stmt), s2);
1461 {
1462 size_t i;
1463 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1464 {
1465 gimple_omp_for_set_index (copy, i,
1466 gimple_omp_for_index (stmt, i));
1467 gimple_omp_for_set_initial (copy, i,
1468 gimple_omp_for_initial (stmt, i));
1469 gimple_omp_for_set_final (copy, i,
1470 gimple_omp_for_final (stmt, i));
1471 gimple_omp_for_set_incr (copy, i,
1472 gimple_omp_for_incr (stmt, i));
1473 gimple_omp_for_set_cond (copy, i,
1474 gimple_omp_for_cond (stmt, i));
1475 }
1476 }
1477 break;
1478
1479 case GIMPLE_OMP_MASTER:
1480 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1481 copy = gimple_build_omp_master (s1);
1482 break;
1483
acf0174b
JJ
1484 case GIMPLE_OMP_TASKGROUP:
1485 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1486 copy = gimple_build_omp_taskgroup (s1);
1487 break;
1488
726a989a
RB
1489 case GIMPLE_OMP_ORDERED:
1490 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1491 copy = gimple_build_omp_ordered (s1);
1492 break;
1493
1494 case GIMPLE_OMP_SECTION:
1495 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1496 copy = gimple_build_omp_section (s1);
1497 break;
1498
1499 case GIMPLE_OMP_SECTIONS:
1500 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1501 copy = gimple_build_omp_sections
1502 (s1, gimple_omp_sections_clauses (stmt));
1503 break;
1504
1505 case GIMPLE_OMP_SINGLE:
1506 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1507 copy = gimple_build_omp_single
1508 (s1, gimple_omp_single_clauses (stmt));
1509 break;
1510
acf0174b
JJ
1511 case GIMPLE_OMP_TARGET:
1512 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1513 copy = gimple_build_omp_target
1514 (s1, gimple_omp_target_kind (stmt),
1515 gimple_omp_target_clauses (stmt));
1516 break;
1517
1518 case GIMPLE_OMP_TEAMS:
1519 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1520 copy = gimple_build_omp_teams
1521 (s1, gimple_omp_teams_clauses (stmt));
1522 break;
1523
05a26161
JJ
1524 case GIMPLE_OMP_CRITICAL:
1525 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
538dd0b7
DM
1526 copy = gimple_build_omp_critical (s1,
1527 gimple_omp_critical_name (
1528 as_a <gomp_critical *> (stmt)));
05a26161
JJ
1529 break;
1530
0a35513e 1531 case GIMPLE_TRANSACTION:
538dd0b7
DM
1532 {
1533 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1534 gtransaction *new_trans_stmt;
1535 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1536 id);
1537 copy = new_trans_stmt
1538 = gimple_build_transaction (
1539 s1,
1540 gimple_transaction_label (old_trans_stmt));
1541 gimple_transaction_set_subcode (
1542 new_trans_stmt,
1543 gimple_transaction_subcode (old_trans_stmt));
1544 }
0a35513e
AH
1545 break;
1546
726a989a
RB
1547 default:
1548 gcc_unreachable ();
1549 }
1550 }
1551 else
1552 {
1553 if (gimple_assign_copy_p (stmt)
1554 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1555 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1556 {
1557 /* Here we handle statements that are not completely rewritten.
1558 First we detect some inlining-induced bogosities for
1559 discarding. */
1560
1561 /* Some assignments VAR = VAR; don't generate any rtl code
1562 and thus don't count as variable modification. Avoid
1563 keeping bogosities like 0 = 0. */
1564 tree decl = gimple_assign_lhs (stmt), value;
1565 tree *n;
1566
b787e7a2 1567 n = id->decl_map->get (decl);
1568 if (n)
1569 {
1570 value = *n;
1571 STRIP_TYPE_NOPS (value);
1572 if (TREE_CONSTANT (value) || TREE_READONLY (value))
d5e254e1 1573 return NULL;
1574 }
1575 }
1576
1577 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1578 in a block that we aren't copying during tree_function_versioning,
1579 just drop the clobber stmt. */
1580 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1581 {
1582 tree lhs = gimple_assign_lhs (stmt);
1583 if (TREE_CODE (lhs) == MEM_REF
1584 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1585 {
1586 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1587 if (gimple_bb (def_stmt)
1588 && !bitmap_bit_p (id->blocks_to_copy,
1589 gimple_bb (def_stmt)->index))
d5e254e1 1590 return NULL;
1591 }
1592 }
1593
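	  /* Debug bind/source-bind stmts are only copied and queued here
	     (id->debug_stmts); their operands are remapped later, in
	     copy_debug_stmts, after all other statements have been processed.  */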
1594 if (gimple_debug_bind_p (stmt))
1595 {
1596 gdebug *copy
1597 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1598 gimple_debug_bind_get_value (stmt),
1599 stmt);
9771b263 1600 id->debug_stmts.safe_push (copy);
1601 gimple_seq_add_stmt (&stmts, copy);
1602 return stmts;
b5b8b0ac 1603 }
1604 if (gimple_debug_source_bind_p (stmt))
1605 {
1606 gdebug *copy = gimple_build_debug_source_bind
1607 (gimple_debug_source_bind_get_var (stmt),
1608 gimple_debug_source_bind_get_value (stmt),
1609 stmt);
9771b263 1610 id->debug_stmts.safe_push (copy);
1611 gimple_seq_add_stmt (&stmts, copy);
1612 return stmts;
ddb555ed 1613 }
1614
1615 /* Create a new deep copy of the statement. */
1616 copy = gimple_copy (stmt);
1617
6b77934e 1618 /* Clear flags that need revisiting. */
1619 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1620 if (gimple_call_tail_p (call_stmt))
1621 gimple_call_set_tail (call_stmt, false);
6b77934e 1622
1623 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1624 RESX and EH_DISPATCH. */
1625 if (id->eh_map)
1626 switch (gimple_code (copy))
1627 {
1628 case GIMPLE_CALL:
1629 {
1630 tree r, fndecl = gimple_call_fndecl (copy);
1631 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1632 switch (DECL_FUNCTION_CODE (fndecl))
1633 {
1634 case BUILT_IN_EH_COPY_VALUES:
1635 r = gimple_call_arg (copy, 1);
1636 r = remap_eh_region_tree_nr (r, id);
1637 gimple_call_set_arg (copy, 1, r);
1638 /* FALLTHRU */
1639
1640 case BUILT_IN_EH_POINTER:
1641 case BUILT_IN_EH_FILTER:
1642 r = gimple_call_arg (copy, 0);
1643 r = remap_eh_region_tree_nr (r, id);
1644 gimple_call_set_arg (copy, 0, r);
1645 break;
1646
1647 default:
1648 break;
1649 }
d086d311 1650
1651 /* Reset alias info if we didn't apply measures to
1652 keep it valid over inlining by setting DECL_PT_UID. */
1653 if (!id->src_cfun->gimple_df
1654 || !id->src_cfun->gimple_df->ipa_pta)
538dd0b7 1655 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1656 }
1657 break;
1658
1659 case GIMPLE_RESX:
1660 {
1661 gresx *resx_stmt = as_a <gresx *> (copy);
1662 int r = gimple_resx_region (resx_stmt);
1d65f45c 1663 r = remap_eh_region_nr (r, id);
538dd0b7 1664 gimple_resx_set_region (resx_stmt, r);
1665 }
1666 break;
1667
1668 case GIMPLE_EH_DISPATCH:
1669 {
1670 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1671 int r = gimple_eh_dispatch_region (eh_dispatch);
1d65f45c 1672 r = remap_eh_region_nr (r, id);
538dd0b7 1673 gimple_eh_dispatch_set_region (eh_dispatch, r);
1674 }
1675 break;
1676
1677 default:
1678 break;
1679 }
1680 }
1681
1682 /* If STMT has a block defined, map it to the newly constructed
16917761 1683 block. */
1684 if (gimple_block (copy))
1685 {
1686 tree *n;
b787e7a2 1687 n = id->decl_map->get (gimple_block (copy));
726a989a 1688 gcc_assert (n);
16917761 1689 gimple_set_block (copy, *n);
1690 }
1691
ddb555ed 1692 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1693 {
1694 gimple_seq_add_stmt (&stmts, copy);
1695 return stmts;
1696 }
b5b8b0ac 1697
1698 /* Remap all the operands in COPY. */
1699 memset (&wi, 0, sizeof (wi));
1700 wi.info = id;
1701 if (skip_first)
1702 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1703 else
b8698a0f 1704 walk_gimple_op (copy, remap_gimple_op_r, &wi);
726a989a 1705
1706 /* Clear the copied virtual operands. We are not remapping them here
1707 but are going to recreate them from scratch. */
1708 if (gimple_has_mem_ops (copy))
1709 {
1710 gimple_set_vdef (copy, NULL_TREE);
1711 gimple_set_vuse (copy, NULL_TREE);
1712 }
1713
1714 gimple_seq_add_stmt (&stmts, copy);
1715 return stmts;
1716}
1717
1718
1719/* Copy basic block, scale profile accordingly. Edges will be taken care of
1720 later */
1721
1722static basic_block
1723copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1724 gcov_type count_scale)
e21aff8a 1725{
c2a4718a 1726 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
e21aff8a 1727 basic_block copy_basic_block;
726a989a 1728 tree decl;
0d63a740 1729 gcov_type freq;
1730 basic_block prev;
1731
1732 /* Search for previous copied basic block. */
1733 prev = bb->prev_bb;
1734 while (!prev->aux)
1735 prev = prev->prev_bb;
1736
1737 /* create_basic_block() will append every new block to
1738 basic_block_info automatically. */
cceb1885 1739 copy_basic_block = create_basic_block (NULL, (void *) 0,
91382288 1740 (basic_block) prev->aux);
8b47039c 1741 copy_basic_block->count = apply_scale (bb->count, count_scale);
45a80bb9 1742
1743 /* We are going to rebuild frequencies from scratch. These values
 1744 are of only minor importance for driving canonicalize_loop_headers. */
8b47039c 1745 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
726a989a 1746
1747 /* We recompute frequencies after inlining, so this is quite safe. */
1748 if (freq > BB_FREQ_MAX)
1749 freq = BB_FREQ_MAX;
1750 copy_basic_block->frequency = freq;
e21aff8a 1751
1752 copy_gsi = gsi_start_bb (copy_basic_block);
1753
1754 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e21aff8a 1755 {
d5e254e1 1756 gimple_seq stmts;
1757 gimple stmt = gsi_stmt (gsi);
1758 gimple orig_stmt = stmt;
1759 gimple_stmt_iterator stmts_gsi;
1760 bool stmt_added = false;
e21aff8a 1761
416c991f 1762 id->regimplify = false;
1763 stmts = remap_gimple_stmt (stmt, id);
1764
1765 if (gimple_seq_empty_p (stmts))
1766 continue;
1767
c2a4718a 1768 seq_gsi = copy_gsi;
726a989a 1769
1770 for (stmts_gsi = gsi_start (stmts);
1771 !gsi_end_p (stmts_gsi); )
e21aff8a 1772 {
1773 stmt = gsi_stmt (stmts_gsi);
1774
1775 /* Advance iterator now before stmt is moved to seq_gsi. */
1776 gsi_next (&stmts_gsi);
2b65dae5 1777
1778 if (gimple_nop_p (stmt))
1779 continue;
1780
1781 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1782 orig_stmt);
1783
1784 /* With return slot optimization we can end up with
1785 non-gimple (foo *)&this->m, fix that here. */
1786 if (is_gimple_assign (stmt)
1787 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1788 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1789 {
1790 tree new_rhs;
1791 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1792 gimple_assign_rhs1 (stmt),
1793 true, NULL, false,
1794 GSI_CONTINUE_LINKING);
1795 gimple_assign_set_rhs1 (stmt, new_rhs);
1796 id->regimplify = false;
1797 }
1798
1799 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
c2a4718a 1800
1801 if (id->regimplify)
1802 gimple_regimplify_operands (stmt, &seq_gsi);
1803
1804 stmt_added = true;
1805 }
1806
1807 if (!stmt_added)
1808 continue;
1809
1810 /* If copy_basic_block has been empty at the start of this iteration,
1811 call gsi_start_bb again to get at the newly added statements. */
1812 if (gsi_end_p (copy_gsi))
1813 copy_gsi = gsi_start_bb (copy_basic_block);
1814 else
1815 gsi_next (&copy_gsi);
110cfe1c 1816
1817 /* Process the new statement. The call to gimple_regimplify_operands
1818 possibly turned the statement into multiple statements, we
1819 need to process all of them. */
c2a4718a 1820 do
726a989a 1821 {
9187e02d 1822 tree fn;
538dd0b7 1823 gcall *call_stmt;
9187e02d 1824
c2a4718a 1825 stmt = gsi_stmt (copy_gsi);
1826 call_stmt = dyn_cast <gcall *> (stmt);
1827 if (call_stmt
1828 && gimple_call_va_arg_pack_p (call_stmt)
1829 && id->call_stmt)
1830 {
1831 /* __builtin_va_arg_pack () should be replaced by
1832 all arguments corresponding to ... in the caller. */
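		 /* Purely illustrative example (not from this source): if the
		    caller contains foo (x, 1, 2) and foo's body calls
		    bar (q, __builtin_va_arg_pack ()), the inlined call is
		    rewritten below as bar (q, 1, 2).  */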
1833 tree p;
538dd0b7 1834 gcall *new_call;
9771b263 1835 vec<tree> argarray;
538dd0b7 1836 size_t nargs = gimple_call_num_args (id->call_stmt);
1837 size_t n, i, nargs_to_copy;
1838 bool remove_bounds = false;
726a989a 1839
910ad8de 1840 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1841 nargs--;
1842
1843 /* Bounds should be removed from arg pack in case
1844 we handle not instrumented call in instrumented
1845 function. */
1846 nargs_to_copy = nargs;
538dd0b7 1847 if (gimple_call_with_bounds_p (id->call_stmt)
1848 && !gimple_call_with_bounds_p (stmt))
1849 {
1850 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1851 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1852 i++)
538dd0b7 1853 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1854 nargs_to_copy--;
1855 remove_bounds = true;
1856 }
1857
726a989a 1858 /* Create the new array of arguments. */
538dd0b7 1859 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1860 argarray.create (n);
1861 argarray.safe_grow_cleared (n);
1862
1863 /* Copy all the arguments before '...' */
9771b263 1864 memcpy (argarray.address (),
1865 gimple_call_arg_ptr (call_stmt, 0),
1866 gimple_call_num_args (call_stmt) * sizeof (tree));
726a989a 1867
1868 if (remove_bounds)
1869 {
1870 /* Append the rest of arguments removing bounds. */
1871 unsigned cur = gimple_call_num_args (call_stmt);
 1873 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1874 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1875 i++)
1876 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1877 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1878 gcc_assert (cur == n);
1879 }
1880 else
1881 {
1882 /* Append the arguments passed in '...' */
1883 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1884 gimple_call_arg_ptr (id->call_stmt, 0)
1885 + (gimple_call_num_args (id->call_stmt) - nargs),
1886 nargs * sizeof (tree));
1887 }
726a989a 1888
538dd0b7 1889 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1890 argarray);
1891
9771b263 1892 argarray.release ();
1893
1894 /* Copy all GIMPLE_CALL flags, location and block, except
1895 GF_CALL_VA_ARG_PACK. */
538dd0b7 1896 gimple_call_copy_flags (new_call, call_stmt);
1897 gimple_call_set_va_arg_pack (new_call, false);
1898 gimple_set_location (new_call, gimple_location (stmt));
1899 gimple_set_block (new_call, gimple_block (stmt));
538dd0b7 1900 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1901
1902 gsi_replace (&copy_gsi, new_call, false);
1903 stmt = new_call;
1904 }
1905 else if (is_gimple_call (stmt)
538dd0b7 1906 && id->call_stmt
1907 && (decl = gimple_call_fndecl (stmt))
1908 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1909 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
e0704a46 1910 {
1911 /* __builtin_va_arg_pack_len () should be replaced by
1912 the number of anonymous arguments. */
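		 /* Purely illustrative example (not from this source): inlining
		    foo (x, 1, 2), where foo has one named parameter, folds
		    __builtin_va_arg_pack_len () to the constant 2.  */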
538dd0b7 1913 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1914 tree count, p;
1915 gimple new_stmt;
1916
910ad8de 1917 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1918 nargs--;
1919
d5e254e1 1920 /* For instrumented calls we should ignore bounds. */
1921 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1922 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1923 i++)
538dd0b7 1924 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1925 nargs--;
1926
1927 count = build_int_cst (integer_type_node, nargs);
1928 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1929 gsi_replace (&copy_gsi, new_stmt, false);
1930 stmt = new_stmt;
1931 }
b8a00a4d 1932
 1933 /* Statements produced by inlining can be unfolded, especially
 1934 when we have constant propagated some operands. We can't fold
 1935 them right now for two reasons:
 1936 1) folding requires SSA_NAME_DEF_STMTs to be correct
 1937 2) we can't change function calls to builtins.
 1938 So we just mark the statement for later folding. We mark
 1939 all new statements, instead of just the statements that have changed
1940 by some nontrivial substitution so even statements made
1941 foldable indirectly are updated. If this turns out to be
1942 expensive, copy_body can be told to watch for nontrivial
1943 changes. */
1944 if (id->statements_to_fold)
6e2830c3 1945 id->statements_to_fold->add (stmt);
1946
1947 /* We're duplicating a CALL_EXPR. Find any corresponding
1948 callgraph edges and update or duplicate them. */
538dd0b7 1949 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
726a989a 1950 {
9b2a5ef7 1951 struct cgraph_edge *edge;
6ef5231b 1952
726a989a 1953 switch (id->transform_call_graph_edges)
e0704a46 1954 {
9b2a5ef7 1955 case CB_CGE_DUPLICATE:
d52f5295 1956 edge = id->src_node->get_edge (orig_stmt);
9b2a5ef7 1957 if (edge)
1958 {
1959 int edge_freq = edge->frequency;
1960 int new_freq;
1961 struct cgraph_edge *old_edge = edge;
538dd0b7 1962 edge = edge->clone (id->dst_node, call_stmt,
1963 gimple_uid (stmt),
1964 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1965 true);
1966 /* We could also just rescale the frequency, but
1967 doing so would introduce roundoff errors and make
1968 verifier unhappy. */
67348ccc 1969 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1970 copy_basic_block);
1971
1972 /* Speculative calls consist of two edges - direct and indirect.
1973 Duplicate the whole thing and distribute frequencies accordingly. */
1974 if (edge->speculative)
0d63a740 1975 {
1976 struct cgraph_edge *direct, *indirect;
1977 struct ipa_ref *ref;
1978
1979 gcc_assert (!edge->indirect_unknown_callee);
3dafb85c 1980 old_edge->speculative_call_info (direct, indirect, ref);
538dd0b7 1981 indirect = indirect->clone (id->dst_node, call_stmt,
1982 gimple_uid (stmt),
1983 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1984 true);
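			  /* Distribute the new block frequency between the direct
			     and indirect edges in proportion to their old
			     frequencies, capping each at CGRAPH_FREQ_MAX.  */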
1985 if (old_edge->frequency + indirect->frequency)
1986 {
1987 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1988 (old_edge->frequency + indirect->frequency)),
1989 CGRAPH_FREQ_MAX);
1990 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1991 (old_edge->frequency + indirect->frequency)),
1992 CGRAPH_FREQ_MAX);
1993 }
d122681a 1994 id->dst_node->clone_reference (ref, stmt);
1995 }
1996 else
1997 {
1998 edge->frequency = new_freq;
1999 if (dump_file
ea19eb9f 2000 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2001 && (edge_freq > edge->frequency + 10
2002 || edge_freq < edge->frequency - 10))
2003 {
2004 fprintf (dump_file, "Edge frequency estimated by "
2005 "cgraph %i diverge from inliner's estimate %i\n",
2006 edge_freq,
2007 edge->frequency);
2008 fprintf (dump_file,
2009 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2010 bb->index,
2011 bb->frequency,
2012 copy_basic_block->frequency);
2013 }
2014 }
2015 }
2016 break;
2017
2018 case CB_CGE_MOVE_CLONES:
d52f5295 2019 id->dst_node->set_call_stmt_including_clones (orig_stmt,
538dd0b7 2020 call_stmt);
d52f5295 2021 edge = id->dst_node->get_edge (stmt);
2022 break;
2023
2024 case CB_CGE_MOVE:
d52f5295 2025 edge = id->dst_node->get_edge (orig_stmt);
9b2a5ef7 2026 if (edge)
538dd0b7 2027 edge->set_call_stmt (call_stmt);
2028 break;
2029
2030 default:
2031 gcc_unreachable ();
110cfe1c 2032 }
f618d33e 2033
2034 /* Constant propagation on argument done during inlining
2035 may create new direct call. Produce an edge for it. */
b8698a0f 2036 if ((!edge
e33c6cd6 2037 || (edge->indirect_inlining_edge
9b2a5ef7 2038 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
67348ccc 2039 && id->dst_node->definition
2040 && (fn = gimple_call_fndecl (stmt)) != NULL)
2041 {
d52f5295 2042 struct cgraph_node *dest = cgraph_node::get (fn);
2043
2044 /* We have missing edge in the callgraph. This can happen
2045 when previous inlining turned an indirect call into a
0e3776db 2046 direct call by constant propagating arguments or we are
20a6bb58 2047 producing dead clone (for further cloning). In all
2048 other cases we hit a bug (incorrect node sharing is the
2049 most common reason for missing edges). */
2050 gcc_assert (!dest->definition
2051 || dest->address_taken
2052 || !id->src_node->definition
2053 || !id->dst_node->definition);
9b2a5ef7 2054 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
d52f5295 2055 id->dst_node->create_edge_including_clones
538dd0b7 2056 (dest, orig_stmt, call_stmt, bb->count,
67348ccc 2057 compute_call_stmt_bb_frequency (id->dst_node->decl,
0d63a740 2058 copy_basic_block),
898b8927 2059 CIF_ORIGINALLY_INDIRECT_CALL);
9b2a5ef7 2060 else
538dd0b7 2061 id->dst_node->create_edge (dest, call_stmt,
2062 bb->count,
2063 compute_call_stmt_bb_frequency
67348ccc 2064 (id->dst_node->decl,
960bfb69 2065 copy_basic_block))->inline_failed
2066 = CIF_ORIGINALLY_INDIRECT_CALL;
2067 if (dump_file)
2068 {
91382288 2069 fprintf (dump_file, "Created new direct edge to %s\n",
fec39fa6 2070 dest->name ());
2071 }
2072 }
9187e02d 2073
538dd0b7 2074 notice_special_calls (as_a <gcall *> (stmt));
726a989a 2075 }
e21aff8a 2076
2077 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2078 id->eh_map, id->eh_lp_nr);
726a989a 2079
b5b8b0ac 2080 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2081 {
2082 ssa_op_iter i;
2083 tree def;
2084
2085 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2086 if (TREE_CODE (def) == SSA_NAME)
2087 SSA_NAME_DEF_STMT (def) = stmt;
2088 }
2089
2090 gsi_next (&copy_gsi);
e21aff8a 2091 }
c2a4718a 2092 while (!gsi_end_p (copy_gsi));
2093
2094 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 2095 }
726a989a 2096
2097 return copy_basic_block;
2098}
2099
2100/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2101 form is quite easy, since dominator relationship for old basic blocks does
2102 not change.
2103
 2104 There is however an exception: inlining might change the dominator relation
 2105 across EH edges from basic blocks within the inlined function that lead
5305a4cb 2106 to landing pads in the function we inline into.
110cfe1c 2107
2108 The function fills in PHI_RESULTs of such PHI nodes if they refer
 2109 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2110 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2111 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2112 set, and this means that there will be no overlapping live ranges
2113 for the underlying symbol.
2114
 2115 This might change in the future if we allow redirecting of EH edges;
 2116 we might want to change the way we build the CFG pre-inlining to include
2117 all the possible edges then. */
2118static void
2119update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2120 bool can_throw, bool nonlocal_goto)
2121{
2122 edge e;
2123 edge_iterator ei;
2124
2125 FOR_EACH_EDGE (e, ei, bb->succs)
2126 if (!e->dest->aux
2127 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2128 {
2129 gphi *phi;
2130 gphi_iterator si;
110cfe1c 2131
2132 if (!nonlocal_goto)
2133 gcc_assert (e->flags & EDGE_EH);
726a989a 2134
2135 if (!can_throw)
2136 gcc_assert (!(e->flags & EDGE_EH));
2137
2138 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
110cfe1c 2139 {
2140 edge re;
2141
538dd0b7 2142 phi = si.phi ();
726a989a 2143
2144 /* For abnormal goto/call edges the receiver can be the
2145 ENTRY_BLOCK. Do not assert this cannot happen. */
e9705dc5 2146
2147 gcc_assert ((e->flags & EDGE_EH)
2148 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
e9705dc5 2149
e9705dc5 2150 re = find_edge (ret_bb, e->dest);
0107dca2 2151 gcc_checking_assert (re);
2152 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2153 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2154
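	     /* Reuse the PHI argument that already flows into E->dest from
		RET_BB on the corresponding edge; the new abnormal/EH edge
		carries the same value.  */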
2155 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2156 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2157 }
2158 }
2159}
2160
726a989a 2161
2162/* Copy edges from BB into its copy constructed earlier, scale profile
 2163 accordingly. Assume aux pointers point to the copies of each BB.
 2164 Return true if any debug stmts are left after a statement that must
 2165 end the basic block. */
726a989a 2166
90a7788b 2167static bool
92e776e9 2168copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
09b22f48 2169 basic_block abnormal_goto_dest)
e21aff8a 2170{
cceb1885 2171 basic_block new_bb = (basic_block) bb->aux;
2172 edge_iterator ei;
2173 edge old_edge;
726a989a 2174 gimple_stmt_iterator si;
e21aff8a 2175 int flags;
90a7788b 2176 bool need_debug_cleanup = false;
2177
2178 /* Use the indices from the original blocks to create edges for the
2179 new ones. */
2180 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2181 if (!(old_edge->flags & EDGE_EH))
2182 {
82d6e6fc 2183 edge new_edge;
e21aff8a 2184
e0704a46 2185 flags = old_edge->flags;
e21aff8a 2186
e0704a46 2187 /* Return edges do get a FALLTHRU flag when they get inlined. */
2188 if (old_edge->dest->index == EXIT_BLOCK
2189 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
fefa31b5 2190 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
e0704a46 2191 flags |= EDGE_FALLTHRU;
82d6e6fc 2192 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
8b47039c 2193 new_edge->count = apply_scale (old_edge->count, count_scale);
82d6e6fc 2194 new_edge->probability = old_edge->probability;
e0704a46 2195 }
2196
2197 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
90a7788b 2198 return false;
e21aff8a 2199
726a989a 2200 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e21aff8a 2201 {
726a989a 2202 gimple copy_stmt;
e9705dc5 2203 bool can_throw, nonlocal_goto;
e21aff8a 2204
726a989a 2205 copy_stmt = gsi_stmt (si);
b5b8b0ac 2206 if (!is_gimple_debug (copy_stmt))
f9a21e13 2207 update_stmt (copy_stmt);
726a989a 2208
e21aff8a 2209 /* Do this before the possible split_block. */
726a989a 2210 gsi_next (&si);
2211
2212 /* If this tree could throw an exception, there are two
2213 cases where we need to add abnormal edge(s): the
2214 tree wasn't in a region and there is a "current
2215 region" in the caller; or the original tree had
2216 EH edges. In both cases split the block after the tree,
2217 and add abnormal edge(s) as needed; we need both
2218 those from the callee and the caller.
2219 We check whether the copy can throw, because the const
2220 propagation can change an INDIRECT_REF which throws
2221 into a COMPONENT_REF which doesn't. If the copy
2222 can throw, the original could also throw. */
726a989a 2223 can_throw = stmt_can_throw_internal (copy_stmt);
2224 nonlocal_goto
2225 = (stmt_can_make_abnormal_goto (copy_stmt)
2226 && !computed_goto_p (copy_stmt));
2227
2228 if (can_throw || nonlocal_goto)
e21aff8a 2229 {
2230 if (!gsi_end_p (si))
2231 {
2232 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2233 gsi_next (&si);
2234 if (gsi_end_p (si))
2235 need_debug_cleanup = true;
2236 }
726a989a 2237 if (!gsi_end_p (si))
2238 /* Note that bb's predecessor edges aren't necessarily
2239 right at this point; split_block doesn't care. */
2240 {
2241 edge e = split_block (new_bb, copy_stmt);
110cfe1c 2242
e21aff8a 2243 new_bb = e->dest;
110cfe1c 2244 new_bb->aux = e->src->aux;
726a989a 2245 si = gsi_start_bb (new_bb);
e21aff8a 2246 }
e9705dc5 2247 }
e21aff8a 2248
1d65f45c 2249 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
538dd0b7 2250 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
1d65f45c 2251 else if (can_throw)
e9705dc5 2252 make_eh_edges (copy_stmt);
110cfe1c 2253
a6f30e66
RB
2254 /* If the call we inline cannot make abnormal goto do not add
2255 additional abnormal edges but only retain those already present
2256 in the original function body. */
09b22f48
JJ
2257 if (abnormal_goto_dest == NULL)
2258 nonlocal_goto = false;
e9705dc5 2259 if (nonlocal_goto)
09b22f48
JJ
2260 {
2261 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2262
2263 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2264 nonlocal_goto = false;
2265 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2266 in OpenMP regions which aren't allowed to be left abnormally.
2267 So, no need to add abnormal edge in that case. */
2268 else if (is_gimple_call (copy_stmt)
2269 && gimple_call_internal_p (copy_stmt)
2270 && (gimple_call_internal_fn (copy_stmt)
2271 == IFN_ABNORMAL_DISPATCHER)
2272 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2273 nonlocal_goto = false;
2274 else
2275 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2276 }
2277
2278 if ((can_throw || nonlocal_goto)
2279 && gimple_in_ssa_p (cfun))
726a989a 2280 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
e9705dc5 2281 can_throw, nonlocal_goto);
110cfe1c 2282 }
90a7788b 2283 return need_debug_cleanup;
2284}
2285
2286/* Copy the PHIs. All blocks and edges are copied, some blocks
 2287 were possibly split and new outgoing EH edges inserted.
 2288 BB points to the block of the original function and AUX pointers link
2289 the original and newly copied blocks. */
2290
2291static void
2292copy_phis_for_bb (basic_block bb, copy_body_data *id)
2293{
3d9a9f94 2294 basic_block const new_bb = (basic_block) bb->aux;
110cfe1c 2295 edge_iterator ei;
2296 gphi *phi;
2297 gphi_iterator si;
2298 edge new_edge;
2299 bool inserted = false;
110cfe1c 2300
355a7673 2301 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
110cfe1c 2302 {
726a989a 2303 tree res, new_res;
538dd0b7 2304 gphi *new_phi;
110cfe1c 2305
538dd0b7 2306 phi = si.phi ();
2307 res = PHI_RESULT (phi);
2308 new_res = res;
ea057359 2309 if (!virtual_operand_p (res))
110cfe1c 2310 {
726a989a 2311 walk_tree (&new_res, copy_tree_body_r, id, NULL);
dcc748dd 2312 new_phi = create_phi_node (new_res, new_bb);
2313 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2314 {
2315 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2316 tree arg;
2317 tree new_arg;
8b3057b3 2318 edge_iterator ei2;
5368224f 2319 location_t locus;
8b3057b3 2320
20a6bb58 2321 /* When doing partial cloning, we allow PHIs on the entry block
 2322 as long as all the arguments are the same. Find any incoming
 2323 edge to pick the argument to copy. */
2324 if (!old_edge)
2325 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2326 if (!old_edge->src->aux)
2327 break;
2328
2329 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2330 new_arg = arg;
726a989a 2331 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
110cfe1c 2332 gcc_assert (new_arg);
2333 /* With return slot optimization we can end up with
2334 non-gimple (foo *)&this->m, fix that here. */
2335 if (TREE_CODE (new_arg) != SSA_NAME
2336 && TREE_CODE (new_arg) != FUNCTION_DECL
2337 && !is_gimple_val (new_arg))
2338 {
2339 gimple_seq stmts = NULL;
2340 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2341 gsi_insert_seq_on_edge (new_edge, stmts);
2342 inserted = true;
36b6e793 2343 }
5368224f 2344 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2345 if (LOCATION_BLOCK (locus))
2346 {
2347 tree *n;
b787e7a2 2348 n = id->decl_map->get (LOCATION_BLOCK (locus));
5368224f 2349 gcc_assert (n);
2350 if (*n)
2351 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2352 else
2353 locus = LOCATION_LOCUS (locus);
5368224f 2354 }
2355 else
2356 locus = LOCATION_LOCUS (locus);
5368224f 2357
16917761 2358 add_phi_arg (new_phi, new_arg, new_edge, locus);
110cfe1c 2359 }
2360 }
2361 }
2362
2363 /* Commit the delayed edge insertions. */
2364 if (inserted)
2365 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2366 gsi_commit_one_edge_insert (new_edge, NULL);
2367}
2368
726a989a 2369
e21aff8a 2370/* Wrapper for remap_decl so it can be used as a callback. */
726a989a 2371
2372static tree
2373remap_decl_1 (tree decl, void *data)
2374{
1b369fae 2375 return remap_decl (decl, (copy_body_data *) data);
2376}
2377
110cfe1c 2378/* Build struct function and associated datastructures for the new clone
 2379 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
 2380 cfun to the function of new_fndecl (and current_function_decl too). */
2381
2382static void
0d63a740 2383initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
110cfe1c 2384{
110cfe1c 2385 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
0d63a740 2386 gcov_type count_scale;
110cfe1c 2387
2388 if (!DECL_ARGUMENTS (new_fndecl))
2389 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2390 if (!DECL_RESULT (new_fndecl))
2391 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2392
fefa31b5 2393 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2394 count_scale
2395 = GCOV_COMPUTE_SCALE (count,
fefa31b5 2396 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
110cfe1c 2397 else
0d63a740 2398 count_scale = REG_BR_PROB_BASE;
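  /* count_scale is a fixed-point ratio in which REG_BR_PROB_BASE stands for
     1.0; the ENTRY/EXIT block counts below are multiplied by count_scale and
     divided by REG_BR_PROB_BASE, so this default keeps the source counts.  */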
2399
2400 /* Register specific tree functions. */
726a989a 2401 gimple_register_cfg_hooks ();
2402
2403 /* Get clean struct function. */
2404 push_struct_function (new_fndecl);
2405
2406 /* We will rebuild these, so just sanity check that they are empty. */
2407 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2408 gcc_assert (cfun->local_decls == NULL);
2409 gcc_assert (cfun->cfg == NULL);
2410 gcc_assert (cfun->decl == new_fndecl);
2411
20a6bb58 2412 /* Copy items we preserve during cloning. */
2413 cfun->static_chain_decl = src_cfun->static_chain_decl;
2414 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2415 cfun->function_end_locus = src_cfun->function_end_locus;
a9e0d843 2416 cfun->curr_properties = src_cfun->curr_properties;
39ecc018 2417 cfun->last_verified = src_cfun->last_verified;
2418 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2419 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2420 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2421 cfun->stdarg = src_cfun->stdarg;
39ecc018 2422 cfun->after_inlining = src_cfun->after_inlining;
2423 cfun->can_throw_non_call_exceptions
2424 = src_cfun->can_throw_non_call_exceptions;
9510c5af 2425 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2426 cfun->returns_struct = src_cfun->returns_struct;
2427 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
39ecc018 2428
2429 init_empty_tree_cfg ();
2430
ea19eb9f 2431 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2432 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2433 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
110cfe1c 2434 REG_BR_PROB_BASE);
2435 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2436 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2437 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2438 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
110cfe1c 2439 REG_BR_PROB_BASE);
2440 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2441 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2442 if (src_cfun->eh)
2443 init_eh_for_function ();
2444
2445 if (src_cfun->gimple_df)
2446 {
5db9ba0c 2447 init_tree_ssa (cfun);
110cfe1c 2448 cfun->gimple_df->in_ssa_p = true;
3828719a 2449 init_ssa_operands (cfun);
110cfe1c 2450 }
2451}
2452
2453/* Helper function for copy_cfg_body. Move debug stmts from the end
2454 of NEW_BB to the beginning of successor basic blocks when needed. If the
2455 successor has multiple predecessors, reset them, otherwise keep
2456 their value. */
2457
2458static void
2459maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2460{
2461 edge e;
2462 edge_iterator ei;
2463 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2464
2465 if (gsi_end_p (si)
2466 || gsi_one_before_end_p (si)
2467 || !(stmt_can_throw_internal (gsi_stmt (si))
2468 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2469 return;
2470
2471 FOR_EACH_EDGE (e, ei, new_bb->succs)
2472 {
2473 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2474 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2475 while (is_gimple_debug (gsi_stmt (ssi)))
2476 {
2477 gimple stmt = gsi_stmt (ssi);
2478 gdebug *new_stmt;
2479 tree var;
2480 tree value;
2481
2482 /* For the last edge move the debug stmts instead of copying
2483 them. */
2484 if (ei_one_before_end_p (ei))
2485 {
2486 si = ssi;
2487 gsi_prev (&ssi);
ddb555ed 2488 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2489 gimple_debug_bind_reset_value (stmt);
2490 gsi_remove (&si, false);
2491 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2492 continue;
2493 }
2494
ddb555ed 2495 if (gimple_debug_bind_p (stmt))
90a7788b 2496 {
2497 var = gimple_debug_bind_get_var (stmt);
2498 if (single_pred_p (e->dest))
2499 {
2500 value = gimple_debug_bind_get_value (stmt);
2501 value = unshare_expr (value);
2502 }
2503 else
2504 value = NULL_TREE;
2505 new_stmt = gimple_build_debug_bind (var, value, stmt);
2506 }
2507 else if (gimple_debug_source_bind_p (stmt))
2508 {
2509 var = gimple_debug_source_bind_get_var (stmt);
2510 value = gimple_debug_source_bind_get_value (stmt);
2511 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2512 }
2513 else
ddb555ed 2514 gcc_unreachable ();
90a7788b 2515 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
9771b263 2516 id->debug_stmts.safe_push (new_stmt);
2517 gsi_prev (&ssi);
2518 }
2519 }
2520}
2521
2522/* Make a copy of the sub-loops of SRC_PARENT and place them
2523 as siblings of DEST_PARENT. */
2524
2525static void
f3b331d1 2526copy_loops (copy_body_data *id,
2527 struct loop *dest_parent, struct loop *src_parent)
2528{
2529 struct loop *src_loop = src_parent->inner;
2530 while (src_loop)
2531 {
2532 if (!id->blocks_to_copy
2533 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2534 {
2535 struct loop *dest_loop = alloc_loop ();
2536
2537 /* Assign the new loop its header and latch and associate
2538 those with the new loop. */
2539 dest_loop->header = (basic_block)src_loop->header->aux;
2540 dest_loop->header->loop_father = dest_loop;
2541 if (src_loop->latch != NULL)
2542 {
2543 dest_loop->latch = (basic_block)src_loop->latch->aux;
2544 dest_loop->latch->loop_father = dest_loop;
2545 }
2546
2547 /* Copy loop meta-data. */
2548 copy_loop_info (src_loop, dest_loop);
2549
2550 /* Finally place it into the loop array and the loop tree. */
0fc822d0 2551 place_new_loop (cfun, dest_loop);
2552 flow_loop_tree_node_add (dest_parent, dest_loop);
2553
2554 dest_loop->safelen = src_loop->safelen;
2555 dest_loop->dont_vectorize = src_loop->dont_vectorize;
b15b5979 2556 if (src_loop->force_vectorize)
f3b331d1 2557 {
2558 dest_loop->force_vectorize = true;
2559 cfun->has_force_vectorize_loops = true;
f3b331d1 2560 }
2561 if (src_loop->simduid)
2562 {
2563 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2564 cfun->has_simduid_loops = true;
2565 }
f3b331d1 2566
a9e0d843 2567 /* Recurse. */
f3b331d1 2568 copy_loops (id, dest_loop, src_loop);
2569 }
2570 src_loop = src_loop->next;
2571 }
2572}
2573
2574/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2575
2576void
2577redirect_all_calls (copy_body_data * id, basic_block bb)
2578{
2579 gimple_stmt_iterator si;
2580 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2581 {
2582 if (is_gimple_call (gsi_stmt (si)))
2583 {
d52f5295 2584 struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
042ae7d2 2585 if (edge)
3dafb85c 2586 edge->redirect_call_stmt_to_callee ();
2587 }
2588 }
2589}
2590
2591/* Convert estimated frequencies into counts for NODE, scaling COUNT
2592 with each bb's frequency. Used when NODE has a 0-weight entry
2593 but we are about to inline it into a non-zero count call bb.
2594 See the comments for handle_missing_profiles() in predict.c for
2595 when this can happen for COMDATs. */
2596
2597void
2598freqs_to_counts (struct cgraph_node *node, gcov_type count)
2599{
2600 basic_block bb;
2601 edge_iterator ei;
2602 edge e;
2603 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2604
2605 FOR_ALL_BB_FN(bb, fn)
2606 {
2607 bb->count = apply_scale (count,
2608 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2609 FOR_EACH_EDGE (e, ei, bb->succs)
2610 e->count = apply_probability (e->src->count, e->probability);
2611 }
2612}
2613
2614/* Make a copy of the body of FN so that it can be inserted inline in
2615 another function. Walks FN via CFG, returns new fndecl. */
2616
2617static tree
0d63a740 2618copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
91382288 2619 basic_block entry_block_map, basic_block exit_block_map,
f3b331d1 2620 basic_block new_entry)
e21aff8a 2621{
1b369fae 2622 tree callee_fndecl = id->src_fn;
e21aff8a 2623 /* Original cfun for the callee, doesn't change. */
1b369fae 2624 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
110cfe1c 2625 struct function *cfun_to_copy;
2626 basic_block bb;
2627 tree new_fndecl = NULL;
90a7788b 2628 bool need_debug_cleanup = false;
0d63a740 2629 gcov_type count_scale;
110cfe1c 2630 int last;
2631 int incoming_frequency = 0;
2632 gcov_type incoming_count = 0;
e21aff8a 2633
2634 /* This can happen for COMDAT routines that end up with 0 counts
2635 despite being called (see the comments for handle_missing_profiles()
2636 in predict.c as to why). Apply counts to the blocks in the callee
2637 before inlining, using the guessed edge frequencies, so that we don't
2638 end up with a 0-count inline body which can confuse downstream
2639 optimizations such as function splitting. */
fefa31b5 2640 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2641 {
2642 /* Apply the larger of the call bb count and the total incoming
2643 call edge count to the callee. */
2644 gcov_type in_count = 0;
2645 struct cgraph_edge *in_edge;
2646 for (in_edge = id->src_node->callers; in_edge;
2647 in_edge = in_edge->next_caller)
2648 in_count += in_edge->count;
2649 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2650 }
2651
fefa31b5 2652 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2653 count_scale
2654 = GCOV_COMPUTE_SCALE (count,
fefa31b5 2655 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
e21aff8a 2656 else
0d63a740 2657 count_scale = REG_BR_PROB_BASE;
2658
2659 /* Register specific tree functions. */
726a989a 2660 gimple_register_cfg_hooks ();
e21aff8a 2661
 2662 /* If we are inlining just a region of the function, make sure to connect
 2663 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
 2664 part of a loop, we must compute the frequency and probability of
 2665 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
20a6bb58 2666 probabilities of edges incoming from the nonduplicated region. */
2667 if (new_entry)
2668 {
2669 edge e;
2670 edge_iterator ei;
2671
2672 FOR_EACH_EDGE (e, ei, new_entry->preds)
2673 if (!e->src->aux)
2674 {
2675 incoming_frequency += EDGE_FREQUENCY (e);
2676 incoming_count += e->count;
b35366ce 2677 }
8b47039c 2678 incoming_count = apply_scale (incoming_count, count_scale);
20a6bb58 2679 incoming_frequency
8b47039c 2680 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2681 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2682 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2683 }
2684
e21aff8a 2685 /* Must have a CFG here at this point. */
fefa31b5 2686 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2687 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2688
2689 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2690
2691 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2692 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2693 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2694 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
e21aff8a 2695
2696 /* Duplicate any exception-handling regions. */
2697 if (cfun->eh)
2698 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2699 remap_decl_1, id);
726a989a 2700
2701 /* Use aux pointers to map the original blocks to copy. */
2702 FOR_EACH_BB_FN (bb, cfun_to_copy)
f3b331d1 2703 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2704 {
2705 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2706 bb->aux = new_bb;
2707 new_bb->aux = bb;
a9e0d843 2708 new_bb->loop_father = entry_block_map->loop_father;
91382288 2709 }
110cfe1c 2710
8b1c6fd7 2711 last = last_basic_block_for_fn (cfun);
726a989a 2712
e21aff8a 2713 /* Now that we've duplicated the blocks, duplicate their edges. */
09b22f48 2714 basic_block abnormal_goto_dest = NULL;
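  /* If the call being inlined can itself make an abnormal goto and is the
     last statement of its block, remember the caller's abnormal-goto
     dispatcher block so the copied statements can be wired to it below.  */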
2715 if (id->call_stmt
2716 && stmt_can_make_abnormal_goto (id->call_stmt))
09b22f48 2717 {
538dd0b7 2718 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
09b22f48 2719
538dd0b7 2720 bb = gimple_bb (id->call_stmt);
2721 gsi_next (&gsi);
2722 if (gsi_end_p (gsi))
2723 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2724 }
e21aff8a 2725 FOR_ALL_BB_FN (bb, cfun_to_copy)
2726 if (!id->blocks_to_copy
2727 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
92e776e9 2728 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
09b22f48 2729 abnormal_goto_dest);
726a989a 2730
91382288 2731 if (new_entry)
110cfe1c 2732 {
b35366ce 2733 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
91382288 2734 e->probability = REG_BR_PROB_BASE;
20a6bb58 2735 e->count = incoming_count;
110cfe1c 2736 }
726a989a 2737
a9e0d843 2738 /* Duplicate the loop tree, if available and wanted. */
0fc822d0 2739 if (loops_for_fn (src_cfun) != NULL
2740 && current_loops != NULL)
2741 {
f3b331d1 2742 copy_loops (id, entry_block_map->loop_father,
0fc822d0 2743 get_loop (src_cfun, 0));
2744 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2745 loops_state_set (LOOPS_NEED_FIXUP);
2746 }
2747
2748 /* If the loop tree in the source function needed fixup, mark the
2749 destination loop tree for fixup, too. */
2750 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2751 loops_state_set (LOOPS_NEED_FIXUP);
2752
2753 if (gimple_in_ssa_p (cfun))
2754 FOR_ALL_BB_FN (bb, cfun_to_copy)
2755 if (!id->blocks_to_copy
2756 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2757 copy_phis_for_bb (bb, id);
2758
2759 FOR_ALL_BB_FN (bb, cfun_to_copy)
2760 if (bb->aux)
2761 {
2762 if (need_debug_cleanup
2763 && bb->index != ENTRY_BLOCK
2764 && bb->index != EXIT_BLOCK)
2765 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2766 /* Update call edge destinations. This can not be done before loop
2767 info is updated, because we may split basic blocks. */
2768 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2769 redirect_all_calls (id, (basic_block)bb->aux);
2770 ((basic_block)bb->aux)->aux = NULL;
2771 bb->aux = NULL;
2772 }
2773
2774 /* Zero out AUX fields of newly created block during EH edge
2775 insertion. */
8b1c6fd7 2776 for (; last < last_basic_block_for_fn (cfun); last++)
2777 {
2778 if (need_debug_cleanup)
2779 maybe_move_debug_stmts_to_successors (id,
2780 BASIC_BLOCK_FOR_FN (cfun, last));
2781 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2782 /* Update call edge destinations. This can not be done before loop
2783 info is updated, because we may split basic blocks. */
2784 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
06e28de2 2785 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
90a7788b 2786 }
2787 entry_block_map->aux = NULL;
2788 exit_block_map->aux = NULL;
e21aff8a 2789
2790 if (id->eh_map)
2791 {
b787e7a2 2792 delete id->eh_map;
2793 id->eh_map = NULL;
2794 }
2795 if (id->dependence_map)
2796 {
2797 delete id->dependence_map;
2798 id->dependence_map = NULL;
2799 }
1d65f45c 2800
2801 return new_fndecl;
2802}
2803
2804/* Copy the debug STMT using ID. We deal with these statements in a
2805 special way: if any variable in their VALUE expression wasn't
2806 remapped yet, we won't remap it, because that would get decl uids
2807 out of sync, causing codegen differences between -g and -g0. If
2808 this arises, we drop the VALUE expression altogether. */
2809
2810static void
538dd0b7 2811copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2812{
2813 tree t, *n;
2814 struct walk_stmt_info wi;
2815
2816 if (gimple_block (stmt))
2817 {
b787e7a2 2818 n = id->decl_map->get (gimple_block (stmt));
16917761 2819 gimple_set_block (stmt, n ? *n : id->block);
b5b8b0ac 2820 }
2821
2822 /* Remap all the operands in COPY. */
2823 memset (&wi, 0, sizeof (wi));
2824 wi.info = id;
2825
2826 processing_debug_stmt = 1;
2827
2828 if (gimple_debug_source_bind_p (stmt))
2829 t = gimple_debug_source_bind_get_var (stmt);
2830 else
2831 t = gimple_debug_bind_get_var (stmt);
2832
2833 if (TREE_CODE (t) == PARM_DECL && id->debug_map
b787e7a2 2834 && (n = id->debug_map->get (t)))
2835 {
2836 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2837 t = *n;
2838 }
d17af147 2839 else if (TREE_CODE (t) == VAR_DECL
5f564b8f 2840 && !is_global_var (t)
b787e7a2 2841 && !id->decl_map->get (t))
d17af147 2842 /* T is a non-localized variable. */;
2843 else
2844 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2845
2846 if (gimple_debug_bind_p (stmt))
2847 {
2848 gimple_debug_bind_set_var (stmt, t);
b5b8b0ac 2849
2850 if (gimple_debug_bind_has_value_p (stmt))
2851 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2852 remap_gimple_op_r, &wi, NULL);
b5b8b0ac 2853
2854 /* Punt if any decl couldn't be remapped. */
2855 if (processing_debug_stmt < 0)
2856 gimple_debug_bind_reset_value (stmt);
2857 }
2858 else if (gimple_debug_source_bind_p (stmt))
2859 {
2860 gimple_debug_source_bind_set_var (stmt, t);
2861 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2862 remap_gimple_op_r, &wi, NULL);
2863 /* When inlining and source bind refers to one of the optimized
2864 away parameters, change the source bind into normal debug bind
2865 referring to the corresponding DEBUG_EXPR_DECL that should have
2866 been bound before the call stmt. */
2867 t = gimple_debug_source_bind_get_value (stmt);
2868 if (t != NULL_TREE
2869 && TREE_CODE (t) == PARM_DECL
538dd0b7 2870 && id->call_stmt)
878eef4a 2871 {
9771b263 2872 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2873 unsigned int i;
2874 if (debug_args != NULL)
2875 {
2876 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2877 if ((**debug_args)[i] == DECL_ORIGIN (t)
2878 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
878eef4a 2879 {
9771b263 2880 t = (**debug_args)[i + 1];
daa6e488 2881 stmt->subcode = GIMPLE_DEBUG_BIND;
2882 gimple_debug_bind_set_value (stmt, t);
2883 break;
2884 }
2885 }
2886 }
ddb555ed 2887 }
2888
2889 processing_debug_stmt = 0;
2890
2891 update_stmt (stmt);
2892}
2893
2894/* Process deferred debug stmts. In order to give values better odds
2895 of being successfully remapped, we delay the processing of debug
2896 stmts until all other stmts that might require remapping are
2897 processed. */
2898
2899static void
2900copy_debug_stmts (copy_body_data *id)
2901{
2902 size_t i;
538dd0b7 2903 gdebug *stmt;
b5b8b0ac 2904
9771b263 2905 if (!id->debug_stmts.exists ())
2906 return;
2907
9771b263 2908 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2909 copy_debug_stmt (stmt, id);
2910
9771b263 2911 id->debug_stmts.release ();
2912}
2913
2914/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2915 another function. */
2916
2917static tree
2918copy_tree_body (copy_body_data *id)
2919{
2920 tree fndecl = id->src_fn;
2921 tree body = DECL_SAVED_TREE (fndecl);
2922
2923 walk_tree (&body, copy_tree_body_r, id, NULL);
2924
2925 return body;
2926}
2927
2928/* Make a copy of the body of FN so that it can be inserted inline in
2929 another function. */
2930
e21aff8a 2931static tree
0d63a740 2932copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
91382288 2933 basic_block entry_block_map, basic_block exit_block_map,
f3b331d1 2934 basic_block new_entry)
e21aff8a 2935{
1b369fae 2936 tree fndecl = id->src_fn;
2937 tree body;
2938
2939 /* If this body has a CFG, walk CFG and copy. */
fefa31b5 2940 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
91382288 2941 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
f3b331d1 2942 new_entry);
b5b8b0ac 2943 copy_debug_stmts (id);
2944
2945 return body;
2946}
2947
2948/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2949 defined in function FN, or of a data member thereof. */
2950
2951static bool
2952self_inlining_addr_expr (tree value, tree fn)
2953{
2954 tree var;
2955
2956 if (TREE_CODE (value) != ADDR_EXPR)
2957 return false;
2958
2959 var = get_base_address (TREE_OPERAND (value, 0));
e21aff8a 2960
50886bf1 2961 return var && auto_var_in_fn_p (var, fn);
2962}
2963
2964/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2965 lexical block and line number information from base_stmt, if given,
2966 or from the last stmt of the block otherwise. */
2967
2968static gimple
2969insert_init_debug_bind (copy_body_data *id,
2970 basic_block bb, tree var, tree value,
2971 gimple base_stmt)
2972{
2973 gimple note;
2974 gimple_stmt_iterator gsi;
2975 tree tracked_var;
2976
2977 if (!gimple_in_ssa_p (id->src_cfun))
2978 return NULL;
2979
2980 if (!MAY_HAVE_DEBUG_STMTS)
2981 return NULL;
2982
2983 tracked_var = target_for_debug_bind (var);
2984 if (!tracked_var)
2985 return NULL;
2986
2987 if (bb)
2988 {
2989 gsi = gsi_last_bb (bb);
2990 if (!base_stmt && !gsi_end_p (gsi))
2991 base_stmt = gsi_stmt (gsi);
2992 }
2993
2994 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2995
2996 if (bb)
2997 {
2998 if (!gsi_end_p (gsi))
2999 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3000 else
3001 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3002 }
3003
3004 return note;
3005}
3006
6de9cd9a 3007static void
b5b8b0ac 3008insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
0f1961a2 3009{
3010 /* If VAR represents a zero-sized variable, it's possible that the
3011 assignment statement may result in no gimple statements. */
3012 if (init_stmt)
3013 {
3014 gimple_stmt_iterator si = gsi_last_bb (bb);
0f1961a2 3015
3016 /* We can end up with init statements that store to a non-register
3017 from a rhs with a conversion. Handle that here by forcing the
3018 rhs into a temporary. gimple_regimplify_operands is not
3019 prepared to do this for us. */
3020 if (!is_gimple_debug (init_stmt)
3021 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3022 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3023 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3024 {
3025 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3026 gimple_expr_type (init_stmt),
3027 gimple_assign_rhs1 (init_stmt));
3028 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3029 GSI_NEW_STMT);
3030 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3031 gimple_assign_set_rhs1 (init_stmt, rhs);
3032 }
3033 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3034 gimple_regimplify_operands (init_stmt, &si);
3035
3036 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
3037 {
3038 tree def = gimple_assign_lhs (init_stmt);
3039 insert_init_debug_bind (id, bb, def, def, init_stmt);
b5b8b0ac 3040 }
c2a4718a 3041 }
3042}
3043
3044/* Initialize parameter P with VALUE. If needed, produce init statement
3045 at the end of BB. When BB is NULL, we return init statement to be
3046 output later. */
3047static gimple
1b369fae 3048setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e21aff8a 3049 basic_block bb, tree *vars)
6de9cd9a 3050{
0f1961a2 3051 gimple init_stmt = NULL;
6de9cd9a 3052 tree var;
f4088621 3053 tree rhs = value;
110cfe1c 3054 tree def = (gimple_in_ssa_p (cfun)
32244553 3055 ? ssa_default_def (id->src_cfun, p) : NULL);
6de9cd9a 3056
3057 if (value
3058 && value != error_mark_node
3059 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
c54e3854 3060 {
c4ac6e94 3061 /* If we can match up types by promotion/demotion do so. */
c54e3854 3062 if (fold_convertible_p (TREE_TYPE (p), value))
c4ac6e94 3063 rhs = fold_convert (TREE_TYPE (p), value);
c54e3854 3064 else
3065 {
3066 /* ??? For valid programs we should not end up here.
3067 Still if we end up with truly mismatched types here, fall back
3068 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3069 GIMPLE to the following passes. */
3070 if (!is_gimple_reg_type (TREE_TYPE (value))
3071 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3072 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3073 else
3074 rhs = build_zero_cst (TREE_TYPE (p));
3075 }
c54e3854 3076 }
f4088621 3077
3078 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3079 here since the type of this decl must be visible to the calling
3080 function. */
3081 var = copy_decl_to_var (p, id);
3082
b5b8b0ac 3083 /* Declare this new variable. */
910ad8de 3084 DECL_CHAIN (var) = *vars;
3085 *vars = var;
3086
3087 /* Make gimplifier happy about this variable. */
3088 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3089
110cfe1c 3090 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3091 we would not need to create a new variable here at all, if it
3092 weren't for debug info. Still, we can just use the argument
3093 value. */
3094 if (TREE_READONLY (p)
3095 && !TREE_ADDRESSABLE (p)
3096 && value && !TREE_SIDE_EFFECTS (value)
3097 && !def)
6de9cd9a 3098 {
3099 /* We may produce non-gimple trees by adding NOPs or introduce
 3100 invalid sharing when the operand is not really constant.
 3101 It is not a big deal to prohibit constant propagation here, as
 3102 we will constant propagate in the DOM1 pass anyway. */
3103 if (is_gimple_min_invariant (value)
3104 && useless_type_conversion_p (TREE_TYPE (p),
3105 TREE_TYPE (value))
04482133
AO
3106 /* We have to be very careful about ADDR_EXPR. Make sure
3107 the base variable isn't a local variable of the inlined
3108 function, e.g., when doing recursive inlining, direct or
3109 mutually-recursive or whatever, which is why we don't
3110 just test whether fn == current_function_decl. */
3111 && ! self_inlining_addr_expr (value, fn))
6de9cd9a 3112 {
6de9cd9a 3113 insert_decl_map (id, p, value);
b5b8b0ac
AO
3114 insert_debug_decl_map (id, p, var);
3115 return insert_init_debug_bind (id, bb, var, value, NULL);
6de9cd9a
DN
3116 }
3117 }
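  /* Illustrative sketch (hypothetical code, not from the GCC sources) of
     why the ADDR_EXPR check above matters.  Consider a self-recursive
     callee:

        static int
        rec (int *p, int depth)
        {
          int local = depth;
          if (depth > 0)
            return rec (&local, depth - 1);
          return *p;
        }

     When the recursive call is inlined into REC itself, the argument
     &local names a local of the very function being copied; remapping the
     copied body could redirect that address to the freshly created inner
     copy of LOCAL instead of the caller's instance, which is why
     self_inlining_addr_expr refuses the propagation.  */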
3118
6de9cd9a
DN
3119 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3120 that way, when the PARM_DECL is encountered, it will be
3121 automatically replaced by the VAR_DECL. */
7c7d3047 3122 insert_decl_map (id, p, var);
6de9cd9a 3123
6de9cd9a
DN
3124 /* Even if P was TREE_READONLY, the new VAR should not be.
3125 In the original code, we would have constructed a
3126 temporary, and then the function body would have never
3127 changed the value of P. However, now, we will be
3128 constructing VAR directly. The constructor body may
3129 change its value multiple times as it is being
3130 constructed. Therefore, it must not be TREE_READONLY;
3131 the back-end assumes that a TREE_READONLY variable is
3132 assigned to only once. */
3133 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3134 TREE_READONLY (var) = 0;
3135
110cfe1c
JH
3136 /* If there is no setup required and we are in SSA, take the easy route
3137 replacing all SSA names representing the function parameter by the
3138 SSA name passed to the function.
3139
3140 We need to construct a map for the variable anyway as it might be used
3141 in different SSA names when the parameter is set in the function.
3142
8454d27e
JH
3143 Do replacement at -O0 for const arguments replaced by constant.
3144 This is important for builtin_constant_p and other constructs requiring
b5b8b0ac 3145 a constant argument to be visible in the inlined function body. */
110cfe1c 3146 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
8454d27e
JH
3147 && (optimize
3148 || (TREE_READONLY (p)
3149 && is_gimple_min_invariant (rhs)))
110cfe1c 3150 && (TREE_CODE (rhs) == SSA_NAME
9b718f81
JH
3151 || is_gimple_min_invariant (rhs))
3152 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
110cfe1c
JH
3153 {
3154 insert_decl_map (id, def, rhs);
b5b8b0ac 3155 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c
JH
3156 }
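  /* Illustrative sketch (hypothetical code, not from the GCC sources) of
     why the -O0 replacement above matters for constant arguments:

        static inline int __attribute__ ((always_inline))
        is_literal (int x)
        {
          return __builtin_constant_p (x);
        }

        int
        caller (void)
        {
          return is_literal (42);
        }

     Even at -O0 the constant 42 has to stay visible as the value of X in
     the inlined body so that __builtin_constant_p can fold to 1.  */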
3157
f6f2da7d
JH
3158 /* If the value of argument is never used, don't care about initializing
3159 it. */
1cf5abb3 3160 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
f6f2da7d
JH
3161 {
3162 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
b5b8b0ac 3163 return insert_init_debug_bind (id, bb, var, rhs, NULL);
f6f2da7d
JH
3164 }
3165
6de9cd9a
DN
3166 /* Initialize this VAR_DECL from the equivalent argument. Convert
3167 the argument to the proper type in case it was promoted. */
3168 if (value)
3169 {
6de9cd9a 3170 if (rhs == error_mark_node)
110cfe1c 3171 {
7c7d3047 3172 insert_decl_map (id, p, var);
b5b8b0ac 3173 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c 3174 }
afe08db5 3175
73dab33b 3176 STRIP_USELESS_TYPE_CONVERSION (rhs);
6de9cd9a 3177
6b18b1a3 3178 /* If we are in SSA form properly remap the default definition
27eb31c9
RG
3179 or assign to a dummy SSA name if the parameter is unused and
3180 we are not optimizing. */
6b18b1a3 3181 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
110cfe1c 3182 {
6b18b1a3
RG
3183 if (def)
3184 {
3185 def = remap_ssa_name (def, id);
3186 init_stmt = gimple_build_assign (def, rhs);
3187 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
32244553 3188 set_ssa_default_def (cfun, var, NULL);
6b18b1a3 3189 }
27eb31c9
RG
3190 else if (!optimize)
3191 {
3192 def = make_ssa_name (var, NULL);
3193 init_stmt = gimple_build_assign (def, rhs);
3194 }
110cfe1c
JH
3195 }
3196 else
726a989a 3197 init_stmt = gimple_build_assign (var, rhs);
6de9cd9a 3198
0f1961a2 3199 if (bb && init_stmt)
b5b8b0ac 3200 insert_init_stmt (id, bb, init_stmt);
6de9cd9a 3201 }
0f1961a2 3202 return init_stmt;
6de9cd9a
DN
3203}
3204
d4e4baa9 3205/* Generate code to initialize the parameters of the function at the
726a989a 3206 top of the stack in ID from the GIMPLE_CALL STMT. */
d4e4baa9 3207
e21aff8a 3208static void
726a989a 3209initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e21aff8a 3210 tree fn, basic_block bb)
d4e4baa9 3211{
d4e4baa9 3212 tree parms;
726a989a 3213 size_t i;
d4e4baa9 3214 tree p;
d436bff8 3215 tree vars = NULL_TREE;
726a989a 3216 tree static_chain = gimple_call_chain (stmt);
d4e4baa9
AO
3217
3218 /* Figure out what the parameters are. */
18c6ada9 3219 parms = DECL_ARGUMENTS (fn);
d4e4baa9 3220
d4e4baa9
AO
3221 /* Loop through the parameter declarations, replacing each with an
3222 equivalent VAR_DECL, appropriately initialized. */
910ad8de 3223 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
726a989a
RB
3224 {
3225 tree val;
3226 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3227 setup_one_parameter (id, p, val, fn, bb, &vars);
3228 }
ea184343
RG
3229 /* After remapping the parameters, remap their types. This has to be done
3230 in a second loop over all parameters to appropriately remap
3231 variable sized arrays when the size is specified in a
3232 parameter following the array. */
910ad8de 3233 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
ea184343 3234 {
b787e7a2 3235 tree *varp = id->decl_map->get (p);
ea184343
RG
3236 if (varp
3237 && TREE_CODE (*varp) == VAR_DECL)
3238 {
72aa3dca 3239 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
32244553 3240 ? ssa_default_def (id->src_cfun, p) : NULL);
72aa3dca
RG
3241 tree var = *varp;
3242 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
ea184343
RG
3243 /* Also remap the default definition if it was remapped
3244 to the default definition of the parameter replacement
3245 by the parameter setup. */
72aa3dca 3246 if (def)
ea184343 3247 {
b787e7a2 3248 tree *defp = id->decl_map->get (def);
ea184343
RG
3249 if (defp
3250 && TREE_CODE (*defp) == SSA_NAME
72aa3dca
RG
3251 && SSA_NAME_VAR (*defp) == var)
3252 TREE_TYPE (*defp) = TREE_TYPE (var);
ea184343
RG
3253 }
3254 }
3255 }
4838c5ee 3256
6de9cd9a
DN
3257 /* Initialize the static chain. */
3258 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
ea99e0be 3259 gcc_assert (fn != current_function_decl);
6de9cd9a
DN
3260 if (p)
3261 {
3262 /* No static chain? Seems like a bug in tree-nested.c. */
1e128c5f 3263 gcc_assert (static_chain);
4838c5ee 3264
e21aff8a 3265 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
4838c5ee
AO
3266 }
3267
e21aff8a 3268 declare_inline_vars (id->block, vars);
d4e4baa9
AO
3269}
3270
726a989a 3271
e21aff8a
SB
3272/* Declare a return variable to replace the RESULT_DECL for the
3273 function we are calling. An appropriate DECL_STMT is returned.
3274 The USE_STMT is filled to contain a use of the declaration to
3275 indicate the return value of the function.
3276
110cfe1c
JH
3277 RETURN_SLOT, if non-null, is the place where to store the result. It
3278 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
726a989a 3279 was the LHS of the MODIFY_EXPR to which this call is the RHS.
7740f00d 3280
d5e254e1
IE
3281 RETURN_BOUNDS holds a destination for returned bounds.
3282
0f900dfa
JJ
3283 The return value is a (possibly null) value that holds the result
3284 as seen by the caller. */
d4e4baa9 3285
d436bff8 3286static tree
6938f93f 3287declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
d5e254e1 3288 tree return_bounds, basic_block entry_bb)
d4e4baa9 3289{
1b369fae 3290 tree callee = id->src_fn;
7740f00d
RH
3291 tree result = DECL_RESULT (callee);
3292 tree callee_type = TREE_TYPE (result);
ea2edf88 3293 tree caller_type;
7740f00d 3294 tree var, use;
d4e4baa9 3295
ea2edf88
RG
3296 /* Handle type-mismatches in the function declaration return type
3297 vs. the call expression. */
3298 if (modify_dest)
3299 caller_type = TREE_TYPE (modify_dest);
3300 else
3301 caller_type = TREE_TYPE (TREE_TYPE (callee));
3302
1a2c27e9
EB
3303 /* We don't need to do anything for functions that don't return anything. */
3304 if (VOID_TYPE_P (callee_type))
0f900dfa 3305 return NULL_TREE;
d4e4baa9 3306
cc77ae10 3307 /* If there was a return slot, then the return value is the
7740f00d 3308 dereferenced address of that object. */
110cfe1c 3309 if (return_slot)
7740f00d 3310 {
110cfe1c 3311 /* The front end shouldn't have used both return_slot and
7740f00d 3312 a modify expression. */
1e128c5f 3313 gcc_assert (!modify_dest);
cc77ae10 3314 if (DECL_BY_REFERENCE (result))
110cfe1c
JH
3315 {
3316 tree return_slot_addr = build_fold_addr_expr (return_slot);
3317 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3318
3319 /* We are going to construct *&return_slot and we can't do that
b8698a0f 3320 for variables believed not to be addressable.
110cfe1c
JH
3321
3322 FIXME: This check possibly can match, because values returned
3323 via return slot optimization are not believed to have their address
3324 taken by alias analysis. */
3325 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
110cfe1c
JH
3326 var = return_slot_addr;
3327 }
cc77ae10 3328 else
110cfe1c
JH
3329 {
3330 var = return_slot;
3331 gcc_assert (TREE_CODE (var) != SSA_NAME);
62ba699e
RB
3332 if (TREE_ADDRESSABLE (result))
3333 mark_addressable (var);
110cfe1c 3334 }
0890b981
AP
3335 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3336 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3337 && !DECL_GIMPLE_REG_P (result)
22918034 3338 && DECL_P (var))
0890b981 3339 DECL_GIMPLE_REG_P (var) = 0;
7740f00d
RH
3340 use = NULL;
3341 goto done;
3342 }
3343
3344 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 3345 gcc_assert (!TREE_ADDRESSABLE (callee_type));
7740f00d
RH
3346
3347 /* Attempt to avoid creating a new temporary variable. */
110cfe1c
JH
3348 if (modify_dest
3349 && TREE_CODE (modify_dest) != SSA_NAME)
7740f00d
RH
3350 {
3351 bool use_it = false;
3352
3353 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 3354 if (!useless_type_conversion_p (callee_type, caller_type))
7740f00d
RH
3355 use_it = false;
3356
3357 /* ??? If we're assigning to a variable sized type, then we must
3358 reuse the destination variable, because we've no good way to
3359 create variable sized temporaries at this point. */
3360 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3361 use_it = true;
3362
3363 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3364 reuse it as the result of the call directly. Don't do this if
3365 it would promote MODIFY_DEST to addressable. */
e2f9fe42
RH
3366 else if (TREE_ADDRESSABLE (result))
3367 use_it = false;
3368 else
3369 {
3370 tree base_m = get_base_address (modify_dest);
3371
3372 /* If the base isn't a decl, then it's a pointer, and we don't
3373 know where that's going to go. */
3374 if (!DECL_P (base_m))
3375 use_it = false;
3376 else if (is_global_var (base_m))
3377 use_it = false;
0890b981
AP
3378 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3379 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3380 && !DECL_GIMPLE_REG_P (result)
3381 && DECL_GIMPLE_REG_P (base_m))
1d327c16 3382 use_it = false;
e2f9fe42
RH
3383 else if (!TREE_ADDRESSABLE (base_m))
3384 use_it = true;
3385 }
7740f00d
RH
3386
3387 if (use_it)
3388 {
3389 var = modify_dest;
3390 use = NULL;
3391 goto done;
3392 }
3393 }
3394
1e128c5f 3395 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 3396
c08cd4c1 3397 var = copy_result_decl_to_var (result, id);
7740f00d 3398 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
7740f00d 3399
6de9cd9a 3400 /* Do not have the rest of GCC warn about this variable as it should
471854f8 3401 not be visible to the user. */
6de9cd9a 3402 TREE_NO_WARNING (var) = 1;
d4e4baa9 3403
c08cd4c1
JM
3404 declare_inline_vars (id->block, var);
3405
7740f00d
RH
3406 /* Build the use expr. If the return type of the function was
3407 promoted, convert it back to the expected type. */
3408 use = var;
f4088621 3409 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
c4ac6e94
RG
3410 {
3411 /* If we can match up types by promotion/demotion do so. */
3412 if (fold_convertible_p (caller_type, var))
3413 use = fold_convert (caller_type, var);
3414 else
3415 {
3416 /* ??? For valid programs we should not end up here.
3417 Still if we end up with truly mismatched types here, fall back
3418 to using a MEM_REF to not leak invalid GIMPLE to the following
3419 passes. */
3420 /* Prevent var from being written into SSA form. */
3421 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3422 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3423 DECL_GIMPLE_REG_P (var) = false;
3424 else if (is_gimple_reg_type (TREE_TYPE (var)))
3425 TREE_ADDRESSABLE (var) = true;
3426 use = fold_build2 (MEM_REF, caller_type,
3427 build_fold_addr_expr (var),
3428 build_int_cst (ptr_type_node, 0));
3429 }
3430 }
b8698a0f 3431
73dab33b 3432 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 3433
c08cd4c1 3434 if (DECL_BY_REFERENCE (result))
32848948
RG
3435 {
3436 TREE_ADDRESSABLE (var) = 1;
3437 var = build_fold_addr_expr (var);
3438 }
c08cd4c1 3439
7740f00d 3440 done:
d4e4baa9
AO
3441 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3442 way, when the RESULT_DECL is encountered, it will be
6938f93f
JH
3443 automatically replaced by the VAR_DECL.
3444
3445 When returning by reference, ensure that RESULT_DECL remaps to
3446 gimple_val. */
3447 if (DECL_BY_REFERENCE (result)
3448 && !is_gimple_val (var))
3449 {
3450 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3451 insert_decl_map (id, result, temp);
6b18b1a3
RG
3452 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3453 its default_def SSA_NAME. */
3454 if (gimple_in_ssa_p (id->src_cfun)
3455 && is_gimple_reg (result))
3456 {
3457 temp = make_ssa_name (temp, NULL);
32244553 3458 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
6b18b1a3 3459 }
6938f93f
JH
3460 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3461 }
3462 else
3463 insert_decl_map (id, result, var);
d4e4baa9 3464
6de9cd9a
DN
3465 /* Remember this so we can ignore it in remap_decls. */
3466 id->retvar = var;
3467
d5e254e1
IE
3468 /* If returned bounds are used, then make var for them. */
3469 if (return_bounds)
3470 {
3471 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3472 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3473 TREE_NO_WARNING (bndtemp) = 1;
3474 declare_inline_vars (id->block, bndtemp);
3475
3476 id->retbnd = bndtemp;
3477 insert_init_stmt (id, entry_bb,
3478 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3479 }
3480
0f900dfa 3481 return use;
d4e4baa9
AO
3482}
3483
27dbd3ac
RH
3484/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3485 to a local label. */
4838c5ee 3486
27dbd3ac
RH
3487static tree
3488has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
4838c5ee 3489{
27dbd3ac
RH
3490 tree node = *nodep;
3491 tree fn = (tree) fnp;
726a989a 3492
27dbd3ac
RH
3493 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3494 return node;
3495
3496 if (TYPE_P (node))
3497 *walk_subtrees = 0;
3498
3499 return NULL_TREE;
3500}
726a989a 3501
27dbd3ac
RH
3502/* Determine if the function can be copied. If so return NULL. If
3503 not return a string describing the reason for failure. */
3504
3505static const char *
3506copy_forbidden (struct function *fun, tree fndecl)
3507{
3508 const char *reason = fun->cannot_be_copied_reason;
c021f10b
NF
3509 tree decl;
3510 unsigned ix;
27dbd3ac
RH
3511
3512 /* Only examine the function once. */
3513 if (fun->cannot_be_copied_set)
3514 return reason;
3515
3516 /* We cannot copy a function that receives a non-local goto
3517 because we cannot remap the destination label used in the
3518 function that is performing the non-local goto. */
3519 /* ??? Actually, this should be possible, if we work at it.
3520 No doubt there's just a handful of places that simply
3521 assume it doesn't happen and don't substitute properly. */
3522 if (fun->has_nonlocal_label)
3523 {
3524 reason = G_("function %q+F can never be copied "
3525 "because it receives a non-local goto");
3526 goto fail;
3527 }
3528
c021f10b
NF
3529 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3530 if (TREE_CODE (decl) == VAR_DECL
3531 && TREE_STATIC (decl)
3532 && !DECL_EXTERNAL (decl)
3533 && DECL_INITIAL (decl)
3534 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3535 has_label_address_in_static_1,
3536 fndecl))
3537 {
3538 reason = G_("function %q+F can never be copied because it saves "
3539 "address of local label in a static variable");
3540 goto fail;
3541 }
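  /* Illustrative sketch (hypothetical code, not from the GCC sources) of a
     function rejected by the check above:

        int
        resume (int set)
        {
          static void *where = &&start;
          if (!set)
            goto *where;
          where = &&later;
        start:
          return 1;
        later:
          return 2;
        }

     A copy of the function would get fresh labels, but the static
     initializer would still point at the labels of the original body, so
     copying (and therefore inlining or versioning) is forbidden.  */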
27dbd3ac
RH
3542
3543 fail:
3544 fun->cannot_be_copied_reason = reason;
3545 fun->cannot_be_copied_set = true;
3546 return reason;
3547}
3548
3549
3550static const char *inline_forbidden_reason;
3551
3552/* A callback for walk_gimple_seq to handle statements. Returns non-null
3553 iff a function cannot be inlined. Also sets the reason why. */
c986baf6 3554
c986baf6 3555static tree
726a989a
RB
3556inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3557 struct walk_stmt_info *wip)
c986baf6 3558{
726a989a 3559 tree fn = (tree) wip->info;
f08545a8 3560 tree t;
726a989a 3561 gimple stmt = gsi_stmt (*gsi);
c986baf6 3562
726a989a 3563 switch (gimple_code (stmt))
f08545a8 3564 {
726a989a 3565 case GIMPLE_CALL:
3197c4fd
AS
3566 /* Refuse to inline an alloca call unless the user explicitly forced it,
3567 as this may change the program's memory overhead drastically when the
3568 function using alloca is called in a loop. In the GCC present in
3569 SPEC2000, inlining into schedule_block caused it to require 2GB of
63d2a353
MM
3570 RAM instead of 256MB. Don't do so for alloca calls emitted for
3571 VLA objects as those can't cause unbounded growth (they're always
3572 wrapped inside stack_save/stack_restore regions). */
726a989a 3573 if (gimple_alloca_call_p (stmt)
538dd0b7 3574 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
f08545a8
JH
3575 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3576 {
ddd2d57e 3577 inline_forbidden_reason
dee15844 3578 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 3579 "alloca (override using the always_inline attribute)");
726a989a
RB
3580 *handled_ops_p = true;
3581 return fn;
f08545a8 3582 }
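      /* Illustrative sketch (hypothetical code, not from the GCC sources)
         of the growth problem described above:

            extern void consume (char *);

            static void
            with_scratch (void)
            {
              char *buf = __builtin_alloca (4096);
              consume (buf);
            }

            void
            caller (int n)
            {
              for (int i = 0; i < n; i++)
                with_scratch ();
            }

         The alloca'd block is released when with_scratch returns, but if
         with_scratch were inlined, every iteration's allocation would stay
         live until CALLER returns, so stack use would grow with N.  */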
726a989a
RB
3583
3584 t = gimple_call_fndecl (stmt);
3585 if (t == NULL_TREE)
f08545a8 3586 break;
84f5e1b1 3587
f08545a8
JH
3588 /* We cannot inline functions that call setjmp. */
3589 if (setjmp_call_p (t))
3590 {
ddd2d57e 3591 inline_forbidden_reason
dee15844 3592 = G_("function %q+F can never be inlined because it uses setjmp");
726a989a
RB
3593 *handled_ops_p = true;
3594 return t;
f08545a8
JH
3595 }
3596
6de9cd9a 3597 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 3598 switch (DECL_FUNCTION_CODE (t))
f08545a8 3599 {
3197c4fd
AS
3600 /* We cannot inline functions that take a variable number of
3601 arguments. */
3602 case BUILT_IN_VA_START:
3197c4fd
AS
3603 case BUILT_IN_NEXT_ARG:
3604 case BUILT_IN_VA_END:
6de9cd9a 3605 inline_forbidden_reason
dee15844 3606 = G_("function %q+F can never be inlined because it "
6de9cd9a 3607 "uses variable argument lists");
726a989a
RB
3608 *handled_ops_p = true;
3609 return t;
6de9cd9a 3610
3197c4fd 3611 case BUILT_IN_LONGJMP:
6de9cd9a
DN
3612 /* We can't inline functions that call __builtin_longjmp at
3613 all. The non-local goto machinery really requires the
3614 destination be in a different function. If we allow the
3615 function calling __builtin_longjmp to be inlined into the
3616 function calling __builtin_setjmp, Things will Go Awry. */
3617 inline_forbidden_reason
dee15844 3618 = G_("function %q+F can never be inlined because "
6de9cd9a 3619 "it uses setjmp-longjmp exception handling");
726a989a
RB
3620 *handled_ops_p = true;
3621 return t;
6de9cd9a
DN
3622
3623 case BUILT_IN_NONLOCAL_GOTO:
3624 /* Similarly. */
3625 inline_forbidden_reason
dee15844 3626 = G_("function %q+F can never be inlined because "
6de9cd9a 3627 "it uses non-local goto");
726a989a
RB
3628 *handled_ops_p = true;
3629 return t;
f08545a8 3630
4b284111
JJ
3631 case BUILT_IN_RETURN:
3632 case BUILT_IN_APPLY_ARGS:
3633 /* If a __builtin_apply_args caller would be inlined,
3634 it would be saving arguments of the function it has
3635 been inlined into. Similarly __builtin_return would
3636 return from the function the inline has been inlined into. */
3637 inline_forbidden_reason
dee15844 3638 = G_("function %q+F can never be inlined because "
4b284111 3639 "it uses __builtin_return or __builtin_apply_args");
726a989a
RB
3640 *handled_ops_p = true;
3641 return t;
4b284111 3642
3197c4fd
AS
3643 default:
3644 break;
3645 }
f08545a8
JH
3646 break;
3647
726a989a
RB
3648 case GIMPLE_GOTO:
3649 t = gimple_goto_dest (stmt);
f08545a8
JH
3650
3651 /* We will not inline a function which uses computed goto. The
3652 addresses of its local labels, which may be tucked into
3653 global storage, are of course not constant across
3654 instantiations, which causes unexpected behavior. */
3655 if (TREE_CODE (t) != LABEL_DECL)
3656 {
ddd2d57e 3657 inline_forbidden_reason
dee15844 3658 = G_("function %q+F can never be inlined "
ddd2d57e 3659 "because it contains a computed goto");
726a989a
RB
3660 *handled_ops_p = true;
3661 return t;
f08545a8 3662 }
6de9cd9a 3663 break;
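      /* Illustrative sketch (hypothetical code, not from the GCC sources)
         of a computed goto that blocks inlining:

            int
            step (int op)
            {
              static void *const table[] = { &&inc, &&done };
              int acc = 0;
              goto *table[op & 1];
            inc:
              acc++;
            done:
              return acc;
            }

         The label addresses stored in TABLE refer to this instantiation of
         the body; a copy made by the inliner would still jump into the
         original function.  */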
f08545a8 3664
f08545a8
JH
3665 default:
3666 break;
3667 }
3668
726a989a 3669 *handled_ops_p = false;
f08545a8 3670 return NULL_TREE;
84f5e1b1
RH
3671}
3672
726a989a
RB
3673/* Return true if FNDECL is a function that cannot be inlined into
3674 another one. */
3675
3676static bool
f08545a8 3677inline_forbidden_p (tree fndecl)
84f5e1b1 3678{
2092ee7d 3679 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
726a989a 3680 struct walk_stmt_info wi;
726a989a
RB
3681 basic_block bb;
3682 bool forbidden_p = false;
3683
27dbd3ac
RH
3684 /* First check for shared reasons not to copy the code. */
3685 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3686 if (inline_forbidden_reason != NULL)
3687 return true;
3688
3689 /* Next, walk the statements of the function looking for
3690 constructs we can't handle, or that are non-optimal for inlining. */
6e2830c3 3691 hash_set<tree> visited_nodes;
726a989a
RB
3692 memset (&wi, 0, sizeof (wi));
3693 wi.info = (void *) fndecl;
6e2830c3 3694 wi.pset = &visited_nodes;
e21aff8a 3695
2092ee7d 3696 FOR_EACH_BB_FN (bb, fun)
726a989a
RB
3697 {
3698 gimple ret;
3699 gimple_seq seq = bb_seq (bb);
27dbd3ac 3700 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
726a989a
RB
3701 forbidden_p = (ret != NULL);
3702 if (forbidden_p)
27dbd3ac 3703 break;
2092ee7d
JJ
3704 }
3705
726a989a 3706 return forbidden_p;
84f5e1b1 3707}
6399c0ab
SB
3708\f
3709/* Return false if the function FNDECL cannot be inlined on account of its
3710 attributes, true otherwise. */
3711static bool
3712function_attribute_inlinable_p (const_tree fndecl)
3713{
3714 if (targetm.attribute_table)
3715 {
3716 const_tree a;
3717
3718 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3719 {
3720 const_tree name = TREE_PURPOSE (a);
3721 int i;
3722
3723 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3724 if (is_attribute_p (targetm.attribute_table[i].name, name))
3725 return targetm.function_attribute_inlinable_p (fndecl);
3726 }
3727 }
3728
3729 return true;
3730}
84f5e1b1 3731
b3c3af2f
SB
3732/* Returns nonzero if FN is a function that does not have any
3733 fundamental inline blocking properties. */
d4e4baa9 3734
27dbd3ac
RH
3735bool
3736tree_inlinable_function_p (tree fn)
d4e4baa9 3737{
b3c3af2f 3738 bool inlinable = true;
18177c7e
RG
3739 bool do_warning;
3740 tree always_inline;
d4e4baa9
AO
3741
3742 /* If we've already decided this function shouldn't be inlined,
3743 there's no need to check again. */
3744 if (DECL_UNINLINABLE (fn))
b3c3af2f 3745 return false;
d4e4baa9 3746
18177c7e
RG
3747 /* We only warn for functions declared `inline' by the user. */
3748 do_warning = (warn_inline
18177c7e 3749 && DECL_DECLARED_INLINE_P (fn)
0494626a 3750 && !DECL_NO_INLINE_WARNING_P (fn)
18177c7e
RG
3751 && !DECL_IN_SYSTEM_HEADER (fn));
3752
3753 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3754
e90acd93 3755 if (flag_no_inline
18177c7e
RG
3756 && always_inline == NULL)
3757 {
3758 if (do_warning)
3759 warning (OPT_Winline, "function %q+F can never be inlined because it "
3760 "is suppressed using -fno-inline", fn);
3761 inlinable = false;
3762 }
3763
18177c7e
RG
3764 else if (!function_attribute_inlinable_p (fn))
3765 {
3766 if (do_warning)
3767 warning (OPT_Winline, "function %q+F can never be inlined because it "
3768 "uses attributes conflicting with inlining", fn);
3769 inlinable = false;
3770 }
46c5ad27 3771
f08545a8 3772 else if (inline_forbidden_p (fn))
b3c3af2f
SB
3773 {
3774 /* See if we should warn about uninlinable functions. Previously,
3775 some of these warnings would be issued while trying to expand
3776 the function inline, but that would cause multiple warnings
3777 about functions that would for example call alloca. But since
3778 this a property of the function, just one warning is enough.
3779 As a bonus we can now give more details about the reason why a
18177c7e
RG
3780 function is not inlinable. */
3781 if (always_inline)
c9fc06dc 3782 error (inline_forbidden_reason, fn);
2d327012 3783 else if (do_warning)
d2fcbf6f 3784 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f
SB
3785
3786 inlinable = false;
3787 }
d4e4baa9
AO
3788
3789 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 3790 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 3791
b3c3af2f
SB
3792 return inlinable;
3793}
3794
b4c9af96
RB
3795/* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3796 word size, take a possible memcpy call into account, and return the
3797 cost based on whether we are optimizing for size or speed according to SPEED_P. */
e5c4f28a
RG
3798
3799int
1ed85d52 3800estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
e5c4f28a
RG
3801{
3802 HOST_WIDE_INT size;
3803
078c3644
JH
3804 gcc_assert (!VOID_TYPE_P (type));
3805
c204d113
L
3806 if (TREE_CODE (type) == VECTOR_TYPE)
3807 {
ef4bddc2
RS
3808 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3809 machine_mode simd
c204d113
L
3810 = targetm.vectorize.preferred_simd_mode (inner);
3811 int simd_mode_size = GET_MODE_SIZE (simd);
3812 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3813 / simd_mode_size);
3814 }
3815
e5c4f28a
RG
3816 size = int_size_in_bytes (type);
3817
b4c9af96 3818 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
e5c4f28a
RG
3819 /* Cost of a memcpy call, 3 arguments and the call. */
3820 return 4;
3821 else
3822 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3823}
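/* Worked example (the numbers are illustrative; MOVE_MAX_PIECES and
   MOVE_RATIO are target dependent): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed) == 16, a 20-byte struct costs (20 + 8 - 1) / 8 == 3
   move units, while a 256-byte struct exceeds 8 * 16 == 128 bytes and is
   charged the flat memcpy cost of 4 (three arguments plus the call).  */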
3824
726a989a 3825/* Returns cost of operation CODE, according to WEIGHTS */
7f9bc51b 3826
726a989a 3827static int
02f0b13a
JH
3828estimate_operator_cost (enum tree_code code, eni_weights *weights,
3829 tree op1 ATTRIBUTE_UNUSED, tree op2)
6de9cd9a 3830{
726a989a 3831 switch (code)
6de9cd9a 3832 {
726a989a
RB
3833 /* These are "free" conversions, or their presumed cost
3834 is folded into other operations. */
61fcaeec 3835 case RANGE_EXPR:
1a87cf0c 3836 CASE_CONVERT:
726a989a
RB
3837 case COMPLEX_EXPR:
3838 case PAREN_EXPR:
d4d92cd3 3839 case VIEW_CONVERT_EXPR:
726a989a 3840 return 0;
6de9cd9a 3841
e5c4f28a
RG
3842 /* Assign cost of 1 to usual operations.
3843 ??? We may consider mapping RTL costs to this. */
6de9cd9a 3844 case COND_EXPR:
4151978d 3845 case VEC_COND_EXPR:
2205ed25 3846 case VEC_PERM_EXPR:
6de9cd9a
DN
3847
3848 case PLUS_EXPR:
5be014d5 3849 case POINTER_PLUS_EXPR:
6de9cd9a
DN
3850 case MINUS_EXPR:
3851 case MULT_EXPR:
98449720 3852 case MULT_HIGHPART_EXPR:
16949072 3853 case FMA_EXPR:
6de9cd9a 3854
09e881c9 3855 case ADDR_SPACE_CONVERT_EXPR:
325217ed 3856 case FIXED_CONVERT_EXPR:
6de9cd9a 3857 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3858
3859 case NEGATE_EXPR:
3860 case FLOAT_EXPR:
3861 case MIN_EXPR:
3862 case MAX_EXPR:
3863 case ABS_EXPR:
3864
3865 case LSHIFT_EXPR:
3866 case RSHIFT_EXPR:
3867 case LROTATE_EXPR:
3868 case RROTATE_EXPR:
3869
3870 case BIT_IOR_EXPR:
3871 case BIT_XOR_EXPR:
3872 case BIT_AND_EXPR:
3873 case BIT_NOT_EXPR:
3874
3875 case TRUTH_ANDIF_EXPR:
3876 case TRUTH_ORIF_EXPR:
3877 case TRUTH_AND_EXPR:
3878 case TRUTH_OR_EXPR:
3879 case TRUTH_XOR_EXPR:
3880 case TRUTH_NOT_EXPR:
3881
3882 case LT_EXPR:
3883 case LE_EXPR:
3884 case GT_EXPR:
3885 case GE_EXPR:
3886 case EQ_EXPR:
3887 case NE_EXPR:
3888 case ORDERED_EXPR:
3889 case UNORDERED_EXPR:
3890
3891 case UNLT_EXPR:
3892 case UNLE_EXPR:
3893 case UNGT_EXPR:
3894 case UNGE_EXPR:
3895 case UNEQ_EXPR:
d1a7edaf 3896 case LTGT_EXPR:
6de9cd9a 3897
6de9cd9a
DN
3898 case CONJ_EXPR:
3899
3900 case PREDECREMENT_EXPR:
3901 case PREINCREMENT_EXPR:
3902 case POSTDECREMENT_EXPR:
3903 case POSTINCREMENT_EXPR:
3904
16630a2c
DN
3905 case REALIGN_LOAD_EXPR:
3906
61d3cdbb
DN
3907 case REDUC_MAX_EXPR:
3908 case REDUC_MIN_EXPR:
3909 case REDUC_PLUS_EXPR:
20f06221 3910 case WIDEN_SUM_EXPR:
726a989a
RB
3911 case WIDEN_MULT_EXPR:
3912 case DOT_PROD_EXPR:
79d652a5 3913 case SAD_EXPR:
0354c0c7
BS
3914 case WIDEN_MULT_PLUS_EXPR:
3915 case WIDEN_MULT_MINUS_EXPR:
36ba4aae 3916 case WIDEN_LSHIFT_EXPR:
726a989a 3917
89d67cca
DN
3918 case VEC_WIDEN_MULT_HI_EXPR:
3919 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
3920 case VEC_WIDEN_MULT_EVEN_EXPR:
3921 case VEC_WIDEN_MULT_ODD_EXPR:
89d67cca
DN
3922 case VEC_UNPACK_HI_EXPR:
3923 case VEC_UNPACK_LO_EXPR:
d9987fb4
UB
3924 case VEC_UNPACK_FLOAT_HI_EXPR:
3925 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 3926 case VEC_PACK_TRUNC_EXPR:
89d67cca 3927 case VEC_PACK_SAT_EXPR:
d9987fb4 3928 case VEC_PACK_FIX_TRUNC_EXPR:
36ba4aae
IR
3929 case VEC_WIDEN_LSHIFT_HI_EXPR:
3930 case VEC_WIDEN_LSHIFT_LO_EXPR:
98b44b0e 3931
726a989a 3932 return 1;
6de9cd9a 3933
1ea7e6ad 3934 /* A few special cases of expensive operations. This is useful
6de9cd9a
DN
3935 to avoid inlining functions having too many of these. */
3936 case TRUNC_DIV_EXPR:
3937 case CEIL_DIV_EXPR:
3938 case FLOOR_DIV_EXPR:
3939 case ROUND_DIV_EXPR:
3940 case EXACT_DIV_EXPR:
3941 case TRUNC_MOD_EXPR:
3942 case CEIL_MOD_EXPR:
3943 case FLOOR_MOD_EXPR:
3944 case ROUND_MOD_EXPR:
3945 case RDIV_EXPR:
02f0b13a
JH
3946 if (TREE_CODE (op2) != INTEGER_CST)
3947 return weights->div_mod_cost;
3948 return 1;
726a989a
RB
3949
3950 default:
3951 /* We expect a copy assignment with no operator. */
3952 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3953 return 0;
3954 }
3955}
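/* For example (illustrative, using the default weights set up in
   init_inline_once below): a division by a non-constant such as
   "a = b / c" is charged weights->div_mod_cost (10 with the time weights,
   1 with the size weights), "a = b / 8" is charged 1 like other
   arithmetic, and a plain conversion such as "(long) b" is free.  */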
3956
3957
3958/* Estimate number of instructions that will be created by expanding
3959 the statements in the statement sequence STMTS.
3960 WEIGHTS contains weights attributed to various constructs. */
3961
3962static int
3963estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3964{
3965 int cost;
3966 gimple_stmt_iterator gsi;
3967
3968 cost = 0;
3969 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3970 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3971
3972 return cost;
3973}
3974
3975
3976/* Estimate number of instructions that will be created by expanding STMT.
3977 WEIGHTS contains weights attributed to various constructs. */
3978
3979int
3980estimate_num_insns (gimple stmt, eni_weights *weights)
3981{
3982 unsigned cost, i;
3983 enum gimple_code code = gimple_code (stmt);
3984 tree lhs;
02f0b13a 3985 tree rhs;
726a989a
RB
3986
3987 switch (code)
3988 {
3989 case GIMPLE_ASSIGN:
3990 /* Try to estimate the cost of assignments. We have two cases to
3991 deal with:
3992 1) Simple assignments to registers;
3993 2) Stores to things that must live in memory. This includes
3994 "normal" stores to scalars, but also assignments of large
3995 structures, or constructors of big arrays;
3996
3997 Let us look at these two cases, assuming we have "a = b + C":
3998 <GIMPLE_ASSIGN <var_decl "a">
3999 <plus_expr <var_decl "b"> <constant C>>
4000 If "a" is a GIMPLE register, the assignment to it is free on almost
4001 any target, because "a" usually ends up in a real register. Hence
4002 the only cost of this expression comes from the PLUS_EXPR, and we
4003 can ignore the GIMPLE_ASSIGN.
4004 If "a" is not a GIMPLE register, the assignment to "a" will most
4005 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4006 of moving something into "a", which we compute using the function
4007 estimate_move_cost. */
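      /* For example (illustrative): "a_2 = b_1 + 7" with an SSA name on the
         left-hand side costs just the PLUS_EXPR, i.e. 1, while
         "s.f = b_1 + 7" with S living in memory additionally pays
         estimate_move_cost for the type of S.F on top of that 1.  */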
bccc50d4
JJ
4008 if (gimple_clobber_p (stmt))
4009 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4010
726a989a 4011 lhs = gimple_assign_lhs (stmt);
02f0b13a
JH
4012 rhs = gimple_assign_rhs1 (stmt);
4013
c12d9242 4014 cost = 0;
726a989a 4015
c12d9242
RB
4016 /* Account for the cost of moving to / from memory. */
4017 if (gimple_store_p (stmt))
b4c9af96 4018 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
c12d9242 4019 if (gimple_assign_load_p (stmt))
b4c9af96 4020 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
02f0b13a
JH
4021
4022 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4023 gimple_assign_rhs1 (stmt),
4024 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4025 == GIMPLE_BINARY_RHS
4026 ? gimple_assign_rhs2 (stmt) : NULL);
726a989a
RB
4027 break;
4028
4029 case GIMPLE_COND:
02f0b13a
JH
4030 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4031 gimple_op (stmt, 0),
4032 gimple_op (stmt, 1));
726a989a
RB
4033 break;
4034
4035 case GIMPLE_SWITCH:
538dd0b7
DM
4036 {
4037 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4038 /* Take into account cost of the switch + guess 2 conditional jumps for
4039 each case label.
4040
4041 TODO: once the switch expansion logic is sufficiently separated, we can
4042 do a better job of estimating the cost of the switch. */
4043 if (weights->time_based)
4044 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4045 else
4046 cost = gimple_switch_num_labels (switch_stmt) * 2;
4047 }
6de9cd9a 4048 break;
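      /* For example (illustrative): a switch with 32 labels (including the
         default) is charged floor_log2 (32) * 2 == 10 when estimating time,
         but 32 * 2 == 64 when estimating size.  */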
726a989a
RB
4049
4050 case GIMPLE_CALL:
6de9cd9a 4051 {
e9287a41 4052 tree decl;
6de9cd9a 4053
e9287a41
RB
4054 if (gimple_call_internal_p (stmt))
4055 return 0;
fd9710dc
RB
4056 else if ((decl = gimple_call_fndecl (stmt))
4057 && DECL_BUILT_IN (decl))
e9f7ad79 4058 {
fd9710dc
RB
4059 /* Do not special case builtins where we see the body.
4060 This just confuses the inliner. */
4061 struct cgraph_node *node;
d52f5295 4062 if (!(node = cgraph_node::get (decl))
fd9710dc
RB
4063 || node->definition)
4064 ;
4065 /* For builtins that are likely expanded to nothing or
4066 inlined, do not account operand costs. */
4067 else if (is_simple_builtin (decl))
4068 return 0;
4069 else if (is_inexpensive_builtin (decl))
4070 return weights->target_builtin_call_cost;
4071 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
e9f7ad79 4072 {
fd9710dc
RB
4073 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4074 specialize the cheap expansion we do here.
4075 ??? This asks for a more general solution. */
4076 switch (DECL_FUNCTION_CODE (decl))
4077 {
4078 case BUILT_IN_POW:
4079 case BUILT_IN_POWF:
4080 case BUILT_IN_POWL:
4081 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4082 && REAL_VALUES_EQUAL
4083 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4084 return estimate_operator_cost
4085 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4086 gimple_call_arg (stmt, 0));
4087 break;
4088
4089 default:
4090 break;
4091 }
e9f7ad79
RG
4092 }
4093 }
b8698a0f 4094
fd9710dc 4095 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3c04921b 4096 if (gimple_call_lhs (stmt))
b4c9af96
RB
4097 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4098 weights->time_based);
3c04921b 4099 for (i = 0; i < gimple_call_num_args (stmt); i++)
c7f599d0 4100 {
3c04921b 4101 tree arg = gimple_call_arg (stmt, i);
b4c9af96
RB
4102 cost += estimate_move_cost (TREE_TYPE (arg),
4103 weights->time_based);
c7f599d0 4104 }
6de9cd9a
DN
4105 break;
4106 }
88f4034b 4107
9bb2f479
JH
4108 case GIMPLE_RETURN:
4109 return weights->return_cost;
4110
726a989a
RB
4111 case GIMPLE_GOTO:
4112 case GIMPLE_LABEL:
4113 case GIMPLE_NOP:
4114 case GIMPLE_PHI:
726a989a 4115 case GIMPLE_PREDICT:
b5b8b0ac 4116 case GIMPLE_DEBUG:
726a989a
RB
4117 return 0;
4118
4119 case GIMPLE_ASM:
cc4029ee 4120 {
538dd0b7 4121 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
cc4029ee
AK
4122 /* 1000 means infinity. This avoids overflows later
4123 with very long asm statements. */
4124 if (count > 1000)
4125 count = 1000;
4126 return count;
4127 }
726a989a 4128
1d65f45c
RH
4129 case GIMPLE_RESX:
4130 /* This is either going to be an external function call with one
4131 argument, or two register copy statements plus a goto. */
4132 return 2;
4133
4134 case GIMPLE_EH_DISPATCH:
4135 /* ??? This is going to turn into a switch statement. Ideally
4136 we'd have a look at the eh region and estimate the number of
4137 edges involved. */
4138 return 10;
4139
726a989a 4140 case GIMPLE_BIND:
538dd0b7
DM
4141 return estimate_num_insns_seq (
4142 gimple_bind_body (as_a <gbind *> (stmt)),
4143 weights);
726a989a
RB
4144
4145 case GIMPLE_EH_FILTER:
4146 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4147
4148 case GIMPLE_CATCH:
538dd0b7
DM
4149 return estimate_num_insns_seq (gimple_catch_handler (
4150 as_a <gcatch *> (stmt)),
4151 weights);
726a989a
RB
4152
4153 case GIMPLE_TRY:
4154 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4155 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4156
4157 /* OpenMP directives are generally very expensive. */
4158
4159 case GIMPLE_OMP_RETURN:
4160 case GIMPLE_OMP_SECTIONS_SWITCH:
4161 case GIMPLE_OMP_ATOMIC_STORE:
4162 case GIMPLE_OMP_CONTINUE:
4163 /* ...except these, which are cheap. */
4164 return 0;
4165
4166 case GIMPLE_OMP_ATOMIC_LOAD:
4167 return weights->omp_cost;
4168
4169 case GIMPLE_OMP_FOR:
4170 return (weights->omp_cost
4171 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4172 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4173
4174 case GIMPLE_OMP_PARALLEL:
4175 case GIMPLE_OMP_TASK:
4176 case GIMPLE_OMP_CRITICAL:
4177 case GIMPLE_OMP_MASTER:
acf0174b 4178 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
4179 case GIMPLE_OMP_ORDERED:
4180 case GIMPLE_OMP_SECTION:
4181 case GIMPLE_OMP_SECTIONS:
4182 case GIMPLE_OMP_SINGLE:
acf0174b
JJ
4183 case GIMPLE_OMP_TARGET:
4184 case GIMPLE_OMP_TEAMS:
726a989a
RB
4185 return (weights->omp_cost
4186 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 4187
0a35513e
AH
4188 case GIMPLE_TRANSACTION:
4189 return (weights->tm_cost
538dd0b7
DM
4190 + estimate_num_insns_seq (gimple_transaction_body (
4191 as_a <gtransaction *> (stmt)),
0a35513e
AH
4192 weights));
4193
6de9cd9a 4194 default:
1e128c5f 4195 gcc_unreachable ();
6de9cd9a 4196 }
726a989a
RB
4197
4198 return cost;
6de9cd9a
DN
4199}
4200
726a989a
RB
4201/* Estimate number of instructions that will be created by expanding
4202 function FNDECL. WEIGHTS contains weights attributed to various
4203 constructs. */
aa4a53af 4204
6de9cd9a 4205int
726a989a 4206estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 4207{
726a989a
RB
4208 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4209 gimple_stmt_iterator bsi;
e21aff8a 4210 basic_block bb;
726a989a 4211 int n = 0;
e21aff8a 4212
726a989a
RB
4213 gcc_assert (my_function && my_function->cfg);
4214 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 4215 {
726a989a
RB
4216 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4217 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 4218 }
e21aff8a 4219
726a989a 4220 return n;
7f9bc51b
ZD
4221}
4222
726a989a 4223
7f9bc51b
ZD
4224/* Initializes weights used by estimate_num_insns. */
4225
4226void
4227init_inline_once (void)
4228{
7f9bc51b 4229 eni_size_weights.call_cost = 1;
d2d668fb 4230 eni_size_weights.indirect_call_cost = 3;
625a2efb 4231 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 4232 eni_size_weights.div_mod_cost = 1;
7f9bc51b 4233 eni_size_weights.omp_cost = 40;
0a35513e 4234 eni_size_weights.tm_cost = 10;
02f0b13a 4235 eni_size_weights.time_based = false;
9bb2f479 4236 eni_size_weights.return_cost = 1;
7f9bc51b
ZD
4237
4238 /* Estimating time for a call is difficult, since we have no idea what the
4239 called function does. In the current uses of eni_time_weights,
4240 underestimating the cost does less harm than overestimating it, so
ea2c620c 4241 we choose a rather small value here. */
7f9bc51b 4242 eni_time_weights.call_cost = 10;
d2d668fb 4243 eni_time_weights.indirect_call_cost = 15;
9bb2f479 4244 eni_time_weights.target_builtin_call_cost = 1;
7f9bc51b 4245 eni_time_weights.div_mod_cost = 10;
7f9bc51b 4246 eni_time_weights.omp_cost = 40;
0a35513e 4247 eni_time_weights.tm_cost = 40;
02f0b13a 4248 eni_time_weights.time_based = true;
9bb2f479 4249 eni_time_weights.return_cost = 2;
6de9cd9a
DN
4250}
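/* Worked example (illustrative): for a callee whose body is just
   "return a + b;", estimate_num_insns charges 1 for the PLUS_EXPR plus
   return_cost, i.e. 2 under eni_size_weights and 3 under
   eni_time_weights; the call site itself is charged call_cost plus
   per-argument move costs by the GIMPLE_CALL case above.  */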
4251
726a989a
RB
4252/* Estimate the number of instructions in a gimple_seq. */
4253
4254int
4255count_insns_seq (gimple_seq seq, eni_weights *weights)
4256{
4257 gimple_stmt_iterator gsi;
4258 int n = 0;
4259 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4260 n += estimate_num_insns (gsi_stmt (gsi), weights);
4261
4262 return n;
4263}
4264
4265
e21aff8a 4266/* Install new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 4267
e21aff8a 4268static void
4a283090 4269prepend_lexical_block (tree current_block, tree new_block)
e21aff8a 4270{
4a283090
JH
4271 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4272 BLOCK_SUBBLOCKS (current_block) = new_block;
e21aff8a 4273 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
4274}
4275
c021f10b
NF
4276/* Add local variables from CALLEE to CALLER. */
4277
4278static inline void
4279add_local_variables (struct function *callee, struct function *caller,
ae0379fc 4280 copy_body_data *id)
c021f10b
NF
4281{
4282 tree var;
4283 unsigned ix;
4284
4285 FOR_EACH_LOCAL_DECL (callee, ix, var)
ae0379fc 4286 if (!can_be_nonlocal (var, id))
42694189
JJ
4287 {
4288 tree new_var = remap_decl (var, id);
4289
4290 /* Remap debug-expressions. */
4291 if (TREE_CODE (new_var) == VAR_DECL
839b422f 4292 && DECL_HAS_DEBUG_EXPR_P (var)
42694189
JJ
4293 && new_var != var)
4294 {
4295 tree tem = DECL_DEBUG_EXPR (var);
4296 bool old_regimplify = id->regimplify;
4297 id->remapping_type_depth++;
4298 walk_tree (&tem, copy_tree_body_r, id, NULL);
4299 id->remapping_type_depth--;
4300 id->regimplify = old_regimplify;
4301 SET_DECL_DEBUG_EXPR (new_var, tem);
839b422f 4302 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
42694189
JJ
4303 }
4304 add_local_decl (caller, new_var);
4305 }
c021f10b
NF
4306}
4307
726a989a 4308/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 4309
e21aff8a 4310static bool
726a989a 4311expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 4312{
0f900dfa 4313 tree use_retvar;
d436bff8 4314 tree fn;
b787e7a2
TS
4315 hash_map<tree, tree> *dst;
4316 hash_map<tree, tree> *st = NULL;
110cfe1c 4317 tree return_slot;
7740f00d 4318 tree modify_dest;
d5e254e1 4319 tree return_bounds = NULL;
6de9cd9a 4320 location_t saved_location;
e21aff8a 4321 struct cgraph_edge *cg_edge;
61a05df1 4322 cgraph_inline_failed_t reason;
e21aff8a
SB
4323 basic_block return_block;
4324 edge e;
726a989a 4325 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 4326 bool successfully_inlined = FALSE;
4f6c2131 4327 bool purge_dead_abnormal_edges;
538dd0b7 4328 gcall *call_stmt;
d5e254e1 4329 unsigned int i;
d4e4baa9 4330
6de9cd9a
DN
4331 /* Set input_location here so we get the right instantiation context
4332 if we call instantiate_decl from inlinable_function_p. */
532aafad 4333 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
6de9cd9a 4334 saved_location = input_location;
035775c8 4335 input_location = gimple_location (stmt);
6de9cd9a 4336
d4e4baa9 4337 /* From here on, we're only interested in CALL_EXPRs. */
538dd0b7
DM
4338 call_stmt = dyn_cast <gcall *> (stmt);
4339 if (!call_stmt)
6de9cd9a 4340 goto egress;
d4e4baa9 4341
d52f5295 4342 cg_edge = id->dst_node->get_edge (stmt);
db09f943 4343 gcc_checking_assert (cg_edge);
d4e4baa9
AO
4344 /* First, see if we can figure out what function is being called.
4345 If we cannot, then there is no hope of inlining the function. */
db09f943 4346 if (cg_edge->indirect_unknown_callee)
3949c4a7 4347 goto egress;
67348ccc 4348 fn = cg_edge->callee->decl;
db09f943 4349 gcc_checking_assert (fn);
b58b1157 4350
726a989a 4351 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
4352 globally declared inline, we don't set its DECL_INITIAL.
4353 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4354 C++ front-end uses it for cdtors to refer to their internal
4355 declarations, that are not real functions. Fortunately those
4356 don't have trees to be saved, so we can tell by checking their
726a989a
RB
4357 gimple_body. */
4358 if (!DECL_INITIAL (fn)
a1a0fd4e 4359 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 4360 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
4361 fn = DECL_ABSTRACT_ORIGIN (fn);
4362
8f4f502f 4363 /* Don't try to inline functions that are not well-suited to inlining. */
9c8305f8 4364 if (cg_edge->inline_failed)
a833faa5 4365 {
9c8305f8 4366 reason = cg_edge->inline_failed;
3e293154
MJ
4367 /* If this call was originally indirect, we do not want to emit any
4368 inlining related warnings or sorry messages because there are no
4369 guarantees regarding those. */
e33c6cd6 4370 if (cg_edge->indirect_inlining_edge)
3e293154
MJ
4371 goto egress;
4372
7fac66d4 4373 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
bfc61b40
JH
4374 /* For extern inline functions that get redefined we always
4375 silently ignored the always_inline flag. Better behaviour would
4376 be to keep both bodies and use the extern inline body
4377 for inlining, but we can't do that because frontends overwrite
4378 the body. */
4379 && !cg_edge->callee->local.redefined_extern_inline
df9dda2d
ST
4380 /* During early inline pass, report only when optimization is
4381 not turned on. */
3dafb85c 4382 && (symtab->global_info_ready
1cf11770
L
4383 || !optimize
4384 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
c9fc06dc
CB
4385 /* PR 20090218-1_0.c. Body can be provided by another module. */
4386 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
2d327012 4387 {
c9fc06dc
CB
4388 error ("inlining failed in call to always_inline %q+F: %s", fn,
4389 cgraph_inline_failed_string (reason));
4390 error ("called from here");
2d327012 4391 }
ff7037dc
EB
4392 else if (warn_inline
4393 && DECL_DECLARED_INLINE_P (fn)
4394 && !DECL_NO_INLINE_WARNING_P (fn)
2d327012 4395 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 4396 && reason != CIF_UNSPECIFIED
d63db217 4397 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
d7d1d041 4398 /* Do not warn about not inlined recursive calls. */
3dafb85c 4399 && !cg_edge->recursive_p ()
d63db217 4400 /* Avoid warnings during early inline pass. */
3dafb85c 4401 && symtab->global_info_ready)
a833faa5 4402 {
dee15844 4403 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
49c8958b 4404 fn, _(cgraph_inline_failed_string (reason)));
3176a0c2 4405 warning (OPT_Winline, "called from here");
a833faa5 4406 }
6de9cd9a 4407 goto egress;
a833faa5 4408 }
67348ccc 4409 fn = cg_edge->callee->decl;
70486010 4410 cg_edge->callee->get_untransformed_body ();
d4e4baa9 4411
18c6ada9 4412#ifdef ENABLE_CHECKING
67348ccc 4413 if (cg_edge->callee->decl != id->dst_node->decl)
d52f5295 4414 cg_edge->callee->verify ();
18c6ada9
JH
4415#endif
4416
e21aff8a 4417 /* We will be inlining this callee. */
1d65f45c 4418 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
d5e254e1 4419 id->assign_stmts.create (0);
e21aff8a 4420
f9417da1 4421 /* Update the callers EH personality. */
67348ccc
DM
4422 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4423 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4424 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
f9417da1 4425
726a989a 4426 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
4427 e = split_block (bb, stmt);
4428 bb = e->src;
4429 return_block = e->dest;
4430 remove_edge (e);
4431
4f6c2131
EB
4432 /* split_block splits after the statement; work around this by
4433 moving the call into the second block manually. Not pretty,
4434 but seems easier than doing the CFG manipulation by hand
726a989a
RB
4435 when the GIMPLE_CALL is in the last statement of BB. */
4436 stmt_gsi = gsi_last_bb (bb);
4437 gsi_remove (&stmt_gsi, false);
4f6c2131 4438
726a989a 4439 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
4440 been the source of abnormal edges. In this case, schedule
4441 the removal of dead abnormal edges. */
726a989a
RB
4442 gsi = gsi_start_bb (return_block);
4443 if (gsi_end_p (gsi))
e21aff8a 4444 {
726a989a 4445 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 4446 purge_dead_abnormal_edges = true;
e21aff8a 4447 }
4f6c2131
EB
4448 else
4449 {
726a989a 4450 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
4451 purge_dead_abnormal_edges = false;
4452 }
4453
726a989a 4454 stmt_gsi = gsi_start_bb (return_block);
742a37d5 4455
d436bff8
AH
4456 /* Build a block containing code to initialize the arguments, the
4457 actual inline expansion of the body, and a label for the return
4458 statements within the function to jump to. The type of the
3e492e9c
RB
4459 statement expression is the return type of the function call.
4460 ??? If the call does not have an associated block then we will
4461 remap all callee blocks to NULL, effectively dropping most of
4462 its debug information. This should only happen for calls to
4463 artificial decls inserted by the compiler itself. We need to
4464 either link the inlined blocks into the caller block tree or
4465 not refer to them in any way to not break GC for locations. */
5368224f 4466 if (gimple_block (stmt))
3e492e9c
RB
4467 {
4468 id->block = make_node (BLOCK);
4469 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
a9d5a059 4470 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
3e492e9c
RB
4471 prepend_lexical_block (gimple_block (stmt), id->block);
4472 }
e21aff8a 4473
d4e4baa9
AO
4474 /* Local declarations will be replaced by their equivalents in this
4475 map. */
4476 st = id->decl_map;
b787e7a2 4477 id->decl_map = new hash_map<tree, tree>;
b5b8b0ac
AO
4478 dst = id->debug_map;
4479 id->debug_map = NULL;
d4e4baa9 4480
e21aff8a 4481 /* Record the function we are about to inline. */
1b369fae
RH
4482 id->src_fn = fn;
4483 id->src_node = cg_edge->callee;
110cfe1c 4484 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
538dd0b7 4485 id->call_stmt = stmt;
1b369fae 4486
3c8da8a5
AO
4487 gcc_assert (!id->src_cfun->after_inlining);
4488
045685a9 4489 id->entry_bb = bb;
7299cb99
JH
4490 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4491 {
4492 gimple_stmt_iterator si = gsi_last_bb (bb);
4493 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4494 NOT_TAKEN),
4495 GSI_NEW_STMT);
4496 }
726a989a 4497 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 4498
ea99e0be 4499 if (DECL_INITIAL (fn))
94645a02 4500 {
3e492e9c
RB
4501 if (gimple_block (stmt))
4502 {
4503 tree *var;
4504
4505 prepend_lexical_block (id->block,
4506 remap_blocks (DECL_INITIAL (fn), id));
4507 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4508 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4509 == NULL_TREE));
4510 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4511 otherwise, for DWARF, the DW_TAG_formal_parameter dies will not be
4512 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4513 under it. The parameters can then be evaluated in the debugger,
4514 but don't show up in backtraces. */
4515 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4516 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4517 {
4518 tree v = *var;
4519 *var = TREE_CHAIN (v);
4520 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4521 BLOCK_VARS (id->block) = v;
4522 }
4523 else
4524 var = &TREE_CHAIN (*var);
4525 }
4526 else
4527 remap_blocks_to_null (DECL_INITIAL (fn), id);
94645a02 4528 }
acb8f212 4529
d4e4baa9
AO
4530 /* Return statements in the function body will be replaced by jumps
4531 to the RET_LABEL. */
1e128c5f
GB
4532 gcc_assert (DECL_INITIAL (fn));
4533 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 4534
726a989a 4535 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 4536 return_slot = NULL;
726a989a 4537 if (gimple_call_lhs (stmt))
81bafd36 4538 {
726a989a 4539 modify_dest = gimple_call_lhs (stmt);
81bafd36 4540
d5e254e1
IE
4541 /* Remember where to copy returned bounds. */
4542 if (gimple_call_with_bounds_p (stmt)
4543 && TREE_CODE (modify_dest) == SSA_NAME)
4544 {
538dd0b7 4545 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
d5e254e1
IE
4546 if (retbnd)
4547 {
4548 return_bounds = gimple_call_lhs (retbnd);
4549 /* If returned bounds are not used then just
4550 remove unused call. */
4551 if (!return_bounds)
4552 {
4553 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4554 gsi_remove (&iter, true);
4555 }
4556 }
4557 }
4558
81bafd36
ILT
4559 /* The function which we are inlining might not return a value,
4560 in which case we should issue a warning that the function
4561 does not return a value. In that case the optimizers will
4562 see that the variable to which the value is assigned was not
4563 initialized. We do not want to issue a warning about that
4564 uninitialized variable. */
4565 if (DECL_P (modify_dest))
4566 TREE_NO_WARNING (modify_dest) = 1;
726a989a 4567
538dd0b7 4568 if (gimple_call_return_slot_opt_p (call_stmt))
fa47911c 4569 {
110cfe1c 4570 return_slot = modify_dest;
fa47911c
JM
4571 modify_dest = NULL;
4572 }
81bafd36 4573 }
7740f00d
RH
4574 else
4575 modify_dest = NULL;
4576
1ea193c2
ILT
4577 /* If we are inlining a call to the C++ operator new, we don't want
4578 to use type based alias analysis on the return value. Otherwise
4579 we may get confused if the compiler sees that the inlined new
4580 function returns a pointer which was just deleted. See bug
4581 33407. */
4582 if (DECL_IS_OPERATOR_NEW (fn))
4583 {
4584 return_slot = NULL;
4585 modify_dest = NULL;
4586 }
4587
d4e4baa9 4588 /* Declare the return variable for the function. */
d5e254e1
IE
4589 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4590 return_bounds, bb);
1ea193c2 4591
acb8f212 4592 /* Add local vars in this inlined callee to caller. */
ae0379fc 4593 add_local_variables (id->src_cfun, cfun, id);
acb8f212 4594
0d63a740
JH
4595 if (dump_file && (dump_flags & TDF_DETAILS))
4596 {
4597 fprintf (dump_file, "Inlining ");
b8698a0f 4598 print_generic_expr (dump_file, id->src_fn, 0);
0d63a740 4599 fprintf (dump_file, " to ");
b8698a0f 4600 print_generic_expr (dump_file, id->dst_fn, 0);
0d63a740
JH
4601 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4602 }
4603
eb50f5f4
JH
4604 /* This is it. Duplicate the callee body. Assume callee is
4605 pre-gimplified. Note that we must not alter the caller
4606 function in any way before this point, as this CALL_EXPR may be
4607 a self-referential call; if we're calling ourselves, we need to
4608 duplicate our body before altering anything. */
98339851 4609 copy_body (id, cg_edge->callee->count,
8b47039c 4610 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
f3b331d1 4611 bb, return_block, NULL);
eb50f5f4 4612
d086d311 4613 /* Reset the escaped solution. */
6b8ed145 4614 if (cfun->gimple_df)
d086d311 4615 pt_solution_reset (&cfun->gimple_df->escaped);
6b8ed145 4616
d4e4baa9 4617 /* Clean up. */
b5b8b0ac
AO
4618 if (id->debug_map)
4619 {
b787e7a2 4620 delete id->debug_map;
b5b8b0ac
AO
4621 id->debug_map = dst;
4622 }
b787e7a2 4623 delete id->decl_map;
d4e4baa9
AO
4624 id->decl_map = st;
4625
5006671f
RG
4626 /* Unlink the call's virtual operands before replacing it. */
4627 unlink_stmt_vdef (stmt);
c742772c
RB
4628 if (gimple_vdef (stmt)
4629 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4630 release_ssa_name (gimple_vdef (stmt));
5006671f 4631
84936f6f 4632 /* If the inlined function returns a result that we care about,
726a989a
RB
4633 substitute the GIMPLE_CALL with an assignment of the return
4634 variable to the LHS of the call. That is, if STMT was
4635 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4636 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 4637 {
726a989a
RB
4638 gimple old_stmt = stmt;
4639 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4640 gsi_replace (&stmt_gsi, stmt, false);
726a989a 4641 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
d5e254e1
IE
4642
4643 /* Copy bounds if we copy structure with bounds. */
4644 if (chkp_function_instrumented_p (id->dst_fn)
4645 && !BOUNDED_P (use_retvar)
4646 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4647 id->assign_stmts.safe_push (stmt);
e21aff8a 4648 }
6de9cd9a 4649 else
110cfe1c 4650 {
726a989a
RB
4651 /* Handle the case of inlining a function with no return
4652 statement, which causes the return value to become undefined. */
4653 if (gimple_call_lhs (stmt)
4654 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 4655 {
726a989a
RB
4656 tree name = gimple_call_lhs (stmt);
4657 tree var = SSA_NAME_VAR (name);
32244553 4658 tree def = ssa_default_def (cfun, var);
110cfe1c 4659
110cfe1c
JH
4660 if (def)
4661 {
726a989a
RB
4662 /* If the variable is used undefined, make this name
4663 undefined via a move. */
4664 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4665 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 4666 }
110cfe1c
JH
4667 else
4668 {
726a989a
RB
4669 /* Otherwise make this variable undefined. */
4670 gsi_remove (&stmt_gsi, true);
32244553 4671 set_ssa_default_def (cfun, var, name);
726a989a 4672 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
4673 }
4674 }
4675 else
726a989a 4676 gsi_remove (&stmt_gsi, true);
110cfe1c 4677 }
d4e4baa9 4678
d5e254e1
IE
4679 /* Put returned bounds into the correct place if required. */
4680 if (return_bounds)
4681 {
4682 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4683 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4684 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4685 unlink_stmt_vdef (old_stmt);
4686 gsi_replace (&bnd_gsi, new_stmt, false);
4687 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4688 cgraph_update_edges_for_call_stmt (old_stmt,
4689 gimple_call_fndecl (old_stmt),
4690 new_stmt);
4691 }
4692
4f6c2131 4693 if (purge_dead_abnormal_edges)
30fd5881
EB
4694 {
4695 gimple_purge_dead_eh_edges (return_block);
4696 gimple_purge_dead_abnormal_call_edges (return_block);
4697 }
84936f6f 4698
e21aff8a
SB
4699 /* If the value of the new expression is ignored, that's OK. We
4700 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4701 the equivalent inlined version either. */
726a989a
RB
4702 if (is_gimple_assign (stmt))
4703 {
4704 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 4705 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
4706 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4707 }
84936f6f 4708
d5e254e1
IE
4709 /* Copy bounds for all generated assigns that need it. */
4710 for (i = 0; i < id->assign_stmts.length (); i++)
4711 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4712 id->assign_stmts.release ();
4713
1eb3331e
DB
4714 /* Output the inlining info for this abstract function, since it has been
4715 inlined. If we don't do this now, we can lose the information about the
4716 variables in the function when the blocks get blown away as soon as we
4717 remove the cgraph node. */
3e492e9c 4718 if (gimple_block (stmt))
67348ccc 4719 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 4720
e72fcfe8 4721 /* Update callgraph if needed. */
d52f5295 4722 cg_edge->callee->remove ();
e72fcfe8 4723
e21aff8a 4724 id->block = NULL_TREE;
e21aff8a 4725 successfully_inlined = TRUE;
742a37d5 4726
6de9cd9a
DN
4727 egress:
4728 input_location = saved_location;
e21aff8a 4729 return successfully_inlined;
d4e4baa9 4730}
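/* Conceptual illustration only (not part of tree-inline.c; the names below
   are made up): the source-level effect of expand_call_inline on a simple
   call 'a = add1 (v)'.  The callee body is copied in place of the call,
   parameters are initialized (setup_one_parameter), the RETURN_EXPR is
   rewritten into an assignment to a return variable, and the original call
   statement finally becomes 'a = USE_RETVAR'.  */

static int
add1 (int x)
{
  return x + 1;
}

int
caller_before_inlining (int v)
{
  int a = add1 (v);
  return a;
}

int
caller_after_inlining (int v)
{
  int x_copy = v;           /* parameter setup  */
  int retvar = x_copy + 1;  /* inlined body; the return became an assignment  */
  int a = retvar;           /* the GIMPLE_CALL was replaced by 'a = USE_RETVAR'  */
  return a;
}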
6de9cd9a 4731
e21aff8a
SB
4732/* Expand call statements reachable from basic block BB.
4733 We can only have CALL_EXPRs as the "toplevel" tree code or nested
0a35513e 4734 in a MODIFY_EXPR. */
e21aff8a
SB
4735
4736static bool
1b369fae 4737gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 4738{
726a989a 4739 gimple_stmt_iterator gsi;
6de9cd9a 4740
726a989a 4741 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4742 {
726a989a 4743 gimple stmt = gsi_stmt (gsi);
e21aff8a 4744
726a989a 4745 if (is_gimple_call (stmt)
e9287a41 4746 && !gimple_call_internal_p (stmt)
726a989a
RB
4747 && expand_call_inline (bb, stmt, id))
4748 return true;
6de9cd9a 4749 }
726a989a 4750
e21aff8a 4751 return false;
6de9cd9a
DN
4752}
4753
726a989a 4754
b8a00a4d
JH
4755/* Walk all basic blocks created after FIRST and try to fold every statement
4756 in the STATEMENTS pointer set. */
726a989a 4757
b8a00a4d 4758static void
6e2830c3 4759fold_marked_statements (int first, hash_set<gimple> *statements)
b8a00a4d 4760{
0cae8d31 4761 for (; first < n_basic_blocks_for_fn (cfun); first++)
06e28de2 4762 if (BASIC_BLOCK_FOR_FN (cfun, first))
b8a00a4d 4763 {
726a989a
RB
4764 gimple_stmt_iterator gsi;
4765
06e28de2 4766 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
726a989a
RB
4767 !gsi_end_p (gsi);
4768 gsi_next (&gsi))
6e2830c3 4769 if (statements->contains (gsi_stmt (gsi)))
9477eb38 4770 {
726a989a 4771 gimple old_stmt = gsi_stmt (gsi);
4b685e14 4772 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
2bafad93 4773
44e10129
MM
4774 if (old_decl && DECL_BUILT_IN (old_decl))
4775 {
4776 /* Folding builtins can create multiple instructions,
4777 we need to look at all of them. */
4778 gimple_stmt_iterator i2 = gsi;
4779 gsi_prev (&i2);
4780 if (fold_stmt (&gsi))
4781 {
4782 gimple new_stmt;
a9d24544
JJ
4783 /* If a builtin at the end of a bb folded into nothing,
4784 the following loop won't work. */
4785 if (gsi_end_p (gsi))
4786 {
4787 cgraph_update_edges_for_call_stmt (old_stmt,
4788 old_decl, NULL);
4789 break;
4790 }
44e10129 4791 if (gsi_end_p (i2))
06e28de2 4792 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
44e10129
MM
4793 else
4794 gsi_next (&i2);
4795 while (1)
4796 {
4797 new_stmt = gsi_stmt (i2);
4798 update_stmt (new_stmt);
4799 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4800 new_stmt);
4801
4802 if (new_stmt == gsi_stmt (gsi))
4803 {
4804 /* It is okay to check only for the very last
4805 of these statements.  If it is a throwing
4806 statement nothing will change.  If it isn't,
4807 this can remove EH edges.  That would only
4808 be wrong if some intermediate statements
4809 could throw while the last one does not;
4810 then we would have to split the block, which
4811 we cannot do here and we would lose anyway.
4812 And as builtins probably never throw, this
4813 all is moot anyway.  */
4814 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4815 new_stmt))
06e28de2
DM
4816 gimple_purge_dead_eh_edges (
4817 BASIC_BLOCK_FOR_FN (cfun, first));
44e10129
MM
4818 break;
4819 }
4820 gsi_next (&i2);
4821 }
4822 }
4823 }
4824 else if (fold_stmt (&gsi))
9477eb38 4825 {
726a989a
RB
4826 /* Re-read the statement from GSI as fold_stmt() may
4827 have changed it. */
4828 gimple new_stmt = gsi_stmt (gsi);
4829 update_stmt (new_stmt);
4830
4b685e14
JH
4831 if (is_gimple_call (old_stmt)
4832 || is_gimple_call (new_stmt))
44e10129
MM
4833 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4834 new_stmt);
726a989a
RB
4835
4836 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
06e28de2
DM
4837 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4838 first));
9477eb38
JH
4839 }
4840 }
b8a00a4d
JH
4841 }
4842}
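/* Illustrative example (hypothetical user code, not GCC internals): why the
   statements copied by the inliner are queued and folded afterwards.  Once
   inlining substitutes a constant argument, a builtin call in the copied
   body can fold away entirely, e.g.

     n = __builtin_strlen ("abcd");

   becomes the plain assignment 'n = 4', at which point the call-graph edge
   for the vanished call has to be dropped, which is what the calls to
   cgraph_update_edges_for_call_stmt above take care of.  */

static unsigned long
strlen_folds_to_constant (void)
{
  return __builtin_strlen ("abcd");   /* folds to the constant 4  */
}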
4843
d4e4baa9
AO
4844/* Expand calls to inline functions in the body of FN. */
4845
873aa8f5 4846unsigned int
46c5ad27 4847optimize_inline_calls (tree fn)
d4e4baa9 4848{
1b369fae 4849 copy_body_data id;
e21aff8a 4850 basic_block bb;
0cae8d31 4851 int last = n_basic_blocks_for_fn (cfun);
5d7b099c 4852 bool inlined_p = false;
d406b663 4853
d4e4baa9
AO
4854 /* Clear out ID. */
4855 memset (&id, 0, sizeof (id));
4856
d52f5295 4857 id.src_node = id.dst_node = cgraph_node::get (fn);
67348ccc 4858 gcc_assert (id.dst_node->definition);
1b369fae 4859 id.dst_fn = fn;
d4e4baa9 4860 /* Or any functions that aren't finished yet. */
d4e4baa9 4861 if (current_function_decl)
0f900dfa 4862 id.dst_fn = current_function_decl;
1b369fae
RH
4863
4864 id.copy_decl = copy_decl_maybe_to_var;
4865 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4866 id.transform_new_cfg = false;
4867 id.transform_return_to_modify = true;
78bbd765 4868 id.transform_parameter = true;
9ff420f1 4869 id.transform_lang_insert_block = NULL;
6e2830c3 4870 id.statements_to_fold = new hash_set<gimple>;
1b369fae 4871
45852dcc 4872 push_gimplify_context ();
d4e4baa9 4873
672987e8
ZD
4874 /* We make no attempts to keep dominance info up-to-date. */
4875 free_dominance_info (CDI_DOMINATORS);
4876 free_dominance_info (CDI_POST_DOMINATORS);
4877
726a989a
RB
4878 /* Register specific gimple functions. */
4879 gimple_register_cfg_hooks ();
4880
e21aff8a
SB
4881 /* Reach the trees by walking over the CFG, and note the
4882 enclosing basic-blocks in the call edges. */
4883 /* We walk the blocks going forward, because inlined function bodies
4884 will split id->current_basic_block, and the new blocks will
4885 follow it; we'll trudge through them, processing their CALL_EXPRs
4886 along the way. */
11cd3bed 4887 FOR_EACH_BB_FN (bb, cfun)
5d7b099c 4888 inlined_p |= gimple_expand_calls_inline (bb, &id);
d4e4baa9 4889
e21aff8a 4890 pop_gimplify_context (NULL);
6de9cd9a 4891
18c6ada9
JH
4892#ifdef ENABLE_CHECKING
4893 {
4894 struct cgraph_edge *e;
4895
d52f5295 4896 id.dst_node->verify ();
18c6ada9
JH
4897
4898 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 4899 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 4900 gcc_assert (e->inline_failed);
18c6ada9
JH
4901 }
4902#endif
b8698a0f 4903
5d7b099c 4904 /* Fold queued statements. */
a9eafe81 4905 fold_marked_statements (last, id.statements_to_fold);
6e2830c3 4906 delete id.statements_to_fold;
b8698a0f 4907
9771b263 4908 gcc_assert (!id.debug_stmts.exists ());
b5b8b0ac 4909
5d7b099c
RG
4910 /* If we didn't inline into the function there is nothing to do. */
4911 if (!inlined_p)
4912 return 0;
4913
a9eafe81
AP
4914 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4915 number_blocks (fn);
b8a00a4d 4916
078c3644
JH
4917 delete_unreachable_blocks_update_callgraph (&id);
4918#ifdef ENABLE_CHECKING
d52f5295 4919 id.dst_node->verify ();
078c3644 4920#endif
726a989a 4921
110cfe1c
JH
4922 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4923 not possible yet - the IPA passes might make various functions to not
4924 throw and they don't care to proactively update local EH info. This is
4925 done later in fixup_cfg pass that also execute the verification. */
726a989a
RB
4926 return (TODO_update_ssa
4927 | TODO_cleanup_cfg
45a80bb9 4928 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5d7b099c 4929 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
0a6a6ac9
DM
4930 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4931 ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
4932}
4933
d4e4baa9
AO
4934/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4935
4936tree
46c5ad27 4937copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
4938{
4939 enum tree_code code = TREE_CODE (*tp);
07beea0d 4940 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
4941
4942 /* We make copies of most nodes. */
07beea0d 4943 if (IS_EXPR_CODE_CLASS (cl)
d4e4baa9
AO
4944 || code == TREE_LIST
4945 || code == TREE_VEC
8843c120
DN
4946 || code == TYPE_DECL
4947 || code == OMP_CLAUSE)
d4e4baa9
AO
4948 {
4949 /* Because the chain gets clobbered when we make a copy, we save it
4950 here. */
82d6e6fc 4951 tree chain = NULL_TREE, new_tree;
07beea0d 4952
81f653d6
NF
4953 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4954 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
4955
4956 /* Copy the node. */
82d6e6fc 4957 new_tree = copy_node (*tp);
6de9cd9a 4958
82d6e6fc 4959 *tp = new_tree;
d4e4baa9
AO
4960
4961 /* Now, restore the chain, if appropriate. That will cause
4962 walk_tree to walk into the chain as well. */
50674e96
DN
4963 if (code == PARM_DECL
4964 || code == TREE_LIST
aaf46ef9 4965 || code == OMP_CLAUSE)
d4e4baa9
AO
4966 TREE_CHAIN (*tp) = chain;
4967
4968 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
4969 have to nullify all BIND_EXPRs. */
4970 if (TREE_CODE (*tp) == BIND_EXPR)
4971 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 4972 }
4038c495
GB
4973 else if (code == CONSTRUCTOR)
4974 {
4975 /* CONSTRUCTOR nodes need special handling because
4976 we need to duplicate the vector of elements. */
82d6e6fc 4977 tree new_tree;
4038c495 4978
82d6e6fc 4979 new_tree = copy_node (*tp);
9771b263 4980 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
82d6e6fc 4981 *tp = new_tree;
4038c495 4982 }
3533b943 4983 else if (code == STATEMENT_LIST)
deb5046b
JM
4984 /* We used to just abort on STATEMENT_LIST, but we can run into them
4985 with statement-expressions (c++/40975). */
4986 copy_statement_list (tp);
6615c446 4987 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 4988 *walk_subtrees = 0;
6615c446 4989 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 4990 *walk_subtrees = 0;
a396f8ae
GK
4991 else if (TREE_CODE_CLASS (code) == tcc_constant)
4992 *walk_subtrees = 0;
d4e4baa9
AO
4993 return NULL_TREE;
4994}
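/* Minimal usage sketch (hypothetical caller, assuming a GENERIC tree): deep
   copy an expression by walking it with copy_tree_r.  Declarations, types
   and constants are intentionally shared rather than copied, because
   copy_tree_r clears *WALK_SUBTREES for those tree classes above.  */

static tree
copy_generic_expr (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}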
4995
4996/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 4997 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
4998 use that one. Otherwise, create a new node and enter it in ST. FN is
4999 the function into which the copy will be placed. */
d4e4baa9 5000
892c7e1e 5001static void
b787e7a2 5002remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
d4e4baa9 5003{
6be42dd4 5004 tree *n;
5e20bdd7 5005 tree t;
d4e4baa9
AO
5006
5007 /* See if we already encountered this SAVE_EXPR. */
b787e7a2 5008 n = st->get (*tp);
d92b4486 5009
d4e4baa9
AO
5010 /* If we didn't already remap this SAVE_EXPR, do so now. */
5011 if (!n)
5012 {
5e20bdd7 5013 t = copy_node (*tp);
d4e4baa9 5014
d4e4baa9 5015 /* Remember this SAVE_EXPR. */
b787e7a2 5016 st->put (*tp, t);
350ebd54 5017 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
b787e7a2 5018 st->put (t, t);
d4e4baa9
AO
5019 }
5020 else
5e20bdd7
JZ
5021 {
5022 /* We've already walked into this SAVE_EXPR; don't do it again. */
5023 *walk_subtrees = 0;
6be42dd4 5024 t = *n;
5e20bdd7 5025 }
d4e4baa9
AO
5026
5027 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 5028 *tp = t;
d4e4baa9 5029}
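/* Illustrative note (hypothetical example): a SAVE_EXPR stands for an
   expression that must be evaluated once and reused, e.g. the bound of a
   variable-length array in

     void f (int n) { int a[n + 3]; ... }

   where 'n + 3' is referenced both by the array type and by the stack
   allocation.  When a body containing such a node is copied, every
   reference must be redirected to one and the same copy, which is what the
   ST map above guarantees.  */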
d436bff8 5030
726a989a
RB
5031/* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5032 label, copies the declaration and enters it in the decl map in DATA (which
5033 is really a 'copy_body_data *'). */
5034
5035static tree
5036mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5037 bool *handled_ops_p ATTRIBUTE_UNUSED,
5038 struct walk_stmt_info *wi)
5039{
5040 copy_body_data *id = (copy_body_data *) wi->info;
538dd0b7 5041 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
726a989a 5042
538dd0b7 5043 if (stmt)
726a989a
RB
5044 {
5045 tree decl = gimple_label_label (stmt);
5046
5047 /* Copy the decl and remember the copy. */
5048 insert_decl_map (id, decl, id->copy_decl (decl, id));
5049 }
5050
5051 return NULL_TREE;
5052}
5053
5054
5055/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5056 Using the decl map pointed to by ST (really a 'hash_map<tree, tree>'),
5057 remaps all local declarations to appropriate replacements in gimple
5058 operands. */
5059
5060static tree
5061replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5062{
5063 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5064 copy_body_data *id = (copy_body_data *) wi->info;
b787e7a2 5065 hash_map<tree, tree> *st = id->decl_map;
726a989a
RB
5066 tree *n;
5067 tree expr = *tp;
5068
5069 /* Only a local declaration (variable or label). */
5070 if ((TREE_CODE (expr) == VAR_DECL
5071 && !TREE_STATIC (expr))
5072 || TREE_CODE (expr) == LABEL_DECL)
5073 {
5074 /* Lookup the declaration. */
b787e7a2 5075 n = st->get (expr);
726a989a
RB
5076
5077 /* If it's there, remap it. */
5078 if (n)
5079 *tp = *n;
5080 *walk_subtrees = 0;
5081 }
5082 else if (TREE_CODE (expr) == STATEMENT_LIST
5083 || TREE_CODE (expr) == BIND_EXPR
5084 || TREE_CODE (expr) == SAVE_EXPR)
5085 gcc_unreachable ();
5086 else if (TREE_CODE (expr) == TARGET_EXPR)
5087 {
5088 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5089 It's OK for this to happen if it was part of a subtree that
5090 isn't immediately expanded, such as operand 2 of another
5091 TARGET_EXPR. */
5092 if (!TREE_OPERAND (expr, 1))
5093 {
5094 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5095 TREE_OPERAND (expr, 3) = NULL_TREE;
5096 }
5097 }
5098
5099 /* Keep iterating. */
5100 return NULL_TREE;
5101}
5102
5103
5104/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5105 Using the decl map pointed to by ST (really a 'hash_map<tree, tree>'),
5106 remaps all local declarations to appropriate replacements in gimple
5107 statements. */
5108
5109static tree
5110replace_locals_stmt (gimple_stmt_iterator *gsip,
5111 bool *handled_ops_p ATTRIBUTE_UNUSED,
5112 struct walk_stmt_info *wi)
5113{
5114 copy_body_data *id = (copy_body_data *) wi->info;
538dd0b7 5115 gimple gs = gsi_stmt (*gsip);
726a989a 5116
538dd0b7 5117 if (gbind *stmt = dyn_cast <gbind *> (gs))
726a989a
RB
5118 {
5119 tree block = gimple_bind_block (stmt);
5120
5121 if (block)
5122 {
5123 remap_block (&block, id);
5124 gimple_bind_set_block (stmt, block);
5125 }
5126
5127 /* This will remap a lot of the same decls again, but this should be
5128 harmless. */
5129 if (gimple_bind_vars (stmt))
9771b263
DN
5130 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5131 NULL, id));
726a989a
RB
5132 }
5133
5134 /* Keep iterating. */
5135 return NULL_TREE;
5136}
5137
5138
5139/* Copies everything in SEQ and replaces variables and labels local to
5140 current_function_decl. */
5141
5142gimple_seq
5143copy_gimple_seq_and_replace_locals (gimple_seq seq)
5144{
5145 copy_body_data id;
5146 struct walk_stmt_info wi;
726a989a
RB
5147 gimple_seq copy;
5148
5149 /* There's nothing to do for an empty sequence. */
5150 if (seq == NULL)
5151 return seq;
5152
5153 /* Set up ID. */
5154 memset (&id, 0, sizeof (id));
5155 id.src_fn = current_function_decl;
5156 id.dst_fn = current_function_decl;
b787e7a2 5157 id.decl_map = new hash_map<tree, tree>;
b5b8b0ac 5158 id.debug_map = NULL;
726a989a
RB
5159
5160 id.copy_decl = copy_decl_no_change;
5161 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5162 id.transform_new_cfg = false;
5163 id.transform_return_to_modify = false;
78bbd765 5164 id.transform_parameter = false;
726a989a
RB
5165 id.transform_lang_insert_block = NULL;
5166
5167 /* Walk the tree once to find local labels. */
5168 memset (&wi, 0, sizeof (wi));
6e2830c3 5169 hash_set<tree> visited;
726a989a 5170 wi.info = &id;
6e2830c3 5171 wi.pset = &visited;
726a989a 5172 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
726a989a
RB
5173
5174 copy = gimple_seq_copy (seq);
5175
5176 /* Walk the copy, remapping decls. */
5177 memset (&wi, 0, sizeof (wi));
5178 wi.info = &id;
5179 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5180
5181 /* Clean up. */
b787e7a2 5182 delete id.decl_map;
b5b8b0ac 5183 if (id.debug_map)
b787e7a2 5184 delete id.debug_map;
f3dccf50
RB
5185 if (id.dependence_map)
5186 {
5187 delete id.dependence_map;
5188 id.dependence_map = NULL;
5189 }
726a989a
RB
5190
5191 return copy;
5192}
5193
5194
6de9cd9a 5195/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 5196
6de9cd9a
DN
5197static tree
5198debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5199{
5200 if (*tp == data)
5201 return (tree) data;
5202 else
5203 return NULL;
5204}
5205
24e47c76 5206DEBUG_FUNCTION bool
6de9cd9a
DN
5207debug_find_tree (tree top, tree search)
5208{
5209 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5210}
5211
e21aff8a 5212
6de9cd9a
DN
5213/* Declare the variables created by the inliner. Add all the variables in
5214 VARS to BLOCK and register them as local decls of the current function. */
5215
5216static void
e21aff8a 5217declare_inline_vars (tree block, tree vars)
6de9cd9a 5218{
84936f6f 5219 tree t;
910ad8de 5220 for (t = vars; t; t = DECL_CHAIN (t))
9659ce8b
JH
5221 {
5222 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5223 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
c021f10b 5224 add_local_decl (cfun, t);
9659ce8b 5225 }
6de9cd9a 5226
e21aff8a
SB
5227 if (block)
5228 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5229}
5230
19734dd8 5231/* Finish up the copy COPY of DECL.  The DECL originally was in the function
1b369fae
RH
5232 ID->src_fn, but the copy will now live in ID->dst_fn; fill in the fields
5233 shared by all kinds of decl copies. */
19734dd8 5234
1b369fae
RH
5235static tree
5236copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 5237{
19734dd8
RL
5238 /* Don't generate debug information for the copy if we wouldn't have
5239 generated it for the copy either. */
5240 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5241 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5242
5243 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
b8698a0f 5244 declaration inspired this copy. */
19734dd8
RL
5245 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5246
5247 /* The new variable/label has no RTL, yet. */
68a976f2
RL
5248 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5249 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2eb79bbb 5250 SET_DECL_RTL (copy, 0);
b8698a0f 5251
19734dd8
RL
5252 /* These args would always appear unused, if not for this. */
5253 TREE_USED (copy) = 1;
5254
5255 /* Set the context for the new declaration. */
5256 if (!DECL_CONTEXT (decl))
5257 /* Globals stay global. */
5258 ;
1b369fae 5259 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
5260 /* Things that weren't in the scope of the function we're inlining
5261 from aren't in the scope we're inlining to, either. */
5262 ;
5263 else if (TREE_STATIC (decl))
5264 /* Function-scoped static variables should stay in the original
5265 function. */
5266 ;
5267 else
5268 /* Ordinary automatic local variables are now in the scope of the
5269 new function. */
1b369fae 5270 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8
RL
5271
5272 return copy;
5273}
5274
1b369fae
RH
5275static tree
5276copy_decl_to_var (tree decl, copy_body_data *id)
5277{
5278 tree copy, type;
5279
5280 gcc_assert (TREE_CODE (decl) == PARM_DECL
5281 || TREE_CODE (decl) == RESULT_DECL);
5282
5283 type = TREE_TYPE (decl);
5284
c2255bc4
AH
5285 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5286 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
5287 if (DECL_PT_UID_SET_P (decl))
5288 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1b369fae
RH
5289 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5290 TREE_READONLY (copy) = TREE_READONLY (decl);
5291 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
0890b981 5292 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
1b369fae
RH
5293
5294 return copy_decl_for_dup_finish (id, decl, copy);
5295}
5296
c08cd4c1
JM
5297/* Like copy_decl_to_var, but create a return slot object instead of a
5298 pointer variable for return by invisible reference. */
5299
5300static tree
5301copy_result_decl_to_var (tree decl, copy_body_data *id)
5302{
5303 tree copy, type;
5304
5305 gcc_assert (TREE_CODE (decl) == PARM_DECL
5306 || TREE_CODE (decl) == RESULT_DECL);
5307
5308 type = TREE_TYPE (decl);
5309 if (DECL_BY_REFERENCE (decl))
5310 type = TREE_TYPE (type);
5311
c2255bc4
AH
5312 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5313 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
5314 if (DECL_PT_UID_SET_P (decl))
5315 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
c08cd4c1
JM
5316 TREE_READONLY (copy) = TREE_READONLY (decl);
5317 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5318 if (!DECL_BY_REFERENCE (decl))
5319 {
5320 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
0890b981 5321 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
c08cd4c1
JM
5322 }
5323
5324 return copy_decl_for_dup_finish (id, decl, copy);
5325}
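/* Illustrative user-level example (hypothetical, not GCC internals): on
   typical ABIs the aggregate below is returned through an invisible
   reference, so the callee's RESULT_DECL is DECL_BY_REFERENCE.  For such a
   result, copy_result_decl_to_var above creates a VAR_DECL of the aggregate
   type itself (a return slot object) instead of a pointer variable.  */

struct big_result
{
  long field[16];
};

static struct big_result
make_big_result (void)
{
  struct big_result r = { { 0 } };
  return r;
}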
5326
9ff420f1 5327tree
1b369fae
RH
5328copy_decl_no_change (tree decl, copy_body_data *id)
5329{
5330 tree copy;
5331
5332 copy = copy_node (decl);
5333
5334 /* The COPY is not abstract; it will be generated in DST_FN. */
00de328a 5335 DECL_ABSTRACT_P (copy) = false;
1b369fae
RH
5336 lang_hooks.dup_lang_specific_decl (copy);
5337
5338 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5339 been taken; it's for internal bookkeeping in expand_goto_internal. */
5340 if (TREE_CODE (copy) == LABEL_DECL)
5341 {
5342 TREE_ADDRESSABLE (copy) = 0;
5343 LABEL_DECL_UID (copy) = -1;
5344 }
5345
5346 return copy_decl_for_dup_finish (id, decl, copy);
5347}
5348
5349static tree
5350copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5351{
5352 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5353 return copy_decl_to_var (decl, id);
5354 else
5355 return copy_decl_no_change (decl, id);
5356}
5357
19734dd8
RL
5358/* Return a copy of the function's argument tree. */
5359static tree
c6f7cfc1
JH
5360copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5361 bitmap args_to_skip, tree *vars)
19734dd8 5362{
c6f7cfc1
JH
5363 tree arg, *parg;
5364 tree new_parm = NULL;
5365 int i = 0;
19734dd8 5366
c6f7cfc1
JH
5367 parg = &new_parm;
5368
910ad8de 5369 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
c6f7cfc1
JH
5370 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5371 {
5372 tree new_tree = remap_decl (arg, id);
d7da5cc8
MJ
5373 if (TREE_CODE (new_tree) != PARM_DECL)
5374 new_tree = id->copy_decl (arg, id);
c6f7cfc1
JH
5375 lang_hooks.dup_lang_specific_decl (new_tree);
5376 *parg = new_tree;
910ad8de 5377 parg = &DECL_CHAIN (new_tree);
c6f7cfc1 5378 }
b787e7a2 5379 else if (!id->decl_map->get (arg))
c6f7cfc1
JH
5380 {
5381 /* Make an equivalent VAR_DECL. If the argument was used
5382 as a temporary variable later in the function, the uses will be
5383 replaced by the local variable. */
5384 tree var = copy_decl_to_var (arg, id);
c6f7cfc1
JH
5385 insert_decl_map (id, arg, var);
5386 /* Declare this new variable. */
910ad8de 5387 DECL_CHAIN (var) = *vars;
c6f7cfc1
JH
5388 *vars = var;
5389 }
5390 return new_parm;
19734dd8
RL
5391}
5392
5393/* Return a copy of the function's static chain. */
5394static tree
1b369fae 5395copy_static_chain (tree static_chain, copy_body_data * id)
19734dd8
RL
5396{
5397 tree *chain_copy, *pvar;
5398
5399 chain_copy = &static_chain;
910ad8de 5400 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
19734dd8 5401 {
82d6e6fc
KG
5402 tree new_tree = remap_decl (*pvar, id);
5403 lang_hooks.dup_lang_specific_decl (new_tree);
910ad8de 5404 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
82d6e6fc 5405 *pvar = new_tree;
19734dd8
RL
5406 }
5407 return static_chain;
5408}
5409
5410/* Return true if the function is allowed to be versioned.
5411 This is a guard for the versioning functionality. */
27dbd3ac 5412
19734dd8
RL
5413bool
5414tree_versionable_function_p (tree fndecl)
5415{
86631ea3
MJ
5416 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5417 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
19734dd8
RL
5418}
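/* Illustrative user-level example: a function carrying the "noclone"
   attribute is rejected by tree_versionable_function_p above, so
   optimizations that would clone it, such as IPA constant propagation,
   leave it as a single copy.  */

__attribute__ ((noclone)) int
keep_single_copy (int x)
{
  return x + 1;
}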
5419
9187e02d
JH
5420/* Delete all unreachable basic blocks and update callgraph.
5421 Doing so is somewhat nontrivial because we need to update all clones and
5422 remove inline functions that become unreachable. */
9f5e9983 5423
9187e02d
JH
5424static bool
5425delete_unreachable_blocks_update_callgraph (copy_body_data *id)
9f5e9983 5426{
9187e02d
JH
5427 bool changed = false;
5428 basic_block b, next_bb;
5429
5430 find_unreachable_blocks ();
5431
5432 /* Delete all unreachable basic blocks. */
5433
fefa31b5
DM
5434 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5435 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
9187e02d
JH
5436 {
5437 next_bb = b->next_bb;
5438
5439 if (!(b->flags & BB_REACHABLE))
5440 {
5441 gimple_stmt_iterator bsi;
5442
5443 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
042ae7d2
JH
5444 {
5445 struct cgraph_edge *e;
5446 struct cgraph_node *node;
9187e02d 5447
d122681a 5448 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
042ae7d2
JH
5449
5450 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
d52f5295 5451 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
042ae7d2
JH
5452 {
5453 if (!e->inline_failed)
d52f5295 5454 e->callee->remove_symbol_and_inline_clones (id->dst_node);
042ae7d2 5455 else
3dafb85c 5456 e->remove ();
042ae7d2
JH
5457 }
5458 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5459 && id->dst_node->clones)
5460 for (node = id->dst_node->clones; node != id->dst_node;)
9187e02d 5461 {
d122681a 5462 node->remove_stmt_references (gsi_stmt (bsi));
042ae7d2 5463 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
d52f5295 5464 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
042ae7d2
JH
5465 {
5466 if (!e->inline_failed)
d52f5295 5467 e->callee->remove_symbol_and_inline_clones (id->dst_node);
042ae7d2 5468 else
3dafb85c 5469 e->remove ();
042ae7d2
JH
5470 }
5471
5472 if (node->clones)
5473 node = node->clones;
5474 else if (node->next_sibling_clone)
5475 node = node->next_sibling_clone;
9187e02d 5476 else
042ae7d2
JH
5477 {
5478 while (node != id->dst_node && !node->next_sibling_clone)
5479 node = node->clone_of;
5480 if (node != id->dst_node)
5481 node = node->next_sibling_clone;
5482 }
9187e02d 5483 }
042ae7d2 5484 }
9187e02d
JH
5485 delete_basic_block (b);
5486 changed = true;
5487 }
5488 }
5489
9187e02d 5490 return changed;
9f5e9983
JJ
5491}
5492
08ad1d6d
JH
5493/* Update clone info after duplication. */
5494
5495static void
5496update_clone_info (copy_body_data * id)
5497{
5498 struct cgraph_node *node;
5499 if (!id->dst_node->clones)
5500 return;
5501 for (node = id->dst_node->clones; node != id->dst_node;)
5502 {
5503 /* First update replace maps to match the new body. */
5504 if (node->clone.tree_map)
5505 {
5506 unsigned int i;
9771b263 5507 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
08ad1d6d
JH
5508 {
5509 struct ipa_replace_map *replace_info;
9771b263 5510 replace_info = (*node->clone.tree_map)[i];
08ad1d6d
JH
5511 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5512 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5513 }
5514 }
5515 if (node->clones)
5516 node = node->clones;
5517 else if (node->next_sibling_clone)
5518 node = node->next_sibling_clone;
5519 else
5520 {
5521 while (node != id->dst_node && !node->next_sibling_clone)
5522 node = node->clone_of;
5523 if (node != id->dst_node)
5524 node = node->next_sibling_clone;
5525 }
5526 }
5527}
5528
19734dd8
RL
5529/* Create a copy of a function's tree.
5530 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5531 of the original function and the new copied function
b8698a0f
L
5532 respectively. In case we want to replace a DECL
5533 tree with another tree while duplicating the function's
5534 body, TREE_MAP represents the mapping between these
ea99e0be 5535 trees. If UPDATE_CLONES is set, the call_stmt fields
91382288
JH
5536 of edges of clones of the function will be updated.
5537
5538 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5539 from the new version.
1a2c27e9 5540 If SKIP_RETURN is true, the new version will return void.
91382288
JH
5541 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5542 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5543*/
19734dd8 5544void
27dbd3ac 5545tree_function_versioning (tree old_decl, tree new_decl,
d52f5295 5546 vec<ipa_replace_map *, va_gc> *tree_map,
91382288 5547 bool update_clones, bitmap args_to_skip,
1a2c27e9
EB
5548 bool skip_return, bitmap blocks_to_copy,
5549 basic_block new_entry)
19734dd8
RL
5550{
5551 struct cgraph_node *old_version_node;
5552 struct cgraph_node *new_version_node;
1b369fae 5553 copy_body_data id;
110cfe1c 5554 tree p;
19734dd8
RL
5555 unsigned i;
5556 struct ipa_replace_map *replace_info;
b5b8b0ac 5557 basic_block old_entry_block, bb;
00f96dc9 5558 auto_vec<gimple, 10> init_stmts;
0f1961a2 5559 tree vars = NULL_TREE;
19734dd8
RL
5560
5561 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5562 && TREE_CODE (new_decl) == FUNCTION_DECL);
5563 DECL_POSSIBLY_INLINED (old_decl) = 1;
5564
d52f5295 5565 old_version_node = cgraph_node::get (old_decl);
fe660d7b 5566 gcc_checking_assert (old_version_node);
d52f5295 5567 new_version_node = cgraph_node::get (new_decl);
fe660d7b 5568 gcc_checking_assert (new_version_node);
19734dd8 5569
ddb555ed
JJ
5570 /* Copy over debug args. */
5571 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5572 {
9771b263 5573 vec<tree, va_gc> **new_debug_args, **old_debug_args;
ddb555ed
JJ
5574 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5575 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5576 old_debug_args = decl_debug_args_lookup (old_decl);
5577 if (old_debug_args)
5578 {
5579 new_debug_args = decl_debug_args_insert (new_decl);
9771b263 5580 *new_debug_args = vec_safe_copy (*old_debug_args);
ddb555ed
JJ
5581 }
5582 }
5583
a3aadcc5
JH
5584 /* Output the inlining info for this abstract function, since it has been
5585 inlined. If we don't do this now, we can lose the information about the
5586 variables in the function when the blocks get blown away as soon as we
5587 remove the cgraph node. */
5588 (*debug_hooks->outlining_inline_function) (old_decl);
5589
19734dd8
RL
5590 DECL_ARTIFICIAL (new_decl) = 1;
5591 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
c0c123ef
JH
5592 if (DECL_ORIGIN (old_decl) == old_decl)
5593 old_version_node->used_as_abstract_origin = true;
f9417da1 5594 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
19734dd8 5595
3d283195
JH
5596 /* Prepare the data structures for the tree copy. */
5597 memset (&id, 0, sizeof (id));
5598
19734dd8 5599 /* Generate a new name for the new version. */
6e2830c3 5600 id.statements_to_fold = new hash_set<gimple>;
b5b8b0ac 5601
b787e7a2 5602 id.decl_map = new hash_map<tree, tree>;
b5b8b0ac 5603 id.debug_map = NULL;
1b369fae
RH
5604 id.src_fn = old_decl;
5605 id.dst_fn = new_decl;
5606 id.src_node = old_version_node;
5607 id.dst_node = new_version_node;
5608 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4029a5e0 5609 id.blocks_to_copy = blocks_to_copy;
b8698a0f 5610
1b369fae
RH
5611 id.copy_decl = copy_decl_no_change;
5612 id.transform_call_graph_edges
5613 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5614 id.transform_new_cfg = true;
5615 id.transform_return_to_modify = false;
78bbd765 5616 id.transform_parameter = false;
9ff420f1 5617 id.transform_lang_insert_block = NULL;
1b369fae 5618
fefa31b5 5619 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
110cfe1c 5620 (DECL_STRUCT_FUNCTION (old_decl));
c0c123ef
JH
5621 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5622 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
110cfe1c 5623 initialize_cfun (new_decl, old_decl,
0d63a740 5624 old_entry_block->count);
95cc0a1a
IE
5625 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5626 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5627 = id.src_cfun->gimple_df->ipa_pta;
b8698a0f 5628
19734dd8
RL
5629 /* Copy the function's static chain. */
5630 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5631 if (p)
5632 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5633 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5634 &id);
b8698a0f 5635
19734dd8
RL
5636 /* If there's a tree_map, prepare for substitution. */
5637 if (tree_map)
9771b263 5638 for (i = 0; i < tree_map->length (); i++)
19734dd8 5639 {
0f1961a2 5640 gimple init;
9771b263 5641 replace_info = (*tree_map)[i];
1b369fae 5642 if (replace_info->replace_p)
00fc2333 5643 {
922f15c2
JH
5644 if (!replace_info->old_tree)
5645 {
5646 int i = replace_info->parm_num;
5647 tree parm;
0e8853ee
JH
5648 tree req_type;
5649
910ad8de 5650 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
922f15c2
JH
5651 i --;
5652 replace_info->old_tree = parm;
0e8853ee
JH
5653 req_type = TREE_TYPE (parm);
5654 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5655 {
5656 if (fold_convertible_p (req_type, replace_info->new_tree))
5657 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5658 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5659 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5660 else
5661 {
5662 if (dump_file)
5663 {
5664 fprintf (dump_file, " const ");
5665 print_generic_expr (dump_file, replace_info->new_tree, 0);
5666 fprintf (dump_file, " can't be converted to param ");
5667 print_generic_expr (dump_file, parm, 0);
5668 fprintf (dump_file, "\n");
5669 }
5670 replace_info->old_tree = NULL;
5671 }
5672 }
5673 }
5674 else
5675 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5676 if (replace_info->old_tree)
5677 {
5678 init = setup_one_parameter (&id, replace_info->old_tree,
5679 replace_info->new_tree, id.src_fn,
5680 NULL,
5681 &vars);
5682 if (init)
5683 init_stmts.safe_push (init);
922f15c2 5684 }
00fc2333 5685 }
19734dd8 5686 }
eb50f5f4
JH
5687 /* Copy the function's arguments. */
5688 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5689 DECL_ARGUMENTS (new_decl) =
5690 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5691 args_to_skip, &vars);
b8698a0f 5692
eb50f5f4 5693 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
474086eb 5694 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
b8698a0f 5695
0f1961a2 5696 declare_inline_vars (DECL_INITIAL (new_decl), vars);
9187e02d 5697
9771b263 5698 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
19734dd8 5699 /* Add local vars. */
ae0379fc 5700 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
b8698a0f 5701
90dda0e9 5702 if (DECL_RESULT (old_decl) == NULL_TREE)
1a2c27e9 5703 ;
90dda0e9 5704 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
1a2c27e9
EB
5705 {
5706 DECL_RESULT (new_decl)
5707 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5708 RESULT_DECL, NULL_TREE, void_type_node);
5709 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5710 cfun->returns_struct = 0;
5711 cfun->returns_pcc_struct = 0;
5712 }
5713 else
19734dd8 5714 {
6ff38230
RG
5715 tree old_name;
5716 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
19734dd8 5717 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6ff38230
RG
5718 if (gimple_in_ssa_p (id.src_cfun)
5719 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
32244553 5720 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6ff38230
RG
5721 {
5722 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5723 insert_decl_map (&id, old_name, new_name);
5724 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
32244553 5725 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6ff38230 5726 }
19734dd8 5727 }
b8698a0f 5728
a9e0d843 5729 /* Set up the destination function's loop tree. */
0fc822d0 5730 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
a9e0d843
RB
5731 {
5732 cfun->curr_properties &= ~PROP_loops;
5733 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5734 cfun->curr_properties |= PROP_loops;
5735 }
5736
6ff38230
RG
5737 /* Copy the Function's body. */
5738 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
fefa31b5
DM
5739 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5740 new_entry);
6ff38230 5741
19734dd8
RL
5742 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5743 number_blocks (new_decl);
5744
b5b8b0ac
AO
5745 /* We want to create the BB unconditionally, so that the addition of
5746 debug stmts doesn't affect BB count, which may in the end cause
5747 codegen differences. */
fefa31b5 5748 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
9771b263
DN
5749 while (init_stmts.length ())
5750 insert_init_stmt (&id, bb, init_stmts.pop ());
08ad1d6d 5751 update_clone_info (&id);
0f1961a2 5752
27dbd3ac
RH
5753 /* Remap the nonlocal_goto_save_area, if any. */
5754 if (cfun->nonlocal_goto_save_area)
5755 {
5756 struct walk_stmt_info wi;
5757
5758 memset (&wi, 0, sizeof (wi));
5759 wi.info = &id;
5760 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5761 }
5762
19734dd8 5763 /* Clean up. */
b787e7a2 5764 delete id.decl_map;
b5b8b0ac 5765 if (id.debug_map)
b787e7a2 5766 delete id.debug_map;
5006671f
RG
5767 free_dominance_info (CDI_DOMINATORS);
5768 free_dominance_info (CDI_POST_DOMINATORS);
9187e02d
JH
5769
5770 fold_marked_statements (0, id.statements_to_fold);
6e2830c3 5771 delete id.statements_to_fold;
9187e02d
JH
5772 fold_cond_expr_cond ();
5773 delete_unreachable_blocks_update_callgraph (&id);
67348ccc 5774 if (id.dst_node->definition)
3dafb85c 5775 cgraph_edge::rebuild_references ();
33d9078a
RB
5776 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5777 {
5778 calculate_dominance_info (CDI_DOMINATORS);
5779 fix_loop_structure (NULL);
5780 }
9187e02d 5781 update_ssa (TODO_update_ssa);
b35366ce
JH
5782
5783 /* After partial cloning we need to rescale frequencies, so they are
5784 within proper range in the cloned function. */
5785 if (new_entry)
5786 {
5787 struct cgraph_edge *e;
5788 rebuild_frequencies ();
5789
fefa31b5 5790 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
b35366ce
JH
5791 for (e = new_version_node->callees; e; e = e->next_callee)
5792 {
5793 basic_block bb = gimple_bb (e->call_stmt);
02ec6988
MJ
5794 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5795 bb);
5796 e->count = bb->count;
5797 }
5798 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5799 {
5800 basic_block bb = gimple_bb (e->call_stmt);
5801 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5802 bb);
b35366ce
JH
5803 e->count = bb->count;
5804 }
5805 }
5806
9187e02d
JH
5807 free_dominance_info (CDI_DOMINATORS);
5808 free_dominance_info (CDI_POST_DOMINATORS);
5809
9771b263 5810 gcc_assert (!id.debug_stmts.exists ());
110cfe1c 5811 pop_cfun ();
19734dd8
RL
5812 return;
5813}
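/* Conceptual illustration (hypothetical names, source-level view): IPA
   constant propagation is one user of tree_function_versioning.  Given

     static int scale (int x, int factor) { return x * factor; }

   with every caller passing factor == 2, it can request a clone in which
   TREE_MAP replaces the parameter FACTOR with the constant 2 and
   ARGS_TO_SKIP drops it from the new signature, yielding roughly

     static int scale_constprop (int x) { return x * 2; }

   after which the remaining call sites are redirected to the clone.  */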
5814
f82a627c
EB
5815/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5816 the callee and return the inlined body on success. */
5817
5818tree
5819maybe_inline_call_in_expr (tree exp)
5820{
5821 tree fn = get_callee_fndecl (exp);
5822
5823 /* We can only try to inline "const" functions. */
5824 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5825 {
f82a627c
EB
5826 call_expr_arg_iterator iter;
5827 copy_body_data id;
5828 tree param, arg, t;
b787e7a2 5829 hash_map<tree, tree> decl_map;
f82a627c
EB
5830
5831 /* Remap the parameters. */
5832 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5833 param;
910ad8de 5834 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
b787e7a2 5835 decl_map.put (param, arg);
f82a627c
EB
5836
5837 memset (&id, 0, sizeof (id));
5838 id.src_fn = fn;
5839 id.dst_fn = current_function_decl;
5840 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
b787e7a2 5841 id.decl_map = &decl_map;
f82a627c
EB
5842
5843 id.copy_decl = copy_decl_no_change;
5844 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5845 id.transform_new_cfg = false;
5846 id.transform_return_to_modify = true;
78bbd765 5847 id.transform_parameter = true;
267ffce3 5848 id.transform_lang_insert_block = NULL;
f82a627c
EB
5849
5850 /* Make sure not to unshare trees behind the front-end's back
5851 since front-end specific mechanisms may rely on sharing. */
5852 id.regimplify = false;
5853 id.do_not_unshare = true;
5854
5855 /* We're not inside any EH region. */
1d65f45c 5856 id.eh_lp_nr = 0;
f82a627c
EB
5857
5858 t = copy_tree_body (&id);
f82a627c
EB
5859
5860 /* We can only return something suitable for use in a GENERIC
5861 expression tree. */
5862 if (TREE_CODE (t) == MODIFY_EXPR)
5863 return TREE_OPERAND (t, 1);
5864 }
5865
5866 return NULL_TREE;
5867}
5868
52dd234b
RH
5869/* Duplicate a type, fields and all. */
5870
5871tree
5872build_duplicate_type (tree type)
5873{
1b369fae 5874 struct copy_body_data id;
52dd234b
RH
5875
5876 memset (&id, 0, sizeof (id));
1b369fae
RH
5877 id.src_fn = current_function_decl;
5878 id.dst_fn = current_function_decl;
5879 id.src_cfun = cfun;
b787e7a2 5880 id.decl_map = new hash_map<tree, tree>;
b5b8b0ac 5881 id.debug_map = NULL;
4009f2e7 5882 id.copy_decl = copy_decl_no_change;
52dd234b
RH
5883
5884 type = remap_type_1 (type, &id);
5885
b787e7a2 5886 delete id.decl_map;
b5b8b0ac 5887 if (id.debug_map)
b787e7a2 5888 delete id.debug_map;
52dd234b 5889
f31c9f09
DG
5890 TYPE_CANONICAL (type) = type;
5891
52dd234b
RH
5892 return type;
5893}
60813a46
JM
5894
5895/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
5896 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
5897 evaluation. */
5898
5899tree
5900copy_fn (tree fn, tree& parms, tree& result)
5901{
5902 copy_body_data id;
5903 tree param;
5904 hash_map<tree, tree> decl_map;
5905
5906 tree *p = &parms;
5907 *p = NULL_TREE;
5908
5909 memset (&id, 0, sizeof (id));
5910 id.src_fn = fn;
5911 id.dst_fn = current_function_decl;
5912 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5913 id.decl_map = &decl_map;
5914
5915 id.copy_decl = copy_decl_no_change;
5916 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5917 id.transform_new_cfg = false;
5918 id.transform_return_to_modify = false;
5919 id.transform_parameter = true;
5920 id.transform_lang_insert_block = NULL;
5921
5922 /* Make sure not to unshare trees behind the front-end's back
5923 since front-end specific mechanisms may rely on sharing. */
5924 id.regimplify = false;
5925 id.do_not_unshare = true;
5926
5927 /* We're not inside any EH region. */
5928 id.eh_lp_nr = 0;
5929
5930 /* Remap the parameters and result and return them to the caller. */
5931 for (param = DECL_ARGUMENTS (fn);
5932 param;
5933 param = DECL_CHAIN (param))
5934 {
5935 *p = remap_decl (param, &id);
5936 p = &DECL_CHAIN (*p);
5937 }
5938
5939 if (DECL_RESULT (fn))
5940 result = remap_decl (DECL_RESULT (fn), &id);
5941 else
5942 result = NULL_TREE;
5943
5944 return copy_tree_body (&id);
5945}
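/* Hypothetical usage sketch (not the actual C++ front-end code): how a
   constexpr-style evaluator might use copy_fn.  PARMS receives the chain of
   remapped PARM_DECLs and RESULT the remapped RESULT_DECL (or NULL_TREE);
   the returned body shares no trees with FN, so it can be evaluated and
   modified freely.  */

static tree
get_unshared_body_for_evaluation (tree fn)
{
  tree parms = NULL_TREE;
  tree result = NULL_TREE;
  tree body = copy_fn (fn, parms, result);

  /* A real evaluator would now walk PARMS (via DECL_CHAIN) and bind each
     parameter to its argument value before interpreting BODY.  */
  (void) parms;
  (void) result;
  return body;
}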