/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

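/* For example (an illustrative sketch, not taken from these sources):
   inlining

       int inc (int x) { return x + 1; }

   at a call "y = inc (3);" conceptually produces

       { int x.1; x.1 = 3; retval.2 = x.1 + 1; y = retval.2; }

   where the PARM_DECL "x" has been remapped to the local "x.1" and the
   RETURN_EXPR has become an assignment to the returned-value variable
   "retval.2"; the dotted names are invented for illustration.  */
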
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

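/* Usage sketch (hypothetical decls, for illustration only): after
   "insert_decl_map (id, old_parm, new_var)" the map contains both
   old_parm -> new_var and the identity new_var -> new_var, so a later
   walk that encounters new_var maps it to itself instead of copying
   it a second time.  */
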
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function that has an uninitialized variable,
	     we might extend its lifetime (the variable might get reused).
	     This causes an ICE when we end up extending the lifetime of
	     an SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We could avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

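/* Illustrative sketch (invented SSA names): if the inlined body uses
   an uninitialized default definition "x_1(D)" that appears in an
   abnormal PHI, the remapped name gets an explicit zero assignment
   such as "x.3_7 = 0;" inserted at the end of id->entry_bb; otherwise
   the copy simply becomes the default definition of the remapped
   variable.  */
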
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

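/* Illustrative sketch (hypothetical types): only variably modified
   types get a real copy here.  When inlining

       void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of "a" depends on the PARM_DECL "n", so remap_type
   builds a new ARRAY_TYPE whose TYPE_DOMAIN refers to the remapped
   copy of "n"; a fixed type like "int[10]" is mapped to itself.  */
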
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = (tree *) pointer_map_contains (id->decl_map,
					     TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

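/* Illustrative sketch (made-up trees): when the caller passes "&a" for
   a pointer parameter "p", a load "MEM[(int *)p]" in the inlined body
   becomes "MEM[(int *)&a]" after substitution; the fold_build2 call
   above re-canonicalizes that form so later folding can turn it into a
   direct reference to "a".  */
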
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

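/* Illustrative sketch (hypothetical caller/callee): given

       static int deref (int *p) { return *p; }

   inlined at "n = deref (&i);", substituting "&i" for "p" first yields
   the tree "*&i", which the INDIRECT_REF handling above folds back to
   the plain variable reference "i".  */
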
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

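/* A small usage sketch (region numbers invented): if the callee's EH
   region 2 was duplicated into the caller as region 7, then
   "remap_eh_region_nr (2, id)" returns 7, and the INTEGER_CST variant
   rewrites the constant argument of "__builtin_eh_pointer (2)" to 7.  */
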
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

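/* Illustrative sketch (invented SSA names): with
   transform_return_to_modify set, a callee statement

       return x_3;

   is remapped to

       retvar = x_3;

   where "retvar" stands for the variable set up by
   declare_return_variable; a plain "return <result_decl>;" is simply
   dropped as a GIMPLE_NOP.  */
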
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) prev->aux);
  copy_basic_block->count = apply_scale (bb->count, count_scale);

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m; fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, so we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gimple new_call;
	      vec<tree> argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray.create (n);
	      argarray.safe_grow_cleared (n);

	      /* Copy all the arguments before '...' */
	      memcpy (argarray.address (),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...' */
	      memcpy (argarray.address () + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
		      + (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      argarray.release ();

	      /* Copy all GIMPLE_CALL flags, location and block, except
1696 GF_CALL_VA_ARG_PACK. */
1697 gimple_call_copy_flags (new_call, stmt);
1698 gimple_call_set_va_arg_pack (new_call, false);
1699 gimple_set_location (new_call, gimple_location (stmt));
1700 gimple_set_block (new_call, gimple_block (stmt));
1701 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1702
1703 gsi_replace (&copy_gsi, new_call, false);
1704 stmt = new_call;
1705 }
1706 else if (is_gimple_call (stmt)
1707 && id->gimple_call
1708 && (decl = gimple_call_fndecl (stmt))
1709 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1710 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
e0704a46 1711 {
726a989a
RB
1712 /* __builtin_va_arg_pack_len () should be replaced by
1713 the number of anonymous arguments. */
1714 size_t nargs = gimple_call_num_args (id->gimple_call);
1715 tree count, p;
1716 gimple new_stmt;
1717
910ad8de 1718 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
726a989a
RB
1719 nargs--;
1720
1721 count = build_int_cst (integer_type_node, nargs);
1722 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1723 gsi_replace (&copy_gsi, new_stmt, false);
1724 stmt = new_stmt;
1725 }
b8a00a4d 1726
726a989a
RB
1727 /* Statements produced by inlining can be unfolded, especially
1728 when we constant propagated some operands. We can't fold
1729 them right now for two reasons:
1730 1) folding require SSA_NAME_DEF_STMTs to be correct
1731 2) we can't change function calls to builtins.
1732 So we just mark statement for later folding. We mark
1733 all new statements, instead just statements that has changed
1734 by some nontrivial substitution so even statements made
1735 foldable indirectly are updated. If this turns out to be
1736 expensive, copy_body can be told to watch for nontrivial
1737 changes. */
1738 if (id->statements_to_fold)
1739 pointer_set_insert (id->statements_to_fold, stmt);
1740
1741 /* We're duplicating a CALL_EXPR. Find any corresponding
1742 callgraph edges and update or duplicate them. */
1743 if (is_gimple_call (stmt))
1744 {
9b2a5ef7 1745 struct cgraph_edge *edge;
f618d33e 1746 int flags;
6ef5231b 1747
726a989a 1748 switch (id->transform_call_graph_edges)
e0704a46 1749 {
9b2a5ef7
RH
1750 case CB_CGE_DUPLICATE:
1751 edge = cgraph_edge (id->src_node, orig_stmt);
1752 if (edge)
0d63a740
JH
1753 {
1754 int edge_freq = edge->frequency;
042ae7d2
JH
1755 int new_freq;
1756 struct cgraph_edge *old_edge = edge;
0d63a740
JH
1757 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1758 gimple_uid (stmt),
1759 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
898b8927 1760 true);
0d63a740
JH
1761 /* We could also just rescale the frequency, but
1762 doing so would introduce roundoff errors and make
1763 verifier unhappy. */
67348ccc 1764 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
042ae7d2
JH
1765 copy_basic_block);
1766
1767 /* Speculative calls consist of two edges - direct and indirect.
1768 Duplicate the whole thing and distribute frequencies accordingly. */
1769 if (edge->speculative)
0d63a740 1770 {
042ae7d2
JH
1771 struct cgraph_edge *direct, *indirect;
1772 struct ipa_ref *ref;
1773
1774 gcc_assert (!edge->indirect_unknown_callee);
1775 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1776 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1777 gimple_uid (stmt),
1778 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1779 true);
1780 if (old_edge->frequency + indirect->frequency)
1781 {
1782 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1783 (old_edge->frequency + indirect->frequency)),
1784 CGRAPH_FREQ_MAX);
1785 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1786 (old_edge->frequency + indirect->frequency)),
1787 CGRAPH_FREQ_MAX);
1788 }
67348ccc 1789 ipa_clone_ref (ref, id->dst_node, stmt);
042ae7d2
JH
1790 }
1791 else
1792 {
1793 edge->frequency = new_freq;
1794 if (dump_file
1795 && profile_status_for_function (cfun) != PROFILE_ABSENT
1796 && (edge_freq > edge->frequency + 10
1797 || edge_freq < edge->frequency - 10))
1798 {
1799 fprintf (dump_file, "Edge frequency estimated by "
1800 "cgraph %i diverge from inliner's estimate %i\n",
1801 edge_freq,
1802 edge->frequency);
1803 fprintf (dump_file,
1804 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1805 bb->index,
1806 bb->frequency,
1807 copy_basic_block->frequency);
1808 }
0d63a740
JH
1809 }
1810 }
9b2a5ef7
RH
1811 break;
1812
1813 case CB_CGE_MOVE_CLONES:
1814 cgraph_set_call_stmt_including_clones (id->dst_node,
1815 orig_stmt, stmt);
1816 edge = cgraph_edge (id->dst_node, stmt);
1817 break;
1818
1819 case CB_CGE_MOVE:
1820 edge = cgraph_edge (id->dst_node, orig_stmt);
1821 if (edge)
1822 cgraph_set_call_stmt (edge, stmt);
1823 break;
1824
1825 default:
1826 gcc_unreachable ();
110cfe1c 1827 }
f618d33e 1828
9b2a5ef7
RH
1829 /* Constant propagation on argument done during inlining
1830 may create new direct call. Produce an edge for it. */
b8698a0f 1831 if ((!edge
e33c6cd6 1832 || (edge->indirect_inlining_edge
9b2a5ef7 1833 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
67348ccc 1834 && id->dst_node->definition
9b2a5ef7
RH
1835 && (fn = gimple_call_fndecl (stmt)) != NULL)
1836 {
581985d7 1837 struct cgraph_node *dest = cgraph_get_node (fn);
9b2a5ef7
RH
1838
1839 /* We have missing edge in the callgraph. This can happen
1840 when previous inlining turned an indirect call into a
0e3776db 1841 direct call by constant propagating arguments or we are
20a6bb58 1842 producing dead clone (for further cloning). In all
9b2a5ef7
RH
1843 other cases we hit a bug (incorrect node sharing is the
1844 most common reason for missing edges). */
67348ccc
DM
1845 gcc_assert (!dest->definition
1846 || dest->address_taken
1847 || !id->src_node->definition
1848 || !id->dst_node->definition);
9b2a5ef7
RH
1849 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1850 cgraph_create_edge_including_clones
47cb0d7d 1851 (id->dst_node, dest, orig_stmt, stmt, bb->count,
67348ccc 1852 compute_call_stmt_bb_frequency (id->dst_node->decl,
0d63a740 1853 copy_basic_block),
898b8927 1854 CIF_ORIGINALLY_INDIRECT_CALL);
9b2a5ef7
RH
1855 else
1856 cgraph_create_edge (id->dst_node, dest, stmt,
47cb0d7d
JH
1857 bb->count,
1858 compute_call_stmt_bb_frequency
67348ccc 1859 (id->dst_node->decl,
960bfb69 1860 copy_basic_block))->inline_failed
9b2a5ef7
RH
1861 = CIF_ORIGINALLY_INDIRECT_CALL;
1862 if (dump_file)
1863 {
91382288 1864 fprintf (dump_file, "Created new direct edge to %s\n",
fec39fa6 1865 dest->name ());
9b2a5ef7
RH
1866 }
1867 }
9187e02d 1868
f618d33e 1869 flags = gimple_call_flags (stmt);
f618d33e
MJ
1870 if (flags & ECF_MAY_BE_ALLOCA)
1871 cfun->calls_alloca = true;
1872 if (flags & ECF_RETURNS_TWICE)
1873 cfun->calls_setjmp = true;
726a989a 1874 }
e21aff8a 1875
1d65f45c
RH
1876 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1877 id->eh_map, id->eh_lp_nr);
726a989a 1878
b5b8b0ac 1879 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
726a989a
RB
1880 {
1881 ssa_op_iter i;
1882 tree def;
1883
726a989a
RB
1884 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1885 if (TREE_CODE (def) == SSA_NAME)
1886 SSA_NAME_DEF_STMT (def) = stmt;
1887 }
1888
1889 gsi_next (&copy_gsi);
e21aff8a 1890 }
c2a4718a 1891 while (!gsi_end_p (copy_gsi));
726a989a
RB
1892
1893 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 1894 }
726a989a 1895
e21aff8a
SB
1896 return copy_basic_block;
1897}
1898
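/* An illustrative example, not part of the original sources: assuming the
   usual REG_BR_PROB_BASE (10000) fixed-point semantics of
   GCOV_COMPUTE_SCALE and apply_scale, inlining a callee whose entry block
   count is 1000 into a call site with count 250 gives

     count_scale = GCOV_COMPUTE_SCALE (250, 1000) = 2500

   so a callee block with count 600 receives

     copy_basic_block->count = apply_scale (600, 2500) = 150

   in the copy, i.e. the callee profile is rescaled to the weight of this
   particular call site before frequencies are recomputed.  */
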
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within inlined functions to
   landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges,
   and then we might want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */

static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gimple phi;
	gimple_stmt_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = gsi_stmt (si);

	    /* For abnormal goto/call edges the receiver can be the
	       ENTRY_BLOCK.  Do not assert this cannot happen.  */

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    re = find_edge (ret_bb, e->dest);
	    gcc_checking_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}

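/* An illustrative sketch, not from the original sources: suppose a copied
   block BB' in the inlined body throws to a caller landing pad LP that
   already has

     x_3 = PHI <x_1 (RET_BB), ...>

   The loop above finds the existing edge RET_BB->LP and reuses its
   argument x_1 for the new edge BB'->LP, so the PHI simply receives
   another copy of the value it already gets on the return path.  */
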

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assumes the aux
   pointers point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
		   bool can_make_abnormal_goto)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new_edge;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  flags |= EDGE_FALLTHRU;
	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new_edge->count = apply_scale (old_edge->count, count_scale);
	new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
	update_stmt (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
	{
	  if (!gsi_end_p (si))
	    {
	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
		gsi_next (&si);
	      if (gsi_end_p (si))
		need_debug_cleanup = true;
	    }
	  if (!gsi_end_p (si))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      si = gsi_start_bb (new_bb);
	    }
	}

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
	make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
	make_eh_edges (copy_stmt);

      /* If the call we inline cannot make an abnormal goto, do not add
	 additional abnormal edges but only retain those already present
	 in the original function body.  */
      nonlocal_goto &= can_make_abnormal_goto;
      if (nonlocal_goto)
	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
	  && gimple_in_ssa_p (cfun))
	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
					  can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}

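/* An illustrative sketch, not from the original sources: if a copied block
   contains

     tmp_1 = *p_2;	<-- may throw internally
     bar (tmp_1);

   the block is split right after the load, make_eh_edges attaches the EH
   edge(s) to the first half, and the walk resumes in the second half,
   which inherits the aux pointer so the original block still maps to a
   unique copy.  */
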
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (!virtual_operand_p (res))
	{
	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
	  new_phi = create_phi_node (new_res, new_bb);
	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
	    {
	      edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
	      tree arg;
	      tree new_arg;
	      edge_iterator ei2;
	      location_t locus;

	      /* When doing partial cloning, we allow PHIs on the entry
		 block as long as all the arguments are the same.  Find any
		 input edge to see the argument to copy.  */
	      if (!old_edge)
		FOR_EACH_EDGE (old_edge, ei2, bb->preds)
		  if (!old_edge->src->aux)
		    break;

	      arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
	      new_arg = arg;
	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
	      gcc_assert (new_arg);
	      /* With return slot optimization we can end up with
		 non-gimple (foo *)&this->m, fix that here.  */
	      if (TREE_CODE (new_arg) != SSA_NAME
		  && TREE_CODE (new_arg) != FUNCTION_DECL
		  && !is_gimple_val (new_arg))
		{
		  gimple_seq stmts = NULL;
		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
		  gsi_insert_seq_on_edge (new_edge, stmts);
		  inserted = true;
		}
	      locus = gimple_phi_arg_location_from_edge (phi, old_edge);
	      if (LOCATION_BLOCK (locus))
		{
		  tree *n;
		  n = (tree *) pointer_map_contains (id->decl_map,
						     LOCATION_BLOCK (locus));
		  gcc_assert (n);
		  if (*n)
		    locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
		  else
		    locus = LOCATION_LOCUS (locus);
		}
	      else
		locus = LOCATION_LOCUS (locus);

	      add_phi_arg (new_phi, new_arg, new_edge, locus);
	    }
	}
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}


/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build struct function and associated data structures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
   changes cfun to the function of NEW_FNDECL (and current_function_decl
   too).  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (!DECL_ARGUMENTS (new_fndecl))
    DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
  if (!DECL_RESULT (new_fndecl))
    DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);

  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
    count_scale
      = GCOV_COMPUTE_SCALE (count,
			    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands (cfun);
    }
}

/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple stmt = gsi_stmt (ssi), new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		gimple_debug_bind_reset_value (stmt);
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		}
	      else
		value = NULL_TREE;
	      new_stmt = gimple_build_debug_bind (var, value, stmt);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}

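/* An illustrative example, not from the original sources: if NEW_BB ends
   with

     foo ();		<-- can throw, must end the block
     # DEBUG x => x_1

   the trailing debug bind is moved (or copied, when there are several
   successors) to the start of each successor block; a copy that lands in
   a successor with multiple predecessors gets a NULL value, so the
   debugger sees X as optimized out rather than possibly bound to a wrong
   value.  */
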
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as siblings of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    struct loop *dest_parent, struct loop *src_parent)
{
  struct loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  struct loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  */
	  if (src_loop->header != NULL)
	    {
	      dest_loop->header = (basic_block)src_loop->header->aux;
	      dest_loop->header->loop_father = dest_loop;
	    }
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }
	  if (src_loop->force_vect)
	    {
	      dest_loop->force_vect = true;
	      cfun->has_force_vect_loops = true;
	    }

	  /* Recurse.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}

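/* An illustrative note, not from the original sources: because a loop is
   copied only when its header is in ID->blocks_to_copy, a partial clone
   that omits a loop's header drops that loop from the copied tree
   entirely; any of its surviving blocks are re-parented later, which is
   one reason the caller marks the result with LOOPS_NEED_FIXUP.  */
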
/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */

void
redirect_all_calls (copy_body_data *id, basic_block bb)
{
  gimple_stmt_iterator si;
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      if (is_gimple_call (gsi_stmt (si)))
	{
	  struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
	  if (edge)
	    cgraph_redirect_edge_call_stmt_to_callee (edge);
	}
    }
}

/* Convert estimated frequencies into counts for NODE, scaling COUNT
   with each bb's frequency.  Used when NODE has a 0-weight entry
   but we are about to inline it into a non-zero count call bb.
   See the comments for handle_missing_profiles() in predict.c for
   when this can happen for COMDATs.  */

void
freqs_to_counts (struct cgraph_node *node, gcov_type count)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  FOR_ALL_BB_FN (bb, fn)
    {
      bb->count = apply_scale (count,
			       GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_probability (e->src->count, e->probability);
    }
}

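/* An illustrative example, not from the original sources: with COUNT ==
   1000 and BB_FREQ_MAX == 10000, a block whose guessed frequency is 5000
   (expected to execute on half of the entries) receives

     bb->count = apply_scale (1000, GCOV_COMPUTE_SCALE (5000, 10000)) = 500

   and each of its outgoing edges then gets apply_probability (500,
   e->probability), keeping the synthesized counts consistent with the
   guessed branch probabilities.  */
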
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data *id, gcov_type count, int frequency_scale,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  /* This can happen for COMDAT routines that end up with 0 counts
     despite being called (see the comments for handle_missing_profiles()
     in predict.c as to why).  Apply counts to the blocks in the callee
     before inlining, using the guessed edge frequencies, so that we don't
     end up with a 0-count inline body which can confuse downstream
     optimizations such as function splitting.  */
  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
    {
      /* Apply the larger of the call bb count and the total incoming
	 call edge count to the callee.  */
      gcov_type in_count = 0;
      struct cgraph_edge *in_edge;
      for (in_edge = id->src_node->callers; in_edge;
	   in_edge = in_edge->next_caller)
	in_count += in_edge->count;
      freqs_to_counts (id->src_node, count > in_count ? count : in_count);
    }

  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
    count_scale
      = GCOV_COMPUTE_SCALE (count,
			    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry
     can be part of a loop, we must compute the frequency and probability
     of ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from the nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  {
	    incoming_frequency += EDGE_FREQUENCY (e);
	    incoming_count += e->count;
	  }
      incoming_count = apply_scale (incoming_count, count_scale);
      incoming_frequency
	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  bool can_make_abnormal_goto
    = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
					       can_make_abnormal_goto);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
			  EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This cannot be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK (last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}

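/* An illustrative summary, not from the original sources, of the
   aux-pointer protocol used above: while the copy is in progress each
   original block and its clone point at each other,

     bb->aux = new_bb;  new_bb->aux = bb;

   which is what lets copy_edges_for_bb, copy_phis_for_bb and copy_loops
   translate a reference to an original block into its copy without any
   separate hash map; both directions are cleared again before
   copy_cfg_body returns.  */
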
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (gimple_block (stmt))
    {
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
	   && !is_global_var (t)
	   && !pointer_map_contains (id->decl_map, t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		 remap_gimple_op_r, &wi, NULL);
      /* When inlining and the source bind refers to one of the optimized
	 away parameters, change the source bind into a normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->gimple_call)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts.exists ())
    return;

  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  id->debug_stmts.release ();
}

/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map,
			exit_block_map, new_entry);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement will result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}

/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs
     requiring the constant argument to be visible in the inlined function
     body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't bother initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var, NULL);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}

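/* An illustrative example, not from the original sources: when inlining
   foo (7) where foo's parameter P is read-only with default definition
   p_1(D), the code above simply maps p_1(D) -> 7 and emits only a debug
   bind, so no assignment survives in the inline body; for a non-constant
   argument a_2 it instead appends an init statement

     p_3 = a_2;

   to BB, with p_3 replacing the remapped default definition of P.  */
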
d4e4baa9 2970/* Generate code to initialize the parameters of the function at the
726a989a 2971 top of the stack in ID from the GIMPLE_CALL STMT. */
d4e4baa9 2972
e21aff8a 2973static void
726a989a 2974initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e21aff8a 2975 tree fn, basic_block bb)
d4e4baa9 2976{
d4e4baa9 2977 tree parms;
726a989a 2978 size_t i;
d4e4baa9 2979 tree p;
d436bff8 2980 tree vars = NULL_TREE;
726a989a 2981 tree static_chain = gimple_call_chain (stmt);
d4e4baa9
AO
2982
2983 /* Figure out what the parameters are. */
18c6ada9 2984 parms = DECL_ARGUMENTS (fn);
d4e4baa9 2985
d4e4baa9
AO
2986 /* Loop through the parameter declarations, replacing each with an
2987 equivalent VAR_DECL, appropriately initialized. */
910ad8de 2988 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
726a989a
RB
2989 {
2990 tree val;
2991 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2992 setup_one_parameter (id, p, val, fn, bb, &vars);
2993 }
ea184343
RG
2994 /* After remapping parameters remap their types. This has to be done
2995 in a second loop over all parameters to appropriately remap
2996 variable sized arrays when the size is specified in a
2997 parameter following the array. */
910ad8de 2998 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
ea184343
RG
2999 {
3000 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3001 if (varp
3002 && TREE_CODE (*varp) == VAR_DECL)
3003 {
72aa3dca 3004 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
32244553 3005 ? ssa_default_def (id->src_cfun, p) : NULL);
72aa3dca
RG
3006 tree var = *varp;
3007 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
ea184343
RG
3008 /* Also remap the default definition if it was remapped
3009 to the default definition of the parameter replacement
3010 by the parameter setup. */
72aa3dca 3011 if (def)
ea184343
RG
3012 {
3013 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3014 if (defp
3015 && TREE_CODE (*defp) == SSA_NAME
72aa3dca
RG
3016 && SSA_NAME_VAR (*defp) == var)
3017 TREE_TYPE (*defp) = TREE_TYPE (var);
ea184343
RG
3018 }
3019 }
3020 }
4838c5ee 3021
6de9cd9a
DN
3022 /* Initialize the static chain. */
3023 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
ea99e0be 3024 gcc_assert (fn != current_function_decl);
6de9cd9a
DN
3025 if (p)
3026 {
3027 /* No static chain? Seems like a bug in tree-nested.c. */
1e128c5f 3028 gcc_assert (static_chain);
4838c5ee 3029
e21aff8a 3030 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
4838c5ee
AO
3031 }
3032
e21aff8a 3033 declare_inline_vars (id->block, vars);
d4e4baa9
AO
3034}
3035
726a989a 3036
e21aff8a
SB
3037/* Declare a return variable to replace the RESULT_DECL for the
3038 function we are calling. An appropriate DECL_STMT is returned.
3039 The USE_STMT is filled to contain a use of the declaration to
3040 indicate the return value of the function.
3041
110cfe1c
JH
3042 RETURN_SLOT, if non-null is place where to store the result. It
3043 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
726a989a 3044 was the LHS of the MODIFY_EXPR to which this call is the RHS.
7740f00d 3045
0f900dfa
JJ
3046 The return value is a (possibly null) value that holds the result
3047 as seen by the caller. */
d4e4baa9 3048
d436bff8 3049static tree
6938f93f
JH
3050declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3051 basic_block entry_bb)
d4e4baa9 3052{
1b369fae 3053 tree callee = id->src_fn;
7740f00d
RH
3054 tree result = DECL_RESULT (callee);
3055 tree callee_type = TREE_TYPE (result);
ea2edf88 3056 tree caller_type;
7740f00d 3057 tree var, use;
d4e4baa9 3058
ea2edf88
RG
3059 /* Handle type-mismatches in the function declaration return type
3060 vs. the call expression. */
3061 if (modify_dest)
3062 caller_type = TREE_TYPE (modify_dest);
3063 else
3064 caller_type = TREE_TYPE (TREE_TYPE (callee));
3065
1a2c27e9
EB
3066 /* We don't need to do anything for functions that don't return anything. */
3067 if (VOID_TYPE_P (callee_type))
0f900dfa 3068 return NULL_TREE;
d4e4baa9 3069
cc77ae10 3070 /* If there was a return slot, then the return value is the
7740f00d 3071 dereferenced address of that object. */
110cfe1c 3072 if (return_slot)
7740f00d 3073 {
110cfe1c 3074 /* The front end shouldn't have used both return_slot and
7740f00d 3075 a modify expression. */
1e128c5f 3076 gcc_assert (!modify_dest);
cc77ae10 3077 if (DECL_BY_REFERENCE (result))
110cfe1c
JH
3078 {
3079 tree return_slot_addr = build_fold_addr_expr (return_slot);
3080 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3081
3082 /* We are going to construct *&return_slot and we can't do that
b8698a0f 3083 for variables believed to be not addressable.
110cfe1c
JH
3084
3085 FIXME: This check possibly can match, because values returned
3086 via return slot optimization are not believed to have address
3087 taken by alias analysis. */
3088 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
110cfe1c
JH
3089 var = return_slot_addr;
3090 }
cc77ae10 3091 else
110cfe1c
JH
3092 {
3093 var = return_slot;
3094 gcc_assert (TREE_CODE (var) != SSA_NAME);
b5ca517c 3095 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
110cfe1c 3096 }
0890b981
AP
3097 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3098 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3099 && !DECL_GIMPLE_REG_P (result)
22918034 3100 && DECL_P (var))
0890b981 3101 DECL_GIMPLE_REG_P (var) = 0;
3102 use = NULL;
3103 goto done;
3104 }
3105
3106 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 3107 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3108
3109 /* Attempt to avoid creating a new temporary variable. */
3110 if (modify_dest
3111 && TREE_CODE (modify_dest) != SSA_NAME)
3112 {
3113 bool use_it = false;
3114
3115 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 3116 if (!useless_type_conversion_p (callee_type, caller_type))
3117 use_it = false;
3118
3119 /* ??? If we're assigning to a variable sized type, then we must
3120 reuse the destination variable, because we've no good way to
3121 create variable sized temporaries at this point. */
3122 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3123 use_it = true;
3124
3125 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3126 reuse it as the result of the call directly. Don't do this if
3127 it would promote MODIFY_DEST to addressable. */
3128 else if (TREE_ADDRESSABLE (result))
3129 use_it = false;
3130 else
3131 {
3132 tree base_m = get_base_address (modify_dest);
3133
3134 /* If the base isn't a decl, then it's a pointer, and we don't
3135 know where that's going to go. */
3136 if (!DECL_P (base_m))
3137 use_it = false;
3138 else if (is_global_var (base_m))
3139 use_it = false;
3140 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3141 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3142 && !DECL_GIMPLE_REG_P (result)
3143 && DECL_GIMPLE_REG_P (base_m))
1d327c16 3144 use_it = false;
3145 else if (!TREE_ADDRESSABLE (base_m))
3146 use_it = true;
3147 }
3148
3149 if (use_it)
3150 {
3151 var = modify_dest;
3152 use = NULL;
3153 goto done;
3154 }
3155 }
3156
1e128c5f 3157 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 3158
c08cd4c1 3159 var = copy_result_decl_to_var (result, id);
7740f00d 3160 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
7740f00d 3161
6de9cd9a 3162 /* Do not have the rest of GCC warn about this variable as it should
471854f8 3163 not be visible to the user. */
6de9cd9a 3164 TREE_NO_WARNING (var) = 1;
d4e4baa9 3165
3166 declare_inline_vars (id->block, var);
3167
3168 /* Build the use expr. If the return type of the function was
3169 promoted, convert it back to the expected type. */
3170 use = var;
f4088621 3171 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3172 {
3173 /* If we can match up types by promotion/demotion do so. */
3174 if (fold_convertible_p (caller_type, var))
3175 use = fold_convert (caller_type, var);
3176 else
3177 {
3178 /* ??? For valid programs we should not end up here.
3179 Still if we end up with truly mismatched types here, fall back
3180 to using a MEM_REF to not leak invalid GIMPLE to the following
3181 passes. */
3182 /* Prevent var from being written into SSA form. */
3183 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3184 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3185 DECL_GIMPLE_REG_P (var) = false;
3186 else if (is_gimple_reg_type (TREE_TYPE (var)))
3187 TREE_ADDRESSABLE (var) = true;
3188 use = fold_build2 (MEM_REF, caller_type,
3189 build_fold_addr_expr (var),
3190 build_int_cst (ptr_type_node, 0));
3191 }
3192 }
b8698a0f 3193
73dab33b 3194 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 3195
c08cd4c1 3196 if (DECL_BY_REFERENCE (result))
3197 {
3198 TREE_ADDRESSABLE (var) = 1;
3199 var = build_fold_addr_expr (var);
3200 }
c08cd4c1 3201
7740f00d 3202 done:
3203 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3204 way, when the RESULT_DECL is encountered, it will be
3205 automatically replaced by the VAR_DECL.
3206
3207 When returning by reference, ensure that RESULT_DECL remaps to
3208 gimple_val. */
3209 if (DECL_BY_REFERENCE (result)
3210 && !is_gimple_val (var))
3211 {
3212 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3213 insert_decl_map (id, result, temp);
3214 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3215 its default_def SSA_NAME. */
3216 if (gimple_in_ssa_p (id->src_cfun)
3217 && is_gimple_reg (result))
3218 {
3219 temp = make_ssa_name (temp, NULL);
32244553 3220 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
6b18b1a3 3221 }
3222 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3223 }
3224 else
3225 insert_decl_map (id, result, var);
d4e4baa9 3226
3227 /* Remember this so we can ignore it in remap_decls. */
3228 id->retvar = var;
3229
0f900dfa 3230 return use;
3231}
3232
3233/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3234 to a local label. */
4838c5ee 3235
3236static tree
3237has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
4838c5ee 3238{
3239 tree node = *nodep;
3240 tree fn = (tree) fnp;
726a989a 3241
3242 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3243 return node;
3244
3245 if (TYPE_P (node))
3246 *walk_subtrees = 0;
3247
3248 return NULL_TREE;
3249}
726a989a 3250
3251/* Determine if the function can be copied. If so return NULL. If
3252 not return a string describing the reason for failure. */
3253
3254static const char *
3255copy_forbidden (struct function *fun, tree fndecl)
3256{
3257 const char *reason = fun->cannot_be_copied_reason;
3258 tree decl;
3259 unsigned ix;
3260
3261 /* Only examine the function once. */
3262 if (fun->cannot_be_copied_set)
3263 return reason;
3264
3265 /* We cannot copy a function that receives a non-local goto
3266 because we cannot remap the destination label used in the
3267 function that is performing the non-local goto. */
3268 /* ??? Actually, this should be possible, if we work at it.
3269 No doubt there's just a handful of places that simply
3270 assume it doesn't happen and don't substitute properly. */
3271 if (fun->has_nonlocal_label)
3272 {
3273 reason = G_("function %q+F can never be copied "
3274 "because it receives a non-local goto");
3275 goto fail;
3276 }
3277
3278 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3279 if (TREE_CODE (decl) == VAR_DECL
3280 && TREE_STATIC (decl)
3281 && !DECL_EXTERNAL (decl)
3282 && DECL_INITIAL (decl)
3283 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3284 has_label_address_in_static_1,
3285 fndecl))
3286 {
3287 reason = G_("function %q+F can never be copied because it saves "
3288 "address of local label in a static variable");
3289 goto fail;
3290 }
3291
3292 fail:
3293 fun->cannot_be_copied_reason = reason;
3294 fun->cannot_be_copied_set = true;
3295 return reason;
3296}
3297
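/* [Editor's note] A minimal, hypothetical GNU C illustration of the case the
   walk above detects: a function-local static whose initializer captures the
   addresses of local labels (the classic threaded-interpreter pattern).
   Copying such a body would leave the static pointing into the original.  */

static int interp (const unsigned char *program)
{
  static const void *dispatch[] = { &&do_halt, &&do_inc };  /* label addresses in a static */
  int acc = 0;

  goto *dispatch[*program];
 do_inc:
  acc++;
  program++;
  goto *dispatch[*program];
 do_halt:
  return acc;
}

int main (void)
{
  static const unsigned char prog[] = { 1, 1, 0 };  /* inc, inc, halt */
  return interp (prog) == 2 ? 0 : 1;
}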
3298
3299static const char *inline_forbidden_reason;
3300
3301/* A callback for walk_gimple_seq to handle statements. Returns non-null
3302 iff a function cannot be inlined. Also sets the reason why. */
c986baf6 3303
c986baf6 3304static tree
3305inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3306 struct walk_stmt_info *wip)
c986baf6 3307{
726a989a 3308 tree fn = (tree) wip->info;
f08545a8 3309 tree t;
726a989a 3310 gimple stmt = gsi_stmt (*gsi);
c986baf6 3311
726a989a 3312 switch (gimple_code (stmt))
f08545a8 3313 {
726a989a 3314 case GIMPLE_CALL:
3315 /* Refuse to inline an alloca call unless the user explicitly forced it,
3316 as this may drastically change the program's memory overhead when the
3317 function using alloca is called in a loop. In the GCC present in
3318 SPEC2000, inlining into schedule_block caused it to require 2GB of
3319 RAM instead of 256MB. Don't do so for alloca calls emitted for
3320 VLA objects, as those can't cause unbounded growth (they're always
3321 wrapped inside stack_save/stack_restore regions). */
726a989a 3322 if (gimple_alloca_call_p (stmt)
63d2a353 3323 && !gimple_call_alloca_for_var_p (stmt)
3324 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3325 {
ddd2d57e 3326 inline_forbidden_reason
dee15844 3327 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 3328 "alloca (override using the always_inline attribute)");
3329 *handled_ops_p = true;
3330 return fn;
f08545a8 3331 }
3332
3333 t = gimple_call_fndecl (stmt);
3334 if (t == NULL_TREE)
f08545a8 3335 break;
84f5e1b1 3336
3337 /* We cannot inline functions that call setjmp. */
3338 if (setjmp_call_p (t))
3339 {
ddd2d57e 3340 inline_forbidden_reason
dee15844 3341 = G_("function %q+F can never be inlined because it uses setjmp");
3342 *handled_ops_p = true;
3343 return t;
3344 }
3345
6de9cd9a 3346 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 3347 switch (DECL_FUNCTION_CODE (t))
f08545a8 3348 {
3349 /* We cannot inline functions that take a variable number of
3350 arguments. */
3351 case BUILT_IN_VA_START:
3352 case BUILT_IN_NEXT_ARG:
3353 case BUILT_IN_VA_END:
6de9cd9a 3354 inline_forbidden_reason
dee15844 3355 = G_("function %q+F can never be inlined because it "
6de9cd9a 3356 "uses variable argument lists");
3357 *handled_ops_p = true;
3358 return t;
6de9cd9a 3359
3197c4fd 3360 case BUILT_IN_LONGJMP:
3361 /* We can't inline functions that call __builtin_longjmp at
3362 all. The non-local goto machinery really requires the
3363 destination be in a different function. If we allow the
3364 function calling __builtin_longjmp to be inlined into the
3365 function calling __builtin_setjmp, Things will Go Awry. */
3366 inline_forbidden_reason
dee15844 3367 = G_("function %q+F can never be inlined because "
6de9cd9a 3368 "it uses setjmp-longjmp exception handling");
3369 *handled_ops_p = true;
3370 return t;
3371
3372 case BUILT_IN_NONLOCAL_GOTO:
3373 /* Similarly. */
3374 inline_forbidden_reason
dee15844 3375 = G_("function %q+F can never be inlined because "
6de9cd9a 3376 "it uses non-local goto");
3377 *handled_ops_p = true;
3378 return t;
f08545a8 3379
3380 case BUILT_IN_RETURN:
3381 case BUILT_IN_APPLY_ARGS:
3382 /* If a __builtin_apply_args caller would be inlined,
3383 it would be saving arguments of the function it has
3384 been inlined into. Similarly __builtin_return would
3385 return from the function the inline has been inlined into. */
3386 inline_forbidden_reason
dee15844 3387 = G_("function %q+F can never be inlined because "
4b284111 3388 "it uses __builtin_return or __builtin_apply_args");
3389 *handled_ops_p = true;
3390 return t;
4b284111 3391
3392 default:
3393 break;
3394 }
3395 break;
3396
3397 case GIMPLE_GOTO:
3398 t = gimple_goto_dest (stmt);
3399
3400 /* We will not inline a function which uses computed goto. The
3401 addresses of its local labels, which may be tucked into
3402 global storage, are of course not constant across
3403 instantiations, which causes unexpected behavior. */
3404 if (TREE_CODE (t) != LABEL_DECL)
3405 {
ddd2d57e 3406 inline_forbidden_reason
dee15844 3407 = G_("function %q+F can never be inlined "
ddd2d57e 3408 "because it contains a computed goto");
3409 *handled_ops_p = true;
3410 return t;
f08545a8 3411 }
6de9cd9a 3412 break;
f08545a8 3413
3414 default:
3415 break;
3416 }
3417
726a989a 3418 *handled_ops_p = false;
f08545a8 3419 return NULL_TREE;
3420}
3421
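/* [Editor's note] A hypothetical user-level illustration of the alloca rule
   above: if get_buf_sum were inlined into main, each iteration's alloca
   would live until *main* returns, so the caller's frame would grow by 64
   bytes a million times instead of being reclaimed per call.  */

#include <alloca.h>
#include <string.h>

static int get_buf_sum (unsigned n)
{
  char *buf = alloca (n);	/* reclaimed when this function returns */
  memset (buf, 1, n);
  return buf[0] + buf[n - 1];
}

int main (void)
{
  long sum = 0;
  for (int i = 0; i < 1000000; i++)
    sum += get_buf_sum (64);
  return sum == 2000000 ? 0 : 1;
}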
3422/* Return true if FNDECL is a function that cannot be inlined into
3423 another one. */
3424
3425static bool
f08545a8 3426inline_forbidden_p (tree fndecl)
84f5e1b1 3427{
2092ee7d 3428 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3429 struct walk_stmt_info wi;
3430 struct pointer_set_t *visited_nodes;
3431 basic_block bb;
3432 bool forbidden_p = false;
3433
3434 /* First check for shared reasons not to copy the code. */
3435 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3436 if (inline_forbidden_reason != NULL)
3437 return true;
3438
3439 /* Next, walk the statements of the function looking for
3440 constructs we can't handle, or that are non-optimal for inlining. */
3441 visited_nodes = pointer_set_create ();
3442 memset (&wi, 0, sizeof (wi));
3443 wi.info = (void *) fndecl;
3444 wi.pset = visited_nodes;
e21aff8a 3445
2092ee7d 3446 FOR_EACH_BB_FN (bb, fun)
3447 {
3448 gimple ret;
3449 gimple_seq seq = bb_seq (bb);
27dbd3ac 3450 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3451 forbidden_p = (ret != NULL);
3452 if (forbidden_p)
27dbd3ac 3453 break;
3454 }
3455
726a989a 3456 pointer_set_destroy (visited_nodes);
726a989a 3457 return forbidden_p;
84f5e1b1 3458}
3459\f
3460/* Return false if the function FNDECL cannot be inlined on account of its
3461 attributes, true otherwise. */
3462static bool
3463function_attribute_inlinable_p (const_tree fndecl)
3464{
3465 if (targetm.attribute_table)
3466 {
3467 const_tree a;
3468
3469 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3470 {
3471 const_tree name = TREE_PURPOSE (a);
3472 int i;
3473
3474 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3475 if (is_attribute_p (targetm.attribute_table[i].name, name))
3476 return targetm.function_attribute_inlinable_p (fndecl);
3477 }
3478 }
3479
3480 return true;
3481}
84f5e1b1 3482
3483/* Returns nonzero if FN is a function that does not have any
3484 fundamental inline blocking properties. */
d4e4baa9 3485
3486bool
3487tree_inlinable_function_p (tree fn)
d4e4baa9 3488{
b3c3af2f 3489 bool inlinable = true;
3490 bool do_warning;
3491 tree always_inline;
3492
3493 /* If we've already decided this function shouldn't be inlined,
3494 there's no need to check again. */
3495 if (DECL_UNINLINABLE (fn))
b3c3af2f 3496 return false;
d4e4baa9 3497
3498 /* We only warn for functions declared `inline' by the user. */
3499 do_warning = (warn_inline
18177c7e 3500 && DECL_DECLARED_INLINE_P (fn)
0494626a 3501 && !DECL_NO_INLINE_WARNING_P (fn)
3502 && !DECL_IN_SYSTEM_HEADER (fn));
3503
3504 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3505
e90acd93 3506 if (flag_no_inline
3507 && always_inline == NULL)
3508 {
3509 if (do_warning)
3510 warning (OPT_Winline, "function %q+F can never be inlined because it "
3511 "is suppressed using -fno-inline", fn);
3512 inlinable = false;
3513 }
3514
3515 else if (!function_attribute_inlinable_p (fn))
3516 {
3517 if (do_warning)
3518 warning (OPT_Winline, "function %q+F can never be inlined because it "
3519 "uses attributes conflicting with inlining", fn);
3520 inlinable = false;
3521 }
46c5ad27 3522
f08545a8 3523 else if (inline_forbidden_p (fn))
3524 {
3525 /* See if we should warn about uninlinable functions. Previously,
3526 some of these warnings would be issued while trying to expand
3527 the function inline, but that would cause multiple warnings
3528 about functions that would for example call alloca. But since
3529 this is a property of the function, just one warning is enough.
3530 As a bonus we can now give more details about the reason why a
3531 function is not inlinable. */
3532 if (always_inline)
c9fc06dc 3533 error (inline_forbidden_reason, fn);
2d327012 3534 else if (do_warning)
d2fcbf6f 3535 warning (OPT_Winline, inline_forbidden_reason, fn);
3536
3537 inlinable = false;
3538 }
3539
3540 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 3541 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 3542
3543 return inlinable;
3544}
3545
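/* [Editor's note] A small, hypothetical example of the interplay checked
   above: under -fno-inline only the always_inline function is still inlined,
   and if an always_inline function later proves uninlinable the code above
   reports a hard error, while plain 'inline' only earns a -Winline warning.  */

static inline int __attribute__ ((always_inline)) sq (int x)
{
  return x * x;			/* inlined even under -fno-inline */
}

static inline int cube (int x)
{
  return x * x * x;		/* subject to -fno-inline and -Winline */
}

int use_both (int v)
{
  return sq (v) + cube (v);
}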
3546/* Estimate the cost of a memory move. Use machine dependent
3547 word size and take possible memcpy call into account. */
3548
3549int
3550estimate_move_cost (tree type)
3551{
3552 HOST_WIDE_INT size;
3553
3554 gcc_assert (!VOID_TYPE_P (type));
3555
3556 if (TREE_CODE (type) == VECTOR_TYPE)
3557 {
3558 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3559 enum machine_mode simd
3560 = targetm.vectorize.preferred_simd_mode (inner);
3561 int simd_mode_size = GET_MODE_SIZE (simd);
3562 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3563 / simd_mode_size);
3564 }
3565
3566 size = int_size_in_bytes (type);
3567
e04ad03d 3568 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3569 /* Cost of a memcpy call, 3 arguments and the call. */
3570 return 4;
3571 else
3572 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3573}
3574
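/* [Editor's note] A self-contained sketch of the size-based branch above,
   with invented stand-ins for the target macros (MOVE_MAX_PIECES = 8 and
   MOVE_RATIO = 6 here; both are target-dependent).  A 40-byte type costs
   (40 + 8 - 1) / 8 = 5 insns; a 64-byte one exceeds 8 * 6 = 48 bytes and is
   charged the flat memcpy cost of 4.  */

static int move_cost_sketch (long size)
{
  const long move_max_pieces = 8, move_ratio = 6;	/* illustrative values */

  if (size < 0 || size > move_max_pieces * move_ratio)
    return 4;			/* memcpy call: 3 arguments plus the call */
  return (size + move_max_pieces - 1) / move_max_pieces;
}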
726a989a 3575/* Returns cost of operation CODE, according to WEIGHTS */
7f9bc51b 3576
726a989a 3577static int
3578estimate_operator_cost (enum tree_code code, eni_weights *weights,
3579 tree op1 ATTRIBUTE_UNUSED, tree op2)
6de9cd9a 3580{
726a989a 3581 switch (code)
6de9cd9a 3582 {
3583 /* These are "free" conversions, or their presumed cost
3584 is folded into other operations. */
61fcaeec 3585 case RANGE_EXPR:
1a87cf0c 3586 CASE_CONVERT:
3587 case COMPLEX_EXPR:
3588 case PAREN_EXPR:
d4d92cd3 3589 case VIEW_CONVERT_EXPR:
726a989a 3590 return 0;
6de9cd9a 3591
3592 /* Assign cost of 1 to usual operations.
3593 ??? We may consider mapping RTL costs to this. */
6de9cd9a 3594 case COND_EXPR:
4151978d 3595 case VEC_COND_EXPR:
2205ed25 3596 case VEC_PERM_EXPR:
3597
3598 case PLUS_EXPR:
5be014d5 3599 case POINTER_PLUS_EXPR:
3600 case MINUS_EXPR:
3601 case MULT_EXPR:
98449720 3602 case MULT_HIGHPART_EXPR:
16949072 3603 case FMA_EXPR:
6de9cd9a 3604
09e881c9 3605 case ADDR_SPACE_CONVERT_EXPR:
325217ed 3606 case FIXED_CONVERT_EXPR:
6de9cd9a 3607 case FIX_TRUNC_EXPR:
3608
3609 case NEGATE_EXPR:
3610 case FLOAT_EXPR:
3611 case MIN_EXPR:
3612 case MAX_EXPR:
3613 case ABS_EXPR:
3614
3615 case LSHIFT_EXPR:
3616 case RSHIFT_EXPR:
3617 case LROTATE_EXPR:
3618 case RROTATE_EXPR:
3619 case VEC_LSHIFT_EXPR:
3620 case VEC_RSHIFT_EXPR:
3621
3622 case BIT_IOR_EXPR:
3623 case BIT_XOR_EXPR:
3624 case BIT_AND_EXPR:
3625 case BIT_NOT_EXPR:
3626
3627 case TRUTH_ANDIF_EXPR:
3628 case TRUTH_ORIF_EXPR:
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 case TRUTH_NOT_EXPR:
3633
3634 case LT_EXPR:
3635 case LE_EXPR:
3636 case GT_EXPR:
3637 case GE_EXPR:
3638 case EQ_EXPR:
3639 case NE_EXPR:
3640 case ORDERED_EXPR:
3641 case UNORDERED_EXPR:
3642
3643 case UNLT_EXPR:
3644 case UNLE_EXPR:
3645 case UNGT_EXPR:
3646 case UNGE_EXPR:
3647 case UNEQ_EXPR:
d1a7edaf 3648 case LTGT_EXPR:
6de9cd9a 3649
3650 case CONJ_EXPR:
3651
3652 case PREDECREMENT_EXPR:
3653 case PREINCREMENT_EXPR:
3654 case POSTDECREMENT_EXPR:
3655 case POSTINCREMENT_EXPR:
3656
3657 case REALIGN_LOAD_EXPR:
3658
3659 case REDUC_MAX_EXPR:
3660 case REDUC_MIN_EXPR:
3661 case REDUC_PLUS_EXPR:
20f06221 3662 case WIDEN_SUM_EXPR:
3663 case WIDEN_MULT_EXPR:
3664 case DOT_PROD_EXPR:
3665 case WIDEN_MULT_PLUS_EXPR:
3666 case WIDEN_MULT_MINUS_EXPR:
36ba4aae 3667 case WIDEN_LSHIFT_EXPR:
726a989a 3668
3669 case VEC_WIDEN_MULT_HI_EXPR:
3670 case VEC_WIDEN_MULT_LO_EXPR:
3671 case VEC_WIDEN_MULT_EVEN_EXPR:
3672 case VEC_WIDEN_MULT_ODD_EXPR:
3673 case VEC_UNPACK_HI_EXPR:
3674 case VEC_UNPACK_LO_EXPR:
3675 case VEC_UNPACK_FLOAT_HI_EXPR:
3676 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 3677 case VEC_PACK_TRUNC_EXPR:
89d67cca 3678 case VEC_PACK_SAT_EXPR:
d9987fb4 3679 case VEC_PACK_FIX_TRUNC_EXPR:
3680 case VEC_WIDEN_LSHIFT_HI_EXPR:
3681 case VEC_WIDEN_LSHIFT_LO_EXPR:
98b44b0e 3682
726a989a 3683 return 1;
6de9cd9a 3684
1ea7e6ad 3685 /* A few special cases of expensive operations. This is useful
3686 to avoid inlining functions that have too many of these. */
3687 case TRUNC_DIV_EXPR:
3688 case CEIL_DIV_EXPR:
3689 case FLOOR_DIV_EXPR:
3690 case ROUND_DIV_EXPR:
3691 case EXACT_DIV_EXPR:
3692 case TRUNC_MOD_EXPR:
3693 case CEIL_MOD_EXPR:
3694 case FLOOR_MOD_EXPR:
3695 case ROUND_MOD_EXPR:
3696 case RDIV_EXPR:
3697 if (TREE_CODE (op2) != INTEGER_CST)
3698 return weights->div_mod_cost;
3699 return 1;
3700
3701 default:
3702 /* We expect a copy assignment with no operator. */
3703 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3704 return 0;
3705 }
3706}
3707
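/* [Editor's note] A hypothetical user-level view of the div_mod_cost weight
   above: division by a variable is charged as expensive, while division by a
   literal is charged 1 because expansion typically strength-reduces it to a
   multiply and shifts.  */

int div_by_const (int x)
{
  return x / 7;			/* divisor is an INTEGER_CST: cost 1 */
}

int div_by_var (int x, int y)
{
  return x / y;			/* non-constant divisor: div_mod_cost */
}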
3708
3709/* Estimate number of instructions that will be created by expanding
3710 the statements in the statement sequence STMTS.
3711 WEIGHTS contains weights attributed to various constructs. */
3712
3713static
3714int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3715{
3716 int cost;
3717 gimple_stmt_iterator gsi;
3718
3719 cost = 0;
3720 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3721 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3722
3723 return cost;
3724}
3725
3726
3727/* Estimate number of instructions that will be created by expanding STMT.
3728 WEIGHTS contains weights attributed to various constructs. */
3729
3730int
3731estimate_num_insns (gimple stmt, eni_weights *weights)
3732{
3733 unsigned cost, i;
3734 enum gimple_code code = gimple_code (stmt);
3735 tree lhs;
02f0b13a 3736 tree rhs;
3737
3738 switch (code)
3739 {
3740 case GIMPLE_ASSIGN:
3741 /* Try to estimate the cost of assignments. We have three cases to
3742 deal with:
3743 1) Simple assignments to registers;
3744 2) Stores to things that must live in memory. This includes
3745 "normal" stores to scalars, but also assignments of large
3746 structures, or constructors of big arrays;
3747
3748 Let us look at the first two cases, assuming we have "a = b + C":
3749 <GIMPLE_ASSIGN <var_decl "a">
3750 <plus_expr <var_decl "b"> <constant C>>
3751 If "a" is a GIMPLE register, the assignment to it is free on almost
3752 any target, because "a" usually ends up in a real register. Hence
3753 the only cost of this expression comes from the PLUS_EXPR, and we
3754 can ignore the GIMPLE_ASSIGN.
3755 If "a" is not a GIMPLE register, the assignment to "a" will most
3756 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3757 of moving something into "a", which we compute using the function
3758 estimate_move_cost. */
3759 if (gimple_clobber_p (stmt))
3760 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3761
726a989a 3762 lhs = gimple_assign_lhs (stmt);
3763 rhs = gimple_assign_rhs1 (stmt);
3764
c12d9242 3765 cost = 0;
726a989a 3766
3767 /* Account for the cost of moving to / from memory. */
3768 if (gimple_store_p (stmt))
3769 cost += estimate_move_cost (TREE_TYPE (lhs));
3770 if (gimple_assign_load_p (stmt))
3771 cost += estimate_move_cost (TREE_TYPE (rhs));
3772
3773 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3774 gimple_assign_rhs1 (stmt),
3775 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3776 == GIMPLE_BINARY_RHS
3777 ? gimple_assign_rhs2 (stmt) : NULL);
3778 break;
3779
3780 case GIMPLE_COND:
3781 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3782 gimple_op (stmt, 0),
3783 gimple_op (stmt, 1));
3784 break;
3785
3786 case GIMPLE_SWITCH:
3787 /* Take into account cost of the switch + guess 2 conditional jumps for
b8698a0f 3788 each case label.
3789
3790 TODO: once the switch expansion logic is sufficiently separated, we can
3791 do better job on estimating cost of the switch. */
3792 if (weights->time_based)
3793 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3794 else
3795 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 3796 break;
3797
3798 case GIMPLE_CALL:
6de9cd9a 3799 {
726a989a 3800 tree decl = gimple_call_fndecl (stmt);
d2d668fb 3801 struct cgraph_node *node = NULL;
6de9cd9a 3802
3803 /* Do not special case builtins where we see the body.
3804 This just confuses the inliner. */
67348ccc 3805 if (!decl || !(node = cgraph_get_node (decl)) || node->definition)
e9f7ad79 3806 ;
3807 /* For builtins that are likely expanded to nothing or
3808 inlined, do not account operand costs. */
3809 else if (is_simple_builtin (decl))
3810 return 0;
3811 else if (is_inexpensive_builtin (decl))
9bb2f479 3812 return weights->target_builtin_call_cost;
3813 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3814 {
3815 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3816 specialize the cheap expansion we do here.
3817 ??? This asks for a more general solution. */
3818 switch (DECL_FUNCTION_CODE (decl))
3819 {
3820 case BUILT_IN_POW:
3821 case BUILT_IN_POWF:
3822 case BUILT_IN_POWL:
3823 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3824 && REAL_VALUES_EQUAL
3825 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3826 return estimate_operator_cost (MULT_EXPR, weights,
3827 gimple_call_arg (stmt, 0),
3828 gimple_call_arg (stmt, 0));
3829 break;
3830
3831 default:
3832 break;
3833 }
3834 }
b8698a0f 3835
d2d668fb 3836 cost = node ? weights->call_cost : weights->indirect_call_cost;
3837 if (gimple_call_lhs (stmt))
3838 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3839 for (i = 0; i < gimple_call_num_args (stmt); i++)
c7f599d0 3840 {
3841 tree arg = gimple_call_arg (stmt, i);
3842 cost += estimate_move_cost (TREE_TYPE (arg));
c7f599d0 3843 }
3844 break;
3845 }
88f4034b 3846
3847 case GIMPLE_RETURN:
3848 return weights->return_cost;
3849
3850 case GIMPLE_GOTO:
3851 case GIMPLE_LABEL:
3852 case GIMPLE_NOP:
3853 case GIMPLE_PHI:
726a989a 3854 case GIMPLE_PREDICT:
b5b8b0ac 3855 case GIMPLE_DEBUG:
3856 return 0;
3857
3858 case GIMPLE_ASM:
3859 {
3860 int count = asm_str_count (gimple_asm_string (stmt));
3861 /* 1000 means infinity. This avoids overflows later
3862 with very long asm statements. */
3863 if (count > 1000)
3864 count = 1000;
3865 return count;
3866 }
726a989a 3867
3868 case GIMPLE_RESX:
3869 /* This is either going to be an external function call with one
3870 argument, or two register copy statements plus a goto. */
3871 return 2;
3872
3873 case GIMPLE_EH_DISPATCH:
3874 /* ??? This is going to turn into a switch statement. Ideally
3875 we'd have a look at the eh region and estimate the number of
3876 edges involved. */
3877 return 10;
3878
3879 case GIMPLE_BIND:
3880 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3881
3882 case GIMPLE_EH_FILTER:
3883 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3884
3885 case GIMPLE_CATCH:
3886 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3887
3888 case GIMPLE_TRY:
3889 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3890 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3891
3892 /* OpenMP directives are generally very expensive. */
3893
3894 case GIMPLE_OMP_RETURN:
3895 case GIMPLE_OMP_SECTIONS_SWITCH:
3896 case GIMPLE_OMP_ATOMIC_STORE:
3897 case GIMPLE_OMP_CONTINUE:
3898 /* ...except these, which are cheap. */
3899 return 0;
3900
3901 case GIMPLE_OMP_ATOMIC_LOAD:
3902 return weights->omp_cost;
3903
3904 case GIMPLE_OMP_FOR:
3905 return (weights->omp_cost
3906 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3907 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3908
3909 case GIMPLE_OMP_PARALLEL:
3910 case GIMPLE_OMP_TASK:
3911 case GIMPLE_OMP_CRITICAL:
3912 case GIMPLE_OMP_MASTER:
acf0174b 3913 case GIMPLE_OMP_TASKGROUP:
3914 case GIMPLE_OMP_ORDERED:
3915 case GIMPLE_OMP_SECTION:
3916 case GIMPLE_OMP_SECTIONS:
3917 case GIMPLE_OMP_SINGLE:
3918 case GIMPLE_OMP_TARGET:
3919 case GIMPLE_OMP_TEAMS:
3920 return (weights->omp_cost
3921 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 3922
3923 case GIMPLE_TRANSACTION:
3924 return (weights->tm_cost
3925 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3926 weights));
3927
6de9cd9a 3928 default:
1e128c5f 3929 gcc_unreachable ();
6de9cd9a 3930 }
3931
3932 return cost;
3933}
3934
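/* [Editor's note] A self-contained sketch of the GIMPLE_SWITCH weighting in
   estimate_num_insns above.  For a switch with 16 case labels the size
   estimate charges 16 * 2 = 32, while the time estimate charges
   floor_log2 (16) * 2 = 8, modelling a balanced decision tree that executes
   only about log2 (n) compares per run.  */

static int switch_cost_sketch (int num_labels, int time_based)
{
  int lg2 = 0;

  for (int n = num_labels; n > 1; n >>= 1)	/* floor_log2 */
    lg2++;
  return time_based ? lg2 * 2 : num_labels * 2;
}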
3935/* Estimate number of instructions that will be created by expanding
3936 function FNDECL. WEIGHTS contains weights attributed to various
3937 constructs. */
aa4a53af 3938
6de9cd9a 3939int
726a989a 3940estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 3941{
3942 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3943 gimple_stmt_iterator bsi;
e21aff8a 3944 basic_block bb;
726a989a 3945 int n = 0;
e21aff8a 3946
3947 gcc_assert (my_function && my_function->cfg);
3948 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 3949 {
3950 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3951 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 3952 }
e21aff8a 3953
726a989a 3954 return n;
3955}
3956
726a989a 3957
3958/* Initializes weights used by estimate_num_insns. */
3959
3960void
3961init_inline_once (void)
3962{
7f9bc51b 3963 eni_size_weights.call_cost = 1;
d2d668fb 3964 eni_size_weights.indirect_call_cost = 3;
625a2efb 3965 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 3966 eni_size_weights.div_mod_cost = 1;
7f9bc51b 3967 eni_size_weights.omp_cost = 40;
0a35513e 3968 eni_size_weights.tm_cost = 10;
02f0b13a 3969 eni_size_weights.time_based = false;
9bb2f479 3970 eni_size_weights.return_cost = 1;
3971
3972 /* Estimating time for call is difficult, since we have no idea what the
3973 called function does. In the current uses of eni_time_weights,
3974 underestimating the cost does less harm than overestimating it, so
ea2c620c 3975 we choose a rather small value here. */
7f9bc51b 3976 eni_time_weights.call_cost = 10;
d2d668fb 3977 eni_time_weights.indirect_call_cost = 15;
9bb2f479 3978 eni_time_weights.target_builtin_call_cost = 1;
7f9bc51b 3979 eni_time_weights.div_mod_cost = 10;
7f9bc51b 3980 eni_time_weights.omp_cost = 40;
0a35513e 3981 eni_time_weights.tm_cost = 40;
02f0b13a 3982 eni_time_weights.time_based = true;
9bb2f479 3983 eni_time_weights.return_cost = 2;
3984}
3985
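/* [Editor's note] A usage sketch, not verbatim GCC code: the two weight sets
   initialized above give a size-oriented and a time-oriented estimate of the
   same body, which inlining heuristics can compare.  */

static void report_estimates (tree fndecl)
{
  int size_insns = estimate_num_insns_fn (fndecl, &eni_size_weights);
  int time_insns = estimate_num_insns_fn (fndecl, &eni_time_weights);

  fprintf (stderr, "estimated size %d, time %d\n", size_insns, time_insns);
}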
3986/* Estimate the number of instructions in a gimple_seq. */
3987
3988int
3989count_insns_seq (gimple_seq seq, eni_weights *weights)
3990{
3991 gimple_stmt_iterator gsi;
3992 int n = 0;
3993 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3994 n += estimate_num_insns (gsi_stmt (gsi), weights);
3995
3996 return n;
3997}
3998
3999
e21aff8a 4000/* Install new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 4001
e21aff8a 4002static void
4a283090 4003prepend_lexical_block (tree current_block, tree new_block)
e21aff8a 4004{
4005 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4006 BLOCK_SUBBLOCKS (current_block) = new_block;
e21aff8a 4007 BLOCK_SUPERCONTEXT (new_block) = current_block;
4008}
4009
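/* [Editor's note] prepend_lexical_block above is an ordinary
   singly-linked-list prepend over BLOCK_CHAIN plus a parent back-pointer;
   the same shape in self-contained C over a hypothetical struct: */

struct blk { struct blk *chain, *subblocks, *supercontext; };

static void prepend_blk (struct blk *parent, struct blk *child)
{
  child->chain = parent->subblocks;	/* old first child becomes our sibling */
  parent->subblocks = child;		/* we become the new first child */
  child->supercontext = parent;		/* record the enclosing scope */
}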
4010/* Add local variables from CALLEE to CALLER. */
4011
4012static inline void
4013add_local_variables (struct function *callee, struct function *caller,
ae0379fc 4014 copy_body_data *id)
4015{
4016 tree var;
4017 unsigned ix;
4018
4019 FOR_EACH_LOCAL_DECL (callee, ix, var)
ae0379fc 4020 if (!can_be_nonlocal (var, id))
4021 {
4022 tree new_var = remap_decl (var, id);
4023
4024 /* Remap debug-expressions. */
4025 if (TREE_CODE (new_var) == VAR_DECL
839b422f 4026 && DECL_HAS_DEBUG_EXPR_P (var)
4027 && new_var != var)
4028 {
4029 tree tem = DECL_DEBUG_EXPR (var);
4030 bool old_regimplify = id->regimplify;
4031 id->remapping_type_depth++;
4032 walk_tree (&tem, copy_tree_body_r, id, NULL);
4033 id->remapping_type_depth--;
4034 id->regimplify = old_regimplify;
4035 SET_DECL_DEBUG_EXPR (new_var, tem);
839b422f 4036 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4037 }
4038 add_local_decl (caller, new_var);
4039 }
4040}
4041
726a989a 4042/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 4043
e21aff8a 4044static bool
726a989a 4045expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 4046{
0f900dfa 4047 tree use_retvar;
d436bff8 4048 tree fn;
b5b8b0ac 4049 struct pointer_map_t *st, *dst;
110cfe1c 4050 tree return_slot;
7740f00d 4051 tree modify_dest;
6de9cd9a 4052 location_t saved_location;
e21aff8a 4053 struct cgraph_edge *cg_edge;
61a05df1 4054 cgraph_inline_failed_t reason;
4055 basic_block return_block;
4056 edge e;
726a989a 4057 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 4058 bool successfully_inlined = FALSE;
4f6c2131 4059 bool purge_dead_abnormal_edges;
d4e4baa9 4060
4061 /* Set input_location here so we get the right instantiation context
4062 if we call instantiate_decl from inlinable_function_p. */
532aafad 4063 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
6de9cd9a 4064 saved_location = input_location;
035775c8 4065 input_location = gimple_location (stmt);
6de9cd9a 4066
d4e4baa9 4067 /* From here on, we're only interested in CALL_EXPRs. */
726a989a 4068 if (gimple_code (stmt) != GIMPLE_CALL)
6de9cd9a 4069 goto egress;
d4e4baa9 4070
4071 cg_edge = cgraph_edge (id->dst_node, stmt);
4072 gcc_checking_assert (cg_edge);
4073 /* First, see if we can figure out what function is being called.
4074 If we cannot, then there is no hope of inlining the function. */
db09f943 4075 if (cg_edge->indirect_unknown_callee)
3949c4a7 4076 goto egress;
67348ccc 4077 fn = cg_edge->callee->decl;
db09f943 4078 gcc_checking_assert (fn);
b58b1157 4079
726a989a 4080 /* If FN is a declaration of a function in a nested scope that was
4081 globally declared inline, we don't set its DECL_INITIAL.
4082 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4083 C++ front-end uses it for cdtors to refer to their internal
4084 declarations, which are not real functions. Fortunately those
4085 don't have trees to be saved, so we can tell by checking their
4086 gimple_body. */
4087 if (!DECL_INITIAL (fn)
a1a0fd4e 4088 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 4089 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4090 fn = DECL_ABSTRACT_ORIGIN (fn);
4091
8f4f502f 4092 /* Don't try to inline functions that are not well-suited to inlining. */
9c8305f8 4093 if (cg_edge->inline_failed)
a833faa5 4094 {
9c8305f8 4095 reason = cg_edge->inline_failed;
4096 /* If this call was originally indirect, we do not want to emit any
4097 inlining related warnings or sorry messages because there are no
4098 guarantees regarding those. */
e33c6cd6 4099 if (cg_edge->indirect_inlining_edge)
4100 goto egress;
4101
7fac66d4 4102 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4103 /* For extern inline functions that get redefined we have always
4104 silently ignored the always_inline flag. Better behaviour would
4105 be to keep both bodies and use the extern inline body
4106 for inlining, but we can't do that because frontends overwrite
4107 the body. */
4108 && !cg_edge->callee->local.redefined_extern_inline
4109 /* During early inline pass, report only when optimization is
4110 not turned on. */
4111 && (cgraph_global_info_ready
4112 || !optimize)
4113 /* PR 20090218-1_0.c. Body can be provided by another module. */
4114 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
2d327012 4115 {
4116 error ("inlining failed in call to always_inline %q+F: %s", fn,
4117 cgraph_inline_failed_string (reason));
4118 error ("called from here");
2d327012 4119 }
4120 else if (warn_inline
4121 && DECL_DECLARED_INLINE_P (fn)
4122 && !DECL_NO_INLINE_WARNING_P (fn)
2d327012 4123 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 4124 && reason != CIF_UNSPECIFIED
d63db217 4125 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4126 /* Do not warn about not inlined recursive calls. */
4127 && !cgraph_edge_recursive_p (cg_edge)
d63db217 4128 /* Avoid warnings during early inline pass. */
7e8b322a 4129 && cgraph_global_info_ready)
a833faa5 4130 {
dee15844 4131 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
49c8958b 4132 fn, _(cgraph_inline_failed_string (reason)));
3176a0c2 4133 warning (OPT_Winline, "called from here");
a833faa5 4134 }
6de9cd9a 4135 goto egress;
a833faa5 4136 }
67348ccc 4137 fn = cg_edge->callee->decl;
a2e2a668 4138 cgraph_get_body (cg_edge->callee);
d4e4baa9 4139
18c6ada9 4140#ifdef ENABLE_CHECKING
67348ccc 4141 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 4142 verify_cgraph_node (cg_edge->callee);
4143#endif
4144
e21aff8a 4145 /* We will be inlining this callee. */
1d65f45c 4146 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
e21aff8a 4147
f9417da1 4148 /* Update the callers EH personality. */
4149 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4150 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4151 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
f9417da1 4152
726a989a 4153 /* Split the block holding the GIMPLE_CALL. */
4154 e = split_block (bb, stmt);
4155 bb = e->src;
4156 return_block = e->dest;
4157 remove_edge (e);
4158
4159 /* split_block splits after the statement; work around this by
4160 moving the call into the second block manually. Not pretty,
4161 but seems easier than doing the CFG manipulation by hand
4162 when the GIMPLE_CALL is in the last statement of BB. */
4163 stmt_gsi = gsi_last_bb (bb);
4164 gsi_remove (&stmt_gsi, false);
4f6c2131 4165
726a989a 4166 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4167 been the source of abnormal edges. In this case, schedule
4168 the removal of dead abnormal edges. */
4169 gsi = gsi_start_bb (return_block);
4170 if (gsi_end_p (gsi))
e21aff8a 4171 {
726a989a 4172 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 4173 purge_dead_abnormal_edges = true;
e21aff8a 4174 }
4175 else
4176 {
726a989a 4177 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4178 purge_dead_abnormal_edges = false;
4179 }
4180
726a989a 4181 stmt_gsi = gsi_start_bb (return_block);
742a37d5 4182
4183 /* Build a block containing code to initialize the arguments, the
4184 actual inline expansion of the body, and a label for the return
4185 statements within the function to jump to. The type of the
4186 statement expression is the return type of the function call.
4187 ??? If the call does not have an associated block then we will
4188 remap all callee blocks to NULL, effectively dropping most of
4189 its debug information. This should only happen for calls to
4190 artificial decls inserted by the compiler itself. We need to
4191 either link the inlined blocks into the caller block tree or
4192 not refer to them in any way, so as not to break GC for locations. */
5368224f 4193 if (gimple_block (stmt))
4194 {
4195 id->block = make_node (BLOCK);
4196 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
a9d5a059 4197 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4198 prepend_lexical_block (gimple_block (stmt), id->block);
4199 }
e21aff8a 4200
4201 /* Local declarations will be replaced by their equivalents in this
4202 map. */
4203 st = id->decl_map;
6be42dd4 4204 id->decl_map = pointer_map_create ();
4205 dst = id->debug_map;
4206 id->debug_map = NULL;
d4e4baa9 4207
e21aff8a 4208 /* Record the function we are about to inline. */
4209 id->src_fn = fn;
4210 id->src_node = cg_edge->callee;
110cfe1c 4211 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 4212 id->gimple_call = stmt;
1b369fae 4213
4214 gcc_assert (!id->src_cfun->after_inlining);
4215
045685a9 4216 id->entry_bb = bb;
4217 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4218 {
4219 gimple_stmt_iterator si = gsi_last_bb (bb);
4220 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4221 NOT_TAKEN),
4222 GSI_NEW_STMT);
4223 }
726a989a 4224 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 4225
ea99e0be 4226 if (DECL_INITIAL (fn))
94645a02 4227 {
4228 if (gimple_block (stmt))
4229 {
4230 tree *var;
4231
4232 prepend_lexical_block (id->block,
4233 remap_blocks (DECL_INITIAL (fn), id));
4234 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4235 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4236 == NULL_TREE));
4237 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4238 otherwise for DWARF the DW_TAG_formal_parameter DIEs will not be children
4239 of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4240 under it. The parameters can then be evaluated in the debugger,
4241 but don't show in backtraces. */
4242 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4243 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4244 {
4245 tree v = *var;
4246 *var = TREE_CHAIN (v);
4247 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4248 BLOCK_VARS (id->block) = v;
4249 }
4250 else
4251 var = &TREE_CHAIN (*var);
4252 }
4253 else
4254 remap_blocks_to_null (DECL_INITIAL (fn), id);
94645a02 4255 }
acb8f212 4256
4257 /* Return statements in the function body will be replaced by jumps
4258 to the RET_LABEL. */
4259 gcc_assert (DECL_INITIAL (fn));
4260 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 4261
726a989a 4262 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 4263 return_slot = NULL;
726a989a 4264 if (gimple_call_lhs (stmt))
81bafd36 4265 {
726a989a 4266 modify_dest = gimple_call_lhs (stmt);
4267
4268 /* The function which we are inlining might not return a value,
4269 in which case we should issue a warning that the function
4270 does not return a value. In that case the optimizers will
4271 see that the variable to which the value is assigned was not
4272 initialized. We do not want to issue a warning about that
4273 uninitialized variable. */
4274 if (DECL_P (modify_dest))
4275 TREE_NO_WARNING (modify_dest) = 1;
4276
4277 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 4278 {
110cfe1c 4279 return_slot = modify_dest;
4280 modify_dest = NULL;
4281 }
81bafd36 4282 }
4283 else
4284 modify_dest = NULL;
4285
4286 /* If we are inlining a call to the C++ operator new, we don't want
4287 to use type based alias analysis on the return value. Otherwise
4288 we may get confused if the compiler sees that the inlined new
4289 function returns a pointer which was just deleted. See bug
4290 33407. */
4291 if (DECL_IS_OPERATOR_NEW (fn))
4292 {
4293 return_slot = NULL;
4294 modify_dest = NULL;
4295 }
4296
d4e4baa9 4297 /* Declare the return variable for the function. */
6938f93f 4298 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
1ea193c2 4299
acb8f212 4300 /* Add local vars in this inlined callee to caller. */
ae0379fc 4301 add_local_variables (id->src_cfun, cfun, id);
acb8f212 4302
4303 if (dump_file && (dump_flags & TDF_DETAILS))
4304 {
4305 fprintf (dump_file, "Inlining ");
b8698a0f 4306 print_generic_expr (dump_file, id->src_fn, 0);
0d63a740 4307 fprintf (dump_file, " to ");
b8698a0f 4308 print_generic_expr (dump_file, id->dst_fn, 0);
4309 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4310 }
4311
4312 /* This is it. Duplicate the callee body. Assume callee is
4313 pre-gimplified. Note that we must not alter the caller
4314 function in any way before this point, as this CALL_EXPR may be
4315 a self-referential call; if we're calling ourselves, we need to
4316 duplicate our body before altering anything. */
0d63a740 4317 copy_body (id, bb->count,
8b47039c 4318 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
f3b331d1 4319 bb, return_block, NULL);
eb50f5f4 4320
d086d311 4321 /* Reset the escaped solution. */
6b8ed145 4322 if (cfun->gimple_df)
d086d311 4323 pt_solution_reset (&cfun->gimple_df->escaped);
6b8ed145 4324
d4e4baa9 4325 /* Clean up. */
4326 if (id->debug_map)
4327 {
4328 pointer_map_destroy (id->debug_map);
4329 id->debug_map = dst;
4330 }
6be42dd4 4331 pointer_map_destroy (id->decl_map);
4332 id->decl_map = st;
4333
4334 /* Unlink the calls virtual operands before replacing it. */
4335 unlink_stmt_vdef (stmt);
4336
84936f6f 4337 /* If the inlined function returns a result that we care about,
4338 substitute the GIMPLE_CALL with an assignment of the return
4339 variable to the LHS of the call. That is, if STMT was
4340 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4341 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 4342 {
4343 gimple old_stmt = stmt;
4344 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4345 gsi_replace (&stmt_gsi, stmt, false);
726a989a 4346 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 4347 }
6de9cd9a 4348 else
110cfe1c 4349 {
4350 /* Handle the case of inlining a function with no return
4351 statement, which causes the return value to become undefined. */
4352 if (gimple_call_lhs (stmt)
4353 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 4354 {
4355 tree name = gimple_call_lhs (stmt);
4356 tree var = SSA_NAME_VAR (name);
32244553 4357 tree def = ssa_default_def (cfun, var);
110cfe1c 4358
4359 if (def)
4360 {
4361 /* If the variable is used undefined, make this name
4362 undefined via a move. */
4363 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4364 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 4365 }
4366 else
4367 {
4368 /* Otherwise make this variable undefined. */
4369 gsi_remove (&stmt_gsi, true);
32244553 4370 set_ssa_default_def (cfun, var, name);
726a989a 4371 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4372 }
4373 }
4374 else
726a989a 4375 gsi_remove (&stmt_gsi, true);
110cfe1c 4376 }
d4e4baa9 4377
4f6c2131 4378 if (purge_dead_abnormal_edges)
4379 {
4380 gimple_purge_dead_eh_edges (return_block);
4381 gimple_purge_dead_abnormal_call_edges (return_block);
4382 }
84936f6f 4383
4384 /* If the value of the new expression is ignored, that's OK. We
4385 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4386 the equivalent inlined version either. */
4387 if (is_gimple_assign (stmt))
4388 {
4389 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 4390 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4391 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4392 }
84936f6f 4393
4394 /* Output the inlining info for this abstract function, since it has been
4395 inlined. If we don't do this now, we can lose the information about the
4396 variables in the function when the blocks get blown away as soon as we
4397 remove the cgraph node. */
3e492e9c 4398 if (gimple_block (stmt))
67348ccc 4399 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 4400
e72fcfe8 4401 /* Update callgraph if needed. */
e21aff8a 4402 cgraph_remove_node (cg_edge->callee);
e72fcfe8 4403
e21aff8a 4404 id->block = NULL_TREE;
e21aff8a 4405 successfully_inlined = TRUE;
742a37d5 4406
4407 egress:
4408 input_location = saved_location;
e21aff8a 4409 return successfully_inlined;
d4e4baa9 4410}
6de9cd9a 4411
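/* [Editor's note] A pseudo-GIMPLE sketch (all names invented) of the overall
   effect of expand_call_inline on a call site "x = add1 (y)":

     before:                    after:
       <bb 2>:                    <bb 2>:            ; bb split before the call
         x_1 = add1 (y_2);        <bb 4>:            ; copied callee body
         use (x_1);                 retval_5 = y_2 + 1;
                                  <bb 3>:            ; return_block
                                    x_1 = retval_5;  ; call became an assignment
                                    use (x_1);

   The GIMPLE_CALL is moved into its own block, the remapped callee body is
   copied between the split halves, and the call is finally replaced by an
   assignment from the return variable chosen by declare_return_variable.  */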
4412/* Expand call statements reachable from STMT_P.
4413 We can only have CALL_EXPRs as the "toplevel" tree code or nested
0a35513e 4414 in a MODIFY_EXPR. */
4415
4416static bool
1b369fae 4417gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 4418{
726a989a 4419 gimple_stmt_iterator gsi;
6de9cd9a 4420
726a989a 4421 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4422 {
726a989a 4423 gimple stmt = gsi_stmt (gsi);
e21aff8a 4424
4425 if (is_gimple_call (stmt)
4426 && expand_call_inline (bb, stmt, id))
4427 return true;
6de9cd9a 4428 }
726a989a 4429
e21aff8a 4430 return false;
4431}
4432
726a989a 4433
4434/* Walk all basic blocks created after FIRST and try to fold every statement
4435 in the STATEMENTS pointer set. */
726a989a 4436
4437static void
4438fold_marked_statements (int first, struct pointer_set_t *statements)
4439{
0cae8d31 4440 for (; first < n_basic_blocks_for_fn (cfun); first++)
4441 if (BASIC_BLOCK (first))
4442 {
4443 gimple_stmt_iterator gsi;
4444
4445 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4446 !gsi_end_p (gsi);
4447 gsi_next (&gsi))
4448 if (pointer_set_contains (statements, gsi_stmt (gsi)))
9477eb38 4449 {
726a989a 4450 gimple old_stmt = gsi_stmt (gsi);
4b685e14 4451 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
2bafad93 4452
4453 if (old_decl && DECL_BUILT_IN (old_decl))
4454 {
4455 /* Folding builtins can create multiple instructions,
4456 so we need to look at all of them. */
4457 gimple_stmt_iterator i2 = gsi;
4458 gsi_prev (&i2);
4459 if (fold_stmt (&gsi))
4460 {
4461 gimple new_stmt;
a9d24544
JJ
4462 /* If a builtin at the end of a bb folded into nothing,
4463 the following loop won't work. */
4464 if (gsi_end_p (gsi))
4465 {
4466 cgraph_update_edges_for_call_stmt (old_stmt,
4467 old_decl, NULL);
4468 break;
4469 }
4470 if (gsi_end_p (i2))
4471 i2 = gsi_start_bb (BASIC_BLOCK (first));
4472 else
4473 gsi_next (&i2);
4474 while (1)
4475 {
4476 new_stmt = gsi_stmt (i2);
4477 update_stmt (new_stmt);
4478 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4479 new_stmt);
4480
4481 if (new_stmt == gsi_stmt (gsi))
4482 {
4483 /* It is okay to check only for the very last
4484 of these statements. If it is a throwing
4485 statement nothing will change. If it isn't,
4486 this can remove EH edges. That would only
4487 be incorrect if some intermediate stmts
4488 throw but the last one doesn't; that would
4489 mean we'd have to split the block, which we
4490 can't do here and we'd lose anyway. And as
4491 builtins probably never throw, this all
4492 is moot anyway. */
4493 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4494 new_stmt))
4495 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4496 break;
4497 }
4498 gsi_next (&i2);
4499 }
4500 }
4501 }
4502 else if (fold_stmt (&gsi))
9477eb38 4503 {
4504 /* Re-read the statement from GSI as fold_stmt() may
4505 have changed it. */
4506 gimple new_stmt = gsi_stmt (gsi);
4507 update_stmt (new_stmt);
4508
4509 if (is_gimple_call (old_stmt)
4510 || is_gimple_call (new_stmt))
4511 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4512 new_stmt);
4513
4514 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4515 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4516 }
4517 }
4518 }
4519}
4520
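/* [Editor's note] A hypothetical user-level example of why statements touched
   by inlining are queued for folding: once the constant string reaches the
   builtin, fold_stmt collapses the whole call.  */

static int len (const char *s)
{
  return __builtin_strlen (s);
}

int five (void)
{
  return len ("hello");		/* after inlining, folds to 'return 5;' */
}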
4521/* Expand calls to inline functions in the body of FN. */
4522
873aa8f5 4523unsigned int
46c5ad27 4524optimize_inline_calls (tree fn)
d4e4baa9 4525{
1b369fae 4526 copy_body_data id;
e21aff8a 4527 basic_block bb;
0cae8d31 4528 int last = n_basic_blocks_for_fn (cfun);
5d7b099c 4529 bool inlined_p = false;
d406b663 4530
4531 /* Clear out ID. */
4532 memset (&id, 0, sizeof (id));
4533
581985d7 4534 id.src_node = id.dst_node = cgraph_get_node (fn);
67348ccc 4535 gcc_assert (id.dst_node->definition);
1b369fae 4536 id.dst_fn = fn;
d4e4baa9 4537 /* Or any functions that aren't finished yet. */
d4e4baa9 4538 if (current_function_decl)
0f900dfa 4539 id.dst_fn = current_function_decl;
4540
4541 id.copy_decl = copy_decl_maybe_to_var;
4542 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4543 id.transform_new_cfg = false;
4544 id.transform_return_to_modify = true;
78bbd765 4545 id.transform_parameter = true;
9ff420f1 4546 id.transform_lang_insert_block = NULL;
b8a00a4d 4547 id.statements_to_fold = pointer_set_create ();
1b369fae 4548
45852dcc 4549 push_gimplify_context ();
d4e4baa9 4550
4551 /* We make no attempts to keep dominance info up-to-date. */
4552 free_dominance_info (CDI_DOMINATORS);
4553 free_dominance_info (CDI_POST_DOMINATORS);
4554
4555 /* Register specific gimple functions. */
4556 gimple_register_cfg_hooks ();
4557
4558 /* Reach the trees by walking over the CFG, and note the
4559 enclosing basic-blocks in the call edges. */
4560 /* We walk the blocks going forward, because inlined function bodies
4561 will split id->current_basic_block, and the new blocks will
4562 follow it; we'll trudge through them, processing their CALL_EXPRs
4563 along the way. */
4564 FOR_EACH_BB (bb)
5d7b099c 4565 inlined_p |= gimple_expand_calls_inline (bb, &id);
d4e4baa9 4566
e21aff8a 4567 pop_gimplify_context (NULL);
6de9cd9a 4568
4569#ifdef ENABLE_CHECKING
4570 {
4571 struct cgraph_edge *e;
4572
1b369fae 4573 verify_cgraph_node (id.dst_node);
4574
4575 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 4576 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 4577 gcc_assert (e->inline_failed);
4578 }
4579#endif
b8698a0f 4580
5d7b099c 4581 /* Fold queued statements. */
4582 fold_marked_statements (last, id.statements_to_fold);
4583 pointer_set_destroy (id.statements_to_fold);
b8698a0f 4584
9771b263 4585 gcc_assert (!id.debug_stmts.exists ());
b5b8b0ac 4586
4587 /* If we didn't inline into the function there is nothing to do. */
4588 if (!inlined_p)
4589 return 0;
4590
4591 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4592 number_blocks (fn);
b8a00a4d 4593
4594 delete_unreachable_blocks_update_callgraph (&id);
4595#ifdef ENABLE_CHECKING
4596 verify_cgraph_node (id.dst_node);
4597#endif
726a989a 4598
4599 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4600 not possible yet - the IPA passes might make various functions to not
4601 throw and they don't care to proactively update local EH info. This is
4602 done later in fixup_cfg pass that also execute the verification. */
4603 return (TODO_update_ssa
4604 | TODO_cleanup_cfg
45a80bb9 4605 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5d7b099c 4606 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
45a80bb9 4607 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4608}
4609
4610/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4611
4612tree
46c5ad27 4613copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4614{
4615 enum tree_code code = TREE_CODE (*tp);
07beea0d 4616 enum tree_code_class cl = TREE_CODE_CLASS (code);
4617
4618 /* We make copies of most nodes. */
07beea0d 4619 if (IS_EXPR_CODE_CLASS (cl)
4620 || code == TREE_LIST
4621 || code == TREE_VEC
4622 || code == TYPE_DECL
4623 || code == OMP_CLAUSE)
4624 {
4625 /* Because the chain gets clobbered when we make a copy, we save it
4626 here. */
82d6e6fc 4627 tree chain = NULL_TREE, new_tree;
07beea0d 4628
4629 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4630 chain = TREE_CHAIN (*tp);
4631
4632 /* Copy the node. */
82d6e6fc 4633 new_tree = copy_node (*tp);
6de9cd9a 4634
82d6e6fc 4635 *tp = new_tree;
4636
4637 /* Now, restore the chain, if appropriate. That will cause
4638 walk_tree to walk into the chain as well. */
4639 if (code == PARM_DECL
4640 || code == TREE_LIST
aaf46ef9 4641 || code == OMP_CLAUSE)
4642 TREE_CHAIN (*tp) = chain;
4643
4644 /* For now, we don't update BLOCKs when we make copies. So, we
4645 have to nullify all BIND_EXPRs. */
4646 if (TREE_CODE (*tp) == BIND_EXPR)
4647 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 4648 }
4649 else if (code == CONSTRUCTOR)
4650 {
4651 /* CONSTRUCTOR nodes need special handling because
4652 we need to duplicate the vector of elements. */
82d6e6fc 4653 tree new_tree;
4038c495 4654
82d6e6fc 4655 new_tree = copy_node (*tp);
9771b263 4656 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
82d6e6fc 4657 *tp = new_tree;
4038c495 4658 }
3533b943 4659 else if (code == STATEMENT_LIST)
4660 /* We used to just abort on STATEMENT_LIST, but we can run into them
4661 with statement-expressions (c++/40975). */
4662 copy_statement_list (tp);
6615c446 4663 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 4664 *walk_subtrees = 0;
6615c446 4665 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 4666 *walk_subtrees = 0;
a396f8ae
GK
4667 else if (TREE_CODE_CLASS (code) == tcc_constant)
4668 *walk_subtrees = 0;
d4e4baa9
AO
4669 return NULL_TREE;
4670}
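
/* Usage sketch (illustrative, not part of GCC): copy_tree_r is a walk_tree
   callback, so a caller that wants a deep copy of a GENERIC expression
   EXPR can write:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   After the walk, COPY points at a fresh tree whose subtrees were
   duplicated node by node by the callback above.  */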

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
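
/* Usage sketch (illustrative, not part of GCC): callers in this file
   invoke the helper from a walk_tree callback when they encounter a
   SAVE_EXPR, passing the decl map of the copy being built:

     if (TREE_CODE (*tp) == SAVE_EXPR)
       remap_save_expr (tp, id->decl_map, walk_subtrees);

   The double pointer_map_insert above also maps the fresh copy to itself,
   so running into an already-remapped SAVE_EXPR is a harmless no-op.  */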

/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map of the
   'copy_body_data *' carried in WI->INFO.  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
			bool *handled_ops_p ATTRIBUTE_UNUSED,
			struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the 'copy_body_data *' carried in WI->INFO,
   remaps all local declarations to appropriate replacements in gimple
   operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  struct pointer_map_t *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only process local declarations (variables and labels).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, expr);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the 'copy_body_data *' carried in WI->INFO,
   remaps all local declarations to appropriate replacements in gimple
   statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
						 NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return copy;
}
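
/* Usage sketch (illustrative, not part of GCC): a pass that needs a
   private duplicate of a statement sequence SEQ in the current function
   can simply do

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   and then edit or reinsert DUP without clobbering the local labels and
   variables referenced by the original SEQ.  */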


/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
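
/* Usage sketch (illustrative, hypothetical gdb session): with the compiler
   stopped under the debugger, one can ask whether EXPR occurs inside BODY:

     (gdb) call debug_find_tree (body, expr)
     $1 = true

   where BODY and EXPR are convenience variables holding tree nodes.  */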


/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}

/* Finish up the copy COPY of DECL.  DECL originally lived in ID->src_fn;
   the copy will live in ID->dst_fn.  Shared by the copy_decl_* helpers
   below.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original declaration.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}

/* Make a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL for use in
   ID->dst_fn.  */

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Copy DECL without changing its kind (a variable stays a variable, a
   label stays a label) for use in ID->dst_fn.  */

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Copy DECL, turning PARM_DECLs and RESULT_DECLs into VAR_DECLs and
   leaving everything else unchanged.  */

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	tree new_tree = remap_decl (arg, id);
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
	lang_hooks.dup_lang_specific_decl (new_tree);
	*parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used as a
	   temporary variable later in the function, the uses will be
	   replaced by the local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
	/* Declare this new variable.  */
	DECL_CHAIN (var) = *vars;
	*vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
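
/* Usage sketch (illustrative, not part of GCC): versioning is refused for
   functions carrying the "noclone" attribute or containing constructs
   that copy_forbidden rejects.  Given a declaration such as

     int f (int) __attribute__ ((noclone));

   tree_versionable_function_p applied to f's FUNCTION_DECL returns false,
   while an ordinary function yields true.  */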

/* Delete all unreachable basic blocks and update callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
       b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));

	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    cgraph_remove_node_and_inline_clones (e->callee,
							  id->dst_node);
		  else
		    cgraph_remove_edge (e);
		}
	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && id->dst_node->clones)
		for (node = id->dst_node->clones; node != id->dst_node;)
		  {
		    ipa_remove_stmt_references (node, gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  cgraph_remove_node_and_inline_clones (e->callee,
								id->dst_node);
			else
			  cgraph_remove_edge (e);
		      }

		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != id->dst_node
			       && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != id->dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}

/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
	{
	  unsigned int i;
	  for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
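
/* Traversal sketch (illustrative, not part of GCC): update_clone_info and
   delete_unreachable_blocks_update_callgraph above both walk the clone
   tree rooted at ROOT in preorder without recursion: descend into
   NODE->clones first, then try NODE->next_sibling_clone, and climb back
   through NODE->clone_of when a subtree is exhausted.  In isolation, with
   a hypothetical per-node action visit ():

     for (node = root->clones; node != root;)
       {
	 visit (node);
	 if (node->clones)
	   node = node->clones;
	 else if (node->next_sibling_clone)
	   node = node->next_sibling_clone;
	 else
	   {
	     while (node != root && !node->next_sibling_clone)
	       node = node->clone_of;
	     if (node != root)
	       node = node->next_sibling_clone;
	   }
       }
 */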

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters
   are removed from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks are copied.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map_p, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  stack_vec<gimple, 10> init_stmts;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_get_node (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_get_node (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Record statements that will need folding after the copy.  */
  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;
  if (id.src_node->ipa_transforms_to_apply.exists ())
    {
      vec<ipa_opt_pass> old_transforms_to_apply
	= id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply
	= id.src_node->ipa_transforms_to_apply.copy ();
      for (i = 0; i < old_transforms_to_apply.length (); i++)
	id.dst_node->ipa_transforms_to_apply.safe_push
	  (old_transforms_to_apply[i]);
      old_transforms_to_apply.release ();
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			 &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    if (!replace_info->old_tree)
	      {
		int i = replace_info->parm_num;
		tree parm;
		tree req_type;

		for (parm = DECL_ARGUMENTS (old_decl); i;
		     parm = DECL_CHAIN (parm))
		  i--;
		replace_info->old_tree = parm;
		req_type = TREE_TYPE (parm);
		if (!useless_type_conversion_p (req_type,
						TREE_TYPE (replace_info->new_tree)))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type)
			     == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			if (dump_file)
			  {
			    fprintf (dump_file, "    const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree, 0);
			    fprintf (dump_file,
				     "  can't be converted to param ");
			    print_generic_expr (dump_file, parm, 0);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
	      }
	  }
      }

  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	  e->count = bb->count;
	}
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
}
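
/* Usage sketch (illustrative, simplified): real callers create NEW_DECL
   via the cgraph cloning helpers, but the overall shape of a call that
   clones ORIG_DECL wholesale, dropping nothing, is:

     tree_function_versioning (orig_decl, clone_decl,
			       NULL,    -- tree_map: no replacements
			       false,   -- update_clones
			       NULL,    -- args_to_skip
			       false,   -- skip_return
			       NULL,    -- blocks_to_copy: copy all
			       NULL);   -- new_entry: keep the entry BB
 */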

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	*pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
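
/* Usage sketch (illustrative, not part of GCC): for a "const" callee whose
   saved GENERIC body is a single return, e.g.

     static int twice (int x) __attribute__ ((const));
     static int twice (int x) { return 2 * x; }

   a front end holding the CALL_EXPR exp = twice (a) can try

     tree folded = maybe_inline_call_in_expr (exp);

   and, on success, use the returned expression (here 2 * a) in place of
   the call.  */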

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
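
/* Usage sketch (illustrative, not part of GCC): given a RECORD_TYPE T,

     tree dup = build_duplicate_type (t);

   yields a structurally identical but distinct type whose FIELD_DECLs
   were copied as well, and which serves as its own TYPE_CANONICAL.  */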